#!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''Finds yaml tests, converts them to Java tests.'''
from __future__ import print_function
import sys
import os
import os.path
import re
import time
import ast
import argparse
import metajava
import process_polyglot
import logging
from process_polyglot import Unhandled, Skip, FatalSkip, SkippedTest
try:
from cStringIO import StringIO
except ImportError:
from io import StringIO
from collections import namedtuple
sys.path.append(
os.path.abspath(os.path.join(__file__, "../../../test/common")))
import parsePolyglot
parsePolyglot.printDebug = False
logger = logging.getLogger("convert_tests")
# Supplied by import_python_driver
r = None
TEST_EXCLUSIONS = [
# python only tests
# 'regression/1133',
# 'regression/767',
# 'regression/1005',
'regression/',
'limits', # pending fix in issue #4965
# double run
'changefeeds/squash',
# arity checked at compile time
'arity',
'.rb.yaml',
]
def main():
logging.basicConfig(format="[%(name)s] %(message)s", level=logging.INFO)
    start = time.perf_counter()  # time.clock() was removed in Python 3.8
args = parse_args()
if args.debug:
logger.setLevel(logging.DEBUG)
logging.getLogger('process_polyglot').setLevel(logging.DEBUG)
elif args.info:
logger.setLevel(logging.INFO)
logging.getLogger('process_polyglot').setLevel(logging.INFO)
else:
logger.root.setLevel(logging.WARNING)
if args.e:
evaluate_snippet(args.e)
exit(0)
global r
r = import_python_driver(args.python_driver_dir)
renderer = metajava.Renderer(
args.template_dir,
invoking_filenames=[
__file__,
process_polyglot.__file__,
])
for testfile in process_polyglot.all_yaml_tests(
args.test_dir,
TEST_EXCLUSIONS):
logger.info("Working on %s", testfile)
TestFile(
test_dir=args.test_dir,
filename=testfile,
test_output_dir=args.test_output_dir,
renderer=renderer,
).load().render()
logger.info("Finished in %s seconds", time.clock() - start)
def parse_args():
'''Parse command line arguments'''
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
"--test-dir",
help="Directory where yaml tests are",
default="../../test/rql_test/src"
)
parser.add_argument(
"--test-output-dir",
help="Directory to render tests to",
default="./src/test/java/gen",
)
parser.add_argument(
"--template-dir",
help="Where to find test generation templates",
default="./templates",
)
parser.add_argument(
"--python-driver-dir",
help="Where the built python driver is located",
default="../../build/drivers/python"
)
parser.add_argument(
"--test-file",
help="Only convert the specified yaml file",
)
parser.add_argument(
'--debug',
help="Print debug output",
dest='debug',
action='store_true')
parser.set_defaults(debug=False)
parser.add_argument(
'--info',
help="Print info level output",
dest='info',
action='store_true')
parser.set_defaults(info=False)
parser.add_argument(
'-e',
help="Convert an inline python reql to java reql snippet",
)
return parser.parse_args()
def import_python_driver(py_driver_dir):
'''Imports the test driver header'''
    stashed_path = list(sys.path)  # copy, so the insert below can be undone
sys.path.insert(0, os.path.realpath(py_driver_dir))
import rethinkdb as r
sys.path = stashed_path
return r
JavaQuery = namedtuple(
'JavaQuery',
('line',
'expected_type',
'expected_line',
'testfile',
'line_num',
'runopts')
)
JavaDef = namedtuple(
'JavaDef',
('line',
'varname',
'vartype',
'value',
'run_if_query',
'testfile',
'line_num',
'runopts')
)
Version = namedtuple("Version", "original java")
JAVA_DECL = re.compile(r'(?P<type>.+) (?P<var>\w+) = (?P<value>.*);')
def evaluate_snippet(snippet):
'''Just converts a single expression snippet into java'''
try:
parsed = ast.parse(snippet, mode='eval').body
except Exception as e:
return print("Error:", e)
try:
print(ReQLVisitor(smart_bracket=True).convert(parsed))
except Exception as e:
return print("Error:", e)
class TestFile(object):
'''Represents a single test file'''
def __init__(self, test_dir, filename, test_output_dir, renderer):
self.filename = filename
self.full_path = os.path.join(test_dir, filename)
self.module_name = metajava.camel(
filename.split('.')[0].replace('/', '_'))
self.test_output_dir = test_output_dir
self.reql_vars = {'r'}
self.renderer = renderer
def load(self):
'''Load the test file, yaml parse it, extract file-level metadata'''
with open(self.full_path) as f:
parsed_yaml = parsePolyglot.parseYAML(f)
self.description = parsed_yaml.get('desc', 'No description')
self.table_var_names = self.get_varnames(parsed_yaml)
self.reql_vars.update(self.table_var_names)
self.raw_test_data = parsed_yaml['tests']
self.test_generator = process_polyglot.tests_and_defs(
self.filename,
self.raw_test_data,
context=process_polyglot.create_context(r, self.table_var_names),
custom_field='java',
)
return self
def get_varnames(self, yaml_file):
'''Extract table variable names from yaml variable
They can be specified just space separated, or comma separated'''
raw_var_names = yaml_file.get('table_variable_name', '')
if not raw_var_names:
return set()
return set(re.split(r'[, ]+', raw_var_names))
def render(self):
'''Renders the converted tests to a runnable test file'''
defs_and_test = ast_to_java(self.test_generator, self.reql_vars)
self.renderer.render(
'Test.java',
output_dir=self.test_output_dir,
output_name=self.module_name + '.java',
dependencies=[self.full_path],
defs_and_test=defs_and_test,
table_var_names=list(sorted(self.table_var_names)),
module_name=self.module_name,
JavaQuery=JavaQuery,
JavaDef=JavaDef,
description=self.description,
)
def py_to_java_type(py_type):
'''Converts python types to their Java equivalents'''
if py_type is None:
return None
elif isinstance(py_type, str):
# This can be called on something already converted
return py_type
elif py_type.__name__ == 'function':
return 'ReqlFunction1'
elif (py_type.__module__ == 'datetime' and
py_type.__name__ == 'datetime'):
return 'OffsetDateTime'
elif py_type.__module__ == 'builtins':
return {
bool: 'Boolean',
bytes: 'byte[]',
int: 'Long',
float: 'Double',
str: 'String',
dict: 'Map',
list: 'List',
object: 'Object',
type(None): 'Object',
}[py_type]
elif py_type.__module__ == 'rethinkdb.ast':
# Anomalous non-rule based capitalization in the python driver
return {
'DB': 'Db'
}.get(py_type.__name__, py_type.__name__)
elif py_type.__module__ == 'rethinkdb.errors':
return py_type.__name__
elif py_type.__module__ == '?test?':
return {
'uuid': 'UUIDMatch', # clashes with ast.Uuid
}.get(py_type.__name__, metajava.camel(py_type.__name__))
elif py_type.__module__ == 'rethinkdb.query':
# All of the constants like minval maxval etc are defined in
# query.py, but no type name is provided to `type`, so we have
# to pull it out of a class variable
return metajava.camel(py_type.st)
else:
raise Unhandled(
"Don't know how to convert python type {}.{} to java"
.format(py_type.__module__, py_type.__name__))
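# Hedged examples of the mapping above (the r.ast case assumes the python
# driver has been imported via import_python_driver):
#
#   py_to_java_type(int)      -> 'Long'
#   py_to_java_type(dict)     -> 'Map'
#   py_to_java_type(r.ast.DB) -> 'Db'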
def is_reql(t):
'''Determines if a type is a reql term'''
# Other options for module: builtins, ?test?, datetime
return t.__module__ == 'rethinkdb.ast'
def escape_string(s, out):
out.write('"')
for codepoint in s:
rpr = repr(codepoint)[1:-1]
if rpr.startswith('\\x'):
# Python will shorten unicode escapes that are less than a
# byte to use \x instead of \u . Java doesn't accept \x so
# we have to expand it back out.
rpr = '\\u00' + rpr[2:]
elif rpr == '"':
rpr = r'\"'
out.write(rpr)
out.write('"')
def attr_matches(path, node):
'''Helper function. Several places need to know if they are an
attribute of some root object'''
root, name = path.split('.')
ret = is_name(root, node.value) and node.attr == name
return ret
def is_name(name, node):
'''Determine if the current attribute node is a Name with the
given name'''
return type(node) == ast.Name and node.id == name
def def_to_java(item, reql_vars):
if is_reql(item.term.type):
reql_vars.add(item.varname)
try:
if is_reql(item.term.type):
visitor = ReQLVisitor
else:
visitor = JavaVisitor
java_line = visitor(reql_vars,
type_=item.term.type,
is_def=True,
).convert(item.term.ast)
except Skip as skip:
return SkippedTest(line=item.term.line, reason=str(skip))
java_decl = JAVA_DECL.match(java_line).groupdict()
return JavaDef(
line=Version(
original=item.term.line,
java=java_line,
),
varname=java_decl['var'],
vartype=java_decl['type'],
value=java_decl['value'],
run_if_query=item.run_if_query,
testfile=item.testfile,
line_num=item.line_num,
runopts=convert_runopts(reql_vars, java_decl['type'], item.runopts)
)
def convert_runopts(reql_vars, type_, runopts):
if runopts is None:
return None
return {
key: JavaVisitor(
reql_vars, type_=type_).convert(val)
for key, val in runopts.items()
}
def query_to_java(item, reql_vars):
if item.runopts is not None:
converted_runopts = convert_runopts(
reql_vars, item.query.type, item.runopts)
else:
converted_runopts = item.runopts
try:
java_line = ReQLVisitor(
reql_vars, type_=item.query.type).convert(item.query.ast)
if is_reql(item.expected.type):
visitor = ReQLVisitor
else:
visitor = JavaVisitor
java_expected_line = visitor(
reql_vars, type_=item.expected.type)\
.convert(item.expected.ast)
except Skip as skip:
return SkippedTest(line=item.query.line, reason=str(skip))
return JavaQuery(
line=Version(
original=item.query.line,
java=java_line,
),
expected_type=py_to_java_type(item.expected.type),
expected_line=Version(
original=item.expected.line,
java=java_expected_line,
),
testfile=item.testfile,
line_num=item.line_num,
runopts=converted_runopts,
)
def ast_to_java(sequence, reql_vars):
    '''Converts the parsed test data to java source lines using the
    visitor classes'''
reql_vars = set(reql_vars)
for item in sequence:
if type(item) == process_polyglot.Def:
yield def_to_java(item, reql_vars)
elif type(item) == process_polyglot.CustomDef:
            # JavaDef is an 8-field namedtuple; fill the fields a custom
            # definition doesn't carry so construction doesn't TypeError.
            yield JavaDef(line=Version(item.line, item.line),
                          varname=None,
                          vartype=None,
                          value=None,
                          run_if_query=False,
                          testfile=item.testfile,
                          line_num=item.line_num,
                          runopts=None)
elif type(item) == process_polyglot.Query:
yield query_to_java(item, reql_vars)
elif type(item) == SkippedTest:
yield item
else:
assert False, "shouldn't happen, item was {}".format(item)
class JavaVisitor(ast.NodeVisitor):
'''Converts python ast nodes into a java string'''
def __init__(self,
reql_vars=frozenset("r"),
out=None,
type_=None,
is_def=False,
smart_bracket=False,
):
self.out = StringIO() if out is None else out
self.reql_vars = reql_vars
self.type = py_to_java_type(type_)
self._type = type_
self.is_def = is_def
self.smart_bracket = smart_bracket
super(JavaVisitor, self).__init__()
self.write = self.out.write
def skip(self, message, *args, **kwargs):
cls = Skip
is_fatal = kwargs.pop('fatal', False)
if self.is_def or is_fatal:
cls = FatalSkip
raise cls(message, *args, **kwargs)
def convert(self, node):
'''Convert a text line to another text line'''
self.visit(node)
return self.out.getvalue()
def join(self, sep, items):
first = True
for item in items:
if first:
first = False
else:
self.write(sep)
self.visit(item)
def to_str(self, s):
escape_string(s, self.out)
def cast_null(self, arg, cast='ReqlExpr'):
'''Emits a cast to (ReqlExpr) if the node represents null'''
        if (type(arg) == ast.Name and arg.id == 'null') or \
           (type(arg) == ast.NameConstant and arg.value is None):
self.write("(")
self.write(cast)
self.write(") ")
self.visit(arg)
def to_args(self, args, optargs=[]):
self.write("(")
if args:
self.cast_null(args[0])
for arg in args[1:]:
self.write(', ')
self.cast_null(arg)
self.write(")")
for optarg in optargs:
self.write(".optArg(")
self.to_str(optarg.arg)
self.write(", ")
self.visit(optarg.value)
self.write(")")
def generic_visit(self, node):
logger.error("While translating: %s", ast.dump(node))
logger.error("Got as far as: %s", ''.join(self.out))
raise Unhandled("Don't know what this thing is: " + str(type(node)))
def visit_Assign(self, node):
        if len(node.targets) != 1:
            raise Unhandled("We only support assigning to one variable")
self.write(self.type + " ")
self.write(node.targets[0].id)
self.write(" = (")
self.write(self.type)
self.write(") (")
if is_reql(self._type):
ReQLVisitor(self.reql_vars,
out=self.out,
type_=self.type,
is_def=True,
).visit(node.value)
else:
self.visit(node.value)
self.write(");")
def visit_Str(self, node):
self.to_str(node.s)
def visit_Bytes(self, node, skip_prefix=False, skip_suffix=False):
if not skip_prefix:
self.write("new byte[]{")
for i, byte in enumerate(node.s):
if i > 0:
self.write(", ")
# Java bytes are signed :(
if byte > 127:
self.write(str(-(256 - byte)))
else:
self.write(str(byte))
if not skip_suffix:
self.write("}")
else:
self.write(", ")
def visit_Name(self, node):
name = node.id
if name == 'frozenset':
self.skip("can't convert frozensets to GroupedData yet")
if name in metajava.java_term_info.JAVA_KEYWORDS or \
name in metajava.java_term_info.OBJECT_METHODS:
name += '_'
self.write({
'True': 'true',
'False': 'false',
'None': 'null',
'nil': 'null',
}.get(name, name))
def visit_arg(self, node):
self.write(node.arg)
def visit_NameConstant(self, node):
if node.value is None:
self.write("null")
elif node.value is True:
self.write("true")
elif node.value is False:
self.write("false")
else:
raise Unhandled(
"Don't know NameConstant with value %s" % node.value)
def visit_Attribute(self, node, emit_parens=True):
skip_parent = False
if attr_matches("r.ast", node):
# The java driver doesn't have that namespace, so we skip
# the `r.` prefix and create an ast class member in the
# test file. So stuff like `r.ast.rqlTzinfo(...)` converts
# to `ast.rqlTzinfo(...)`
skip_parent = True
if not skip_parent:
self.visit(node.value)
self.write(".")
self.write(metajava.dromedary(node.attr))
def visit_Num(self, node):
self.write(repr(node.n))
if not isinstance(node.n, float):
if node.n > 9223372036854775807 or node.n < -9223372036854775808:
self.write(".0")
else:
self.write("L")
def visit_Index(self, node):
self.visit(node.value)
def skip_if_arity_check(self, node):
'''Throws out tests for arity'''
rgx = re.compile('.*([Ee]xpect(ed|s)|Got) .* argument')
try:
if node.func.id == 'err' and rgx.match(node.args[1].s):
self.skip("arity checks done by java type system")
except (AttributeError, TypeError):
pass
def convert_if_string_encode(self, node):
'''Finds strings like 'foo'.encode("utf-8") and turns them into the
java version: "foo".getBytes(StandardCharsets.UTF_8)'''
try:
assert node.func.attr == 'encode'
node.func.value.s
encoding = node.args[0].s
except Exception:
return False
java_encoding = {
"ascii": "US_ASCII",
"utf-16": "UTF_16",
"utf-8": "UTF_8",
}[encoding]
self.visit(node.func.value)
self.write(".getBytes(StandardCharsets.")
self.write(java_encoding)
self.write(")")
return True
def bag_data_hack(self, node):
'''This is a very specific hack that isn't a general conversion method
whatsoever. In the tests we have an expected value like
bag(data * 2) where data is a list. This doesn't work in Java
obviously, but the only way to detect it "correctly" requires
type information in the ast, which we don't have. So the hack
here looks for this very specific case and rejiggers it. PRs
welcome for fixing this in a non-nasty way. In the meantime
I've made this extremely specific so it hopefully only gets
triggered by this specific case in the tests and not on
general conversions.
'''
try:
assert node.func.id == 'bag'
assert node.args[0].left.id == 'data'
assert type(node.args[0].op) == ast.Mult
assert node.args[0].right.n == 2
self.write("bag((List)")
self.write("Stream.concat(data.stream(), data.stream())")
self.write(".collect(Collectors.toList())")
self.write(")")
except Exception:
return False
else:
return True
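    # Hedged illustration of the rewrite above (the exact Java output also
    # depends on the enclosing test template):
    #   bag(data * 2)
    #   -> bag((List)Stream.concat(data.stream(), data.stream())
    #          .collect(Collectors.toList()))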
def visit_Call(self, node):
self.skip_if_arity_check(node)
if self.convert_if_string_encode(node):
return
if self.bag_data_hack(node):
return
if type(node.func) == ast.Attribute and node.func.attr == 'error':
# This weird special case is because sometimes the tests
# use r.error and sometimes they use r.error(). The java
# driver only supports r.error(). Since we're coming in
# from a call here, we have to prevent visit_Attribute
# from emitting the parents on an r.error for us.
self.visit_Attribute(node.func, emit_parens=False)
else:
self.visit(node.func)
self.to_args(node.args, node.keywords)
def visit_Dict(self, node):
self.write("r.hashMap(")
if len(node.keys) > 0:
self.visit(node.keys[0])
self.write(", ")
self.visit(node.values[0])
for k, v in zip(node.keys[1:], node.values[1:]):
self.write(").with(")
self.visit(k)
self.write(", ")
self.visit(v)
self.write(")")
def visit_List(self, node):
self.write("r.array(")
self.join(", ", node.elts)
self.write(")")
def visit_Tuple(self, node):
self.visit_List(node)
def visit_Lambda(self, node):
if len(node.args.args) == 1:
self.visit(node.args.args[0])
else:
self.to_args(node.args.args)
self.write(" -> ")
self.visit(node.body)
def visit_Subscript(self, node):
        if node.slice is None or type(node.slice.value) != ast.Num:
            logger.error("While doing: %s", ast.dump(node))
            raise Unhandled("Only integer subscripts can be converted."
                            " Got %s" % ast.dump(node))
self.visit(node.value)
self.write(".get(")
self.write(str(node.slice.value.n))
self.write(")")
def visit_ListComp(self, node):
gen = node.generators[0]
if type(gen.iter) == ast.Call and gen.iter.func.id.endswith('range'):
# This is really a special-case hacking of [... for i in
# range(i)] comprehensions that are used in the polyglot
# tests sometimes. It won't handle translating arbitrary
# comprehensions to Java streams.
self.write("LongStream.range(")
if len(gen.iter.args) == 1:
self.write("0, ")
self.visit(gen.iter.args[0])
elif len(gen.iter.args) == 2:
self.visit(gen.iter.args[0])
self.write(", ")
self.visit(gen.iter.args[1])
self.write(").boxed()")
else:
# Somebody came up with a creative new use for
# comprehensions in the test suite...
raise Unhandled("ListComp hack couldn't handle: ", ast.dump(node))
self.write(".map(")
self.visit(gen.target)
self.write(" -> ")
self.visit(node.elt)
self.write(").collect(Collectors.toList())")
def visit_UnaryOp(self, node):
opMap = {
ast.USub: "-",
ast.Not: "!",
ast.UAdd: "+",
ast.Invert: "~",
}
self.write(opMap[type(node.op)])
self.visit(node.operand)
def visit_BinOp(self, node):
opMap = {
ast.Add: " + ",
ast.Sub: " - ",
ast.Mult: " * ",
ast.Div: " / ",
ast.Mod: " % ",
}
t = type(node.op)
if t in opMap.keys():
self.visit(node.left)
self.write(opMap[t])
self.visit(node.right)
elif t == ast.Pow:
if type(node.left) == ast.Num and node.left.n == 2:
self.visit(node.left)
self.write(" << ")
self.visit(node.right)
else:
raise Unhandled("Can't do exponent with non 2 base")
class ReQLVisitor(JavaVisitor):
'''Mostly the same as the JavaVisitor, but converts some
reql-specific stuff. This should only be invoked on an expression
if it's already known to return true from is_reql'''
TOPLEVEL_CONSTANTS = {
'monday', 'tuesday', 'wednesday', 'thursday', 'friday',
'saturday', 'sunday', 'january', 'february', 'march', 'april',
'may', 'june', 'july', 'august', 'september', 'october',
'november', 'december', 'minval', 'maxval', 'error'
}
def is_byte_array_add(self, node):
'''Some places we do stuff like b'foo' + b'bar' and byte
arrays don't like that much'''
if (type(node.left) == ast.Bytes and
type(node.right) == ast.Bytes and
type(node.op) == ast.Add):
self.visit_Bytes(node.left, skip_suffix=True)
self.visit_Bytes(node.right, skip_prefix=True)
return True
else:
return False
def visit_BinOp(self, node):
if self.is_byte_array_add(node):
return
opMap = {
ast.Add: "add",
ast.Sub: "sub",
ast.Mult: "mul",
ast.Div: "div",
ast.Mod: "mod",
ast.BitAnd: "and",
ast.BitOr: "or",
}
func = opMap[type(node.op)]
if self.is_not_reql(node.left):
self.prefix(func, node.left, node.right)
else:
self.infix(func, node.left, node.right)
def visit_Compare(self, node):
opMap = {
ast.Lt: "lt",
ast.Gt: "gt",
ast.GtE: "ge",
ast.LtE: "le",
ast.Eq: "eq",
ast.NotEq: "ne",
}
if len(node.ops) != 1:
# Python syntax allows chained comparisons (a < b < c) but
# we don't deal with that here
raise Unhandled("Compare hack bailed on: ", ast.dump(node))
left = node.left
right = node.comparators[0]
func_name = opMap[type(node.ops[0])]
if self.is_not_reql(node.left):
self.prefix(func_name, left, right)
else:
self.infix(func_name, left, right)
def prefix(self, func_name, left, right):
self.write("r.")
self.write(func_name)
self.write("(")
self.visit(left)
self.write(", ")
self.visit(right)
self.write(")")
def infix(self, func_name, left, right):
self.visit(left)
self.write(".")
self.write(func_name)
self.write("(")
self.visit(right)
self.write(")")
def is_not_reql(self, node):
if type(node) in (ast.Name, ast.NameConstant,
ast.Num, ast.Str, ast.Dict, ast.List):
return True
else:
return False
def visit_Subscript(self, node):
self.visit(node.value)
if type(node.slice) == ast.Index:
# Syntax like a[2] or a["b"]
if self.smart_bracket and type(node.slice.value) == ast.Str:
self.write(".g(")
elif self.smart_bracket and type(node.slice.value) == ast.Num:
self.write(".nth(")
else:
self.write(".bracket(")
self.visit(node.slice.value)
self.write(")")
elif type(node.slice) == ast.Slice:
# Syntax like a[1:2] or a[:2]
self.write(".slice(")
lower, upper, rclosed = self.get_slice_bounds(node.slice)
self.write(str(lower))
self.write(", ")
self.write(str(upper))
self.write(")")
if rclosed:
self.write('.optArg("right_bound", "closed")')
else:
raise Unhandled("No translation for ExtSlice")
def get_slice_bounds(self, slc):
'''Used to extract bounds when using bracket slice
syntax. This is more complicated since Python3 parses -1 as
UnaryOp(op=USub, operand=Num(1)) instead of Num(-1) like
Python2 does'''
if not slc:
return 0, -1, True
def get_bound(bound, default):
if bound is None:
return default
elif type(bound) == ast.UnaryOp and type(bound.op) == ast.USub:
return -bound.operand.n
elif type(bound) == ast.Num:
return bound.n
else:
raise Unhandled(
"Not handling bound: %s" % ast.dump(bound))
right_closed = slc.upper is None
return get_bound(slc.lower, 0), get_bound(slc.upper, -1), right_closed
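    # Hedged examples for get_slice_bounds:
    #   a[1:2] -> (1, 2, False)
    #   a[:2]  -> (0, 2, False)
    #   a[1:]  -> (1, -1, True)   # open right bound emits right_bound=closed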
def visit_Attribute(self, node, emit_parens=True):
is_toplevel_constant = False
if attr_matches("r.row", node):
self.skip("Java driver doesn't support r.row", fatal=True)
elif is_name("r", node.value) and node.attr in self.TOPLEVEL_CONSTANTS:
# Python has r.minval, r.saturday etc. We need to emit
# r.minval() and r.saturday()
is_toplevel_constant = True
python_clashes = {
# These are underscored in the python driver to avoid
# keywords, but they aren't java keywords so we convert
# them back.
'or_': 'or',
'and_': 'and',
'not_': 'not',
}
method_aliases = {metajava.dromedary(k): v
for k, v in metajava.java_term_info
.METHOD_ALIASES.items()}
self.visit(node.value)
self.write(".")
initial = python_clashes.get(
node.attr, metajava.dromedary(node.attr))
initial = method_aliases.get(initial, initial)
self.write(initial)
if initial in metajava.java_term_info.JAVA_KEYWORDS or \
initial in metajava.java_term_info.OBJECT_METHODS:
self.write('_')
if emit_parens and is_toplevel_constant:
self.write('()')
def visit_UnaryOp(self, node):
if type(node.op) == ast.Invert:
self.visit(node.operand)
self.write(".not()")
else:
super(ReQLVisitor, self).visit_UnaryOp(node)
def visit_Call(self, node):
# We call the superclass first, so if it's going to fail
# because of r.row or other things it fails first, rather than
# hitting the checks in this method. Since everything is
# written to a stringIO object not directly to a file, if we
# bail out afterwards it's still ok
super_result = super(ReQLVisitor, self).visit_Call(node)
# r.for_each(1) etc should be skipped
if (attr_equals(node.func, "attr", "for_each") and
type(node.args[0]) != ast.Lambda):
self.skip("the java driver doesn't allow "
"non-function arguments to forEach")
# map(1) should be skipped
elif attr_equals(node.func, "attr", "map"):
def check(node):
if type(node) == ast.Lambda:
return True
elif hasattr(node, "func") and attr_matches("r.js", node.func):
return True
elif type(node) == ast.Dict:
return True
elif type(node) == ast.Name:
# The assumption is that if you're passing a
# variable to map, it's at least potentially a
# function. This may be misguided
return True
else:
return False
if not check(node.args[-1]):
self.skip("the java driver statically checks that "
"map contains a function argument")
else:
return super_result
def attr_equals(node, attr, value):
'''Helper for digging into ast nodes'''
return hasattr(node, attr) and getattr(node, attr) == value
if __name__ == '__main__':
main()
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import pymongo
from scrapy.conf import settings
from scrapy.exceptions import DropItem
from scrapy import log
from datetime import datetime
from hashlib import md5
class MongoPipeline(object):
def __init__(self, mongo_uri, mongo_db, collection_name):
from cwitunes.DBUtils import DBUtils
self.dbutils = DBUtils(mongo_uri, mongo_db, collection_name)
@classmethod
def from_crawler(cls, crawler):
return cls(
mongo_uri=crawler.settings.get('MONGODB_SERVER'),
mongo_db=crawler.settings.get('MONGODB_DB', 'items'),
collection_name=crawler.settings.get('MONGODB_COLLECTION')
)
def open_spider(self, spider):
self.dbutils.open_spider()
def close_spider(self, spider):
self.dbutils.close_spider()
def process_item(self, item, spider):
        self.dbutils.process_item(item, spider)
return item
def _handle_error(self, failure, item, spider):
"""Handle occurred on db interaction."""
# do nothing, just log
log.err(failure)
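# Hedged usage sketch: wiring this pipeline up in settings.py (the project
# path, priority, and connection values below are assumptions):
#
#   ITEM_PIPELINES = {'myproject.pipelines.MongoPipeline': 300}
#   MONGODB_SERVER = 'mongodb://localhost:27017'
#   MONGODB_DB = 'items'
#   MONGODB_COLLECTION = 'scraped_items'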
# Generated by Django 3.0.3 on 2020-11-10 07:56
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('structure', '0032_structure_author_state'),
]
operations = [
migrations.RemoveField(
model_name='structuremodelrmsd',
name='TM_all',
),
migrations.RemoveField(
model_name='structuremodelrmsd',
name='date',
),
migrations.RemoveField(
model_name='structuremodelrmsd',
name='overall_all',
),
migrations.RemoveField(
model_name='structuremodelrmsd',
name='overall_backbone',
),
migrations.RemoveField(
model_name='structuremodelrmsd',
name='pdb',
),
migrations.RemoveField(
model_name='structuremodelrmsd',
name='service',
),
migrations.RemoveField(
model_name='structuremodelrmsd',
name='version',
),
migrations.AddField(
model_name='structuremodelrmsd',
name='ECL1',
field=models.DecimalField(decimal_places=1, max_digits=2, null=True),
),
migrations.AddField(
model_name='structuremodelrmsd',
name='ECL2',
field=models.DecimalField(decimal_places=1, max_digits=2, null=True),
),
migrations.AddField(
model_name='structuremodelrmsd',
name='H8',
field=models.DecimalField(decimal_places=1, max_digits=2, null=True),
),
migrations.AddField(
model_name='structuremodelrmsd',
name='ICL1',
field=models.DecimalField(decimal_places=1, max_digits=2, null=True),
),
migrations.AddField(
model_name='structuremodelrmsd',
name='ICL2',
field=models.DecimalField(decimal_places=1, max_digits=2, null=True),
),
migrations.AddField(
model_name='structuremodelrmsd',
name='binding_pocket',
field=models.DecimalField(decimal_places=1, max_digits=2, null=True),
),
migrations.AddField(
model_name='structuremodelrmsd',
name='main_template',
field=models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, related_name='main_template', to='structure.Structure'),
preserve_default=False,
),
migrations.AddField(
model_name='structuremodelrmsd',
name='target_structure',
field=models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, related_name='target_structure', to='structure.Structure'),
preserve_default=False,
),
migrations.AlterField(
model_name='structuremodelrmsd',
name='TM_backbone',
field=models.DecimalField(decimal_places=1, max_digits=2, null=True),
),
migrations.AlterField(
model_name='structuremodelrmsd',
name='homology_model',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='structure.StructureModel'),
),
]
# -*- coding: utf-8 -*-
import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
SECRET_KEY = os.environ.get('SECRET_KEY') or 'greed is good who is your daddy?'
SSL_DISABLE = False
SQLALCHEMY_COMMIT_ON_TEARDOWN = True
SQLALCHEMY_RECORD_QUERIES = True
MAIL_SERVER = 'smtp.126.com'
MAIL_PORT = 25
MAIL_USE_TLS = True
MAIL_USERNAME = os.environ.get('MAIL_USERNAME')
MAIL_PASSWORD = os.environ.get('MAIL_PASSWORD')
MAIL_SUBJECT_PREFIX = u'[手策]'
MAIL_SENDER = u'手策管理员 <kopei_nan@126.com>'
ADMIN = os.environ.get('ADMIN')
POSTS_PER_PAGE = 20
FOLLOWERS_PER_PAGE = 50
COMMENTS_PER_PAGE = 30
    SLOW_DB_QUERY_TIME = 0.5
UPLOAD_FOLDER = '/path/to/upload'
ALLOWED_EXTENSIONS = set(['txt', 'pdf', 'jpg', 'png', 'gif', 'jpeg'])
ALLOW_MAX_FILE = 16 * 1024 * 1024
WHOOSH_BASE = os.path.join(basedir, 'search.db')
MAX_SEARCH_RESULTS = 50
@staticmethod
def init_app(app):
pass
class DevelopmentConfig(Config):
DEBUG = True
SQLALCHEMY_DATABASE_URI = os.environ.get('DEV_DATABASE_URL') or \
'sqlite:///' + os.path.join(basedir, 'data-dev.sqlite')
class TestingConfig(Config):
TESTING = True
SQLALCHEMY_DATABASE_URI = os.environ.get('TEST_DATABASE_URL') or \
'sqlite:///' + os.path.join(basedir, 'data-test.sqlite')
WTF_CSRF_ENABLED = False
class ProductionConfig(Config):
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') or \
'sqlite:///' + os.path.join(basedir, 'data.sqlite')
@classmethod
def init_app(cls, app):
Config.init_app(app)
# email errors to the administrators
import logging
from logging.handlers import SMTPHandler
credentials = None
secure = None
if getattr(cls, 'MAIL_USERNAME', None) is not None:
credentials = (cls.MAIL_USERNAME, cls.MAIL_PASSWORD)
if getattr(cls, 'MAIL_USE_TLS', None):
secure = ()
mail_handler = SMTPHandler(
mailhost=(cls.MAIL_SERVER, cls.MAIL_PORT),
fromaddr=cls.MAIL_SENDER,
toaddrs=[cls.ADMIN],
subject=cls.MAIL_SUBJECT_PREFIX + ' Application Error',
credentials=credentials,
secure=secure)
mail_handler.setLevel(logging.ERROR)
app.logger.addHandler(mail_handler)
class HerokuConfig(ProductionConfig):
SSL_DISABLE = bool(os.environ.get('SSL_DISABLE'))
@classmethod
def init_app(cls, app):
ProductionConfig.init_app(app)
# handle proxy server headers
from werkzeug.contrib.fixers import ProxyFix
app.wsgi_app = ProxyFix(app.wsgi_app)
# log to stderr
import logging
from logging import StreamHandler
file_handler = StreamHandler()
file_handler.setLevel(logging.WARNING)
app.logger.addHandler(file_handler)
config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'production': ProductionConfig,
'heroku': HerokuConfig,
'default': DevelopmentConfig
}
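# Hedged usage sketch: how this registry is typically consumed by an
# application factory (`create_example_app` is illustrative and not used
# elsewhere in this module):
def create_example_app(config_name='default'):
    from flask import Flask  # local import: only needed for this sketch
    app = Flask(__name__)
    app.config.from_object(config[config_name])
    config[config_name].init_app(app)
    return app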
#!/usr/bin/env python3
# _*_coding:utf-8_*_
import os
import motor.motor_tornado
import redis
from pymongo import MongoClient
from common.logManageLib import get_logger
settings = dict(
template_path=os.path.join(os.path.dirname(__file__), "templates"),
static_path=os.path.join(os.path.dirname(__file__), "static"),
cookie_secret="bZJc2sWbQLKoscdGkHn/VytuyfgXwQt8S0R0kRvJ5/xJ89E=",
login_url="/login",
xsrf_cookies=True,
debug=True,
)
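# Hedged usage sketch: these settings are intended for the Tornado
# application constructor (the handlers list is an assumption):
#
#   app = tornado.web.Application(handlers, **settings)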
# Set up the MongoDB connection (async client for Tornado)
client = motor.motor_tornado.MotorClient('mongodb://112.74.204.250:27017')
# Get a synchronous database connection
g_py_client = MongoClient("mongodb://112.74.204.250:27017")
# Set up the Redis connection
g_redis_db = redis.StrictRedis(host='112.74.204.250', port=6379, password=None, db=1)
g_redis_time_5m = 5 * 60
g_redis_time_10m = 10 * 60
g_redis_time_30m = 30 * 60
g_redis_time_1h = 1 * 60 * 60
g_redis_time_2h = 2 * 60 * 60
g_redis_time_5h = 5 * 60 * 60
g_redis_time_1d = 24 * 60 * 60
g_redis_time_1w = 7 * 24 * 60 * 60
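# Hedged example of using the TTL constants above with the Redis client
# (the key name is an assumption):
#
#   g_redis_db.setex("search:hot_words", g_redis_time_30m, "...")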
# Logging configuration
logger = get_logger(strFileName="smartSearch.log", debug=20, showStreamLog=True, saveLogPath=None)
# Domain configuration
domain = "http://www.liuhub.com/"
# domain = "http://127.0.0.1:8000/"
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import Skype4Py
import datetime
import time
import random
import logging
import sqlite3
from hashlib import sha1
from collections import deque
FORMAT=u'%(name)s %(thread)d %(levelname)s: %(message)s'
logging.basicConfig(format=FORMAT, level=logging.INFO)
logging.getLogger('').setLevel(logging.INFO)
logging.getLogger('app').setLevel(logging.INFO)
logging.getLogger('handlers').setLevel(logging.INFO)
logging.getLogger('SkypeMessenger').setLevel(logging.INFO)
log = logging.getLogger('app')
class QuizBot:
    # Bot initialization
    def __init__(self):
        log.info("Starting application...")
        # Default values for the instance variables
        # Launch timestamp
        self.start = datetime.datetime.now()
        # Questions database file
        self.bot_db = "QuizBot.db"
        # Allowed chats
        self.listen_chats = [u'рабочие вопросы',
                             u'семейная викторина']
        # Period during which a question is not repeated
        self.quest_between = "-2 hours"
        # Delay before the second hint (seconds)
        self.hint_timeout = 15
        # Delay before revealing the answer (seconds)
        self.answer_timeout = 25
        # The bot is not stopped
        self.running = False
        # Chats with an active quiz
        self.listen = []
        # Database connection
        self.db_conn = None
        # Database cursor
        self.db_cur = None
        # Current question (per chat)
        self.current_question = {}
        # Current hint (per chat)
        self.current_hint = {}
        # Current answer (per chat)
        self.current_answer = {}
        # Guard against repeated messages
        self.last_message = {}
        # Most recently received message context
        self.context = ""
        # Task queue
        self.stack = deque([])
        # Skype instance
        self.skype = Skype4Py.Skype()
        # Is the client running?
        if not self.skype.Client.IsRunning:
            self.skype.Client.Start()
        # Attach to the running Skype client
        self.skype.Attach()
        # Message-received event handler
        self.skype.OnMessageStatus = self.run_action
def run(self):
self.running = True
log.info("Now run!")
while self.running:
try:
if len(self.stack):
action = self.stack.popleft()
if action["time"]<=datetime.datetime.now():
chat = self.skype.Chat(action["chat"])
if chat.Name in self.listen\
and chat.Name in self.current_question:
hash_key = sha1(u"hash:{0}:{1}".format(
self.current_question[chat.Name],
self.current_answer[chat.Name])\
.encode('utf-8')).hexdigest()
if hash_key == action["hash"]:
if action["action"] == "answer":
chat.SendMessage(u"Правильный ответ: {0}"\
.format(self.current_answer[chat.Name]))
del(self.current_question[chat.Name])
del(self.current_hint[chat.Name])
del(self.current_answer[chat.Name])
self.stack.append({"time": datetime.datetime.now() +
datetime.timedelta(seconds=2),
"action": 'new_question',
"chat": self.context.Chat.Name})
elif action["action"] == "hint":
len_answer = \
int(len(self.current_answer[chat.Name]))
len_hint = int(1.5 * len_answer / 4)
hint = u'{}{}{}'.format(
self.current_answer[chat.Name][:len_hint],
'*'*(len_answer-len_hint*2),
self.current_answer[chat.Name][-len_hint:])
chat.SendMessage(u"Подсказка: {0}"\
.format(hint))
else:
if action["action"] == "new_question":
self.new_question()
else:
self.stack.append(action)
time.sleep(0.5)
except KeyboardInterrupt:
self.shutdown()
def shutdown(self):
log.debug("Disconnecting...")
self.stop_quiz()
log.info("Now shutdown...")
if self.db_conn and self.db_cur:
self.db_disconnect()
self.running = False
work_time = datetime.datetime.now() - self.start
log.info("Work time: %s" % work_time)
return True
def db_connect(self):
if not self.db_conn and not self.db_cur:
log.debug("Connect to database...")
self.db_conn = sqlite3.connect(self.bot_db)
log.debug("Get cursor")
self.db_cur = self.db_conn.cursor()
else:
log.error("Database connect already exits...")
def db_disconnect(self):
if self.db_conn and self.db_cur:
log.debug("Commit to database...")
self.db_conn.commit()
log.debug("Close connection...")
self.db_conn.close()
self.db_cur = None
self.db_conn = None
else:
log.error("No database connect...")
def run_action(self, message, status):
if message.Chat.FriendlyName.lower() in self.listen_chats and \
not (message.Sender.Handle in self.last_message and\
self.last_message[message.Sender.Handle] == message.Body) and \
(status == 'SENT' or status == 'RECEIVED'):
log.info(u"Action: '{0}' Message: '{1} ({2}): {3}'".format(status,
message.Sender.Handle,
message.Chat.FriendlyName,
message.Body))
self.last_message[message.Sender.Handle] = message.Body
command = message.Body.split(' ')[0].lower()
if command in self.functions:
self.context = message
self.functions[command](self)
            elif self.context and self.context.Chat.Name in self.listen:
self.parse_answer(message)
else:
log.debug(u"Action: '{0}' Message: '{1} ({2}): {3}'".format(status,
message.Sender.Handle,
message.Chat.FriendlyName,
message.Body))
def new_question(self):
self.db_connect()
new_quest = self.db_cur.execute("""SELECT question, answer
FROM questions
WHERE last_show < strftime('%s','now','{0}')
ORDER BY RANDOM()
LIMIT 1""".format(self.quest_between)).fetchone()
if new_quest:
self.current_question[self.context.Chat.Name] = new_quest[0]
self.current_answer[self.context.Chat.Name] = new_quest[1]
hint = u'{}{}{}'.format(new_quest[1][0],
'*'*(len(new_quest[1])-2),
new_quest[1][-1])
self.current_hint[self.context.Chat.Name] = hint
self.context.Chat.SendMessage(u'Новый вопрос: {}'.format(new_quest[0]))
time.sleep(0.5)
self.context.Chat.SendMessage(u'/me Подсказка: {}'.format(hint))
self.db_cur.execute(u"""UPDATE questions
SET last_show = strftime('%s','now')
WHERE question = '{0}'
AND answer = '{1}'""".format(
new_quest[0],
new_quest[1]))
hash_key = sha1(u"hash:{0}:{1}".format(new_quest[0],new_quest[1])\
.encode('utf-8')).hexdigest()
self.stack.append({"time": datetime.datetime.now() +
datetime.timedelta(seconds=self.hint_timeout),
"action": 'hint',
"chat": self.context.Chat.Name,
"hash": hash_key})
self.stack.append({"time": datetime.datetime.now() +
datetime.timedelta(seconds=self.answer_timeout),
"action": 'answer',
"chat": self.context.Chat.Name,
"hash": hash_key})
else:
self.context.Chat.SendMessage(u'Вопросы кончились.')
self.stop_quiz()
self.db_disconnect()
def start_quiz(self):
if not self.context.Chat.Name in self.listen:
log.info("Starting quiz...")
self.context.Chat.SendMessage(u'/me Запускаем викторину!')
self.db_connect()
count_quest = self.db_cur.execute("SELECT COUNT(*) FROM questions")\
.fetchone()
self.db_disconnect()
if count_quest:
self.listen.append(self.context.Chat.Name)
self.context.Chat.SendMessage(u'/me Вопросы загружены. В базе \
%s вопросов' % count_quest)
self.stack.append({"time": datetime.datetime.now(),
"action": 'new_question',
"chat": self.context.Chat.Name})
else:
self.context.Chat.SendMessage(u'/me Вопросов в базе не найдено')
else:
self.context.Chat.SendMessage(u'Викторина уже запущена!!! \
Не стоит паниковать.')
def stop_quiz(self):
if self.listen:
log.info("Stoping quiz...")
self.listen = []
self.context.Chat.SendMessage(u'/me Викторина остановлена!')
def parse_answer(self, message):
if self.context.Chat.Name in self.listen\
and message.Chat.Name in self.current_answer \
and message.Body.lower() == self.current_answer[message.Chat.Name].lower():
self.listen.remove(self.context.Chat.Name)
del(self.current_question[self.context.Chat.Name])
del(self.current_hint[self.context.Chat.Name])
log.info(u"Correct answer '{0}' from user {1}'".format(message.Body,
message.Sender.Handle))
self.db_connect()
            user_points = self.db_cur.execute("""SELECT points
                                                 FROM leaders
                                                 WHERE name = ?
                                                 AND chat = ?""",
                                              (message.Sender.Handle,
                                               message.Chat.Name)).fetchone()
if user_points and user_points[0]:
user_points = int(user_points[0]) + 1
message.Chat.SendMessage(u"/me {0}, правильно!!! Ответ '{1}'. У тебя {2} очков."\
.format(message.Sender.Handle,
self.current_answer[message.Chat.Name],
user_points))
del(self.current_answer[self.context.Chat.Name])
self.db_cur.execute("""UPDATE leaders
SET points = {0}
WHERE name = '{1}'
AND chat = '{2}'""".format(user_points,
message.Sender.Handle,
message.Chat.Name))
else:
user_points = 1
message.Chat.SendMessage(u"/me {0}, правильно!!! Ответ '{1}'. У тебя первое очко."\
.format(message.Sender.Handle,
self.current_answer[message.Chat.Name]))
self.db_cur.execute("""INSERT INTO leaders(name, points, chat)
VALUES ('{0}', 1, '{1}')"""\
.format(message.Sender.Handle,
message.Chat.Name))
self.db_disconnect()
self.listen.append(self.context.Chat.Name)
self.stack.append({"time": datetime.datetime.now(),
"action": 'new_question',
"chat": self.context.Chat.Name})
def next_answer(self):
if self.context.Chat.Name in self.listen\
and self.context.Chat.Name in self.current_answer:
log.info(u"Next answer from user {0}'".format(self.context.Sender\
.Handle))
self.context.Chat.SendMessage(
u"/me Пользователь {0} пропустил вопрос. Правильный ответ был '{1}'"\
.format(self.context.Sender.Handle,
self.current_answer[self.context.Chat.Name]))
del(self.current_question[self.context.Chat.Name])
del(self.current_hint[self.context.Chat.Name])
del(self.current_answer[self.context.Chat.Name])
self.stack.append({"time": datetime.datetime.now() +
datetime.timedelta(seconds=2),
"action": 'new_question',
"chat": self.context.Chat.Name})
def show_hint(self):
if self.context.Chat.Name in self.listen:
self.context.Chat.SendMessage(u'/me Подсказка: {0}'.format(
self.current_hint[self.context.Chat.Name]))
def show_top10(self):
self.db_connect()
        leaderboard = self.db_cur.execute("""SELECT name, points
                                             FROM leaders
                                             WHERE chat = ?
                                             ORDER BY points DESC
                                             LIMIT 0,10""",
                                          (self.context.Chat.Name,)).fetchall()
if len(leaderboard):
self.context.Chat.SendMessage(u"Топ-10 лидеров:")
time.sleep(1)
i = 1
for name, points in leaderboard:
self.context.Chat.SendMessage(u"/me {0}. {1} - {2}"\
.format(i, name, points))
i+=1
time.sleep(0.5)
i = None
else:
self.context.Chat.SendMessage(u"/me Лидеров еще нет")
self.db_disconnect()
    # Supported commands
functions = {"!start": start_quiz,
u"!старт": start_quiz,
"!stop": stop_quiz,
u"!стоп": stop_quiz,
"!next": next_answer,
u"!далее": next_answer,
"!hint": show_hint,
u"!подсказка": show_hint,
"!top": show_top10,
u"!топ": show_top10,
}
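    # Note on the dispatch table above: the values are plain functions
    # defined in the class body, so run_action supplies the instance
    # explicitly via self.functions[command](self).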
if __name__ == "__main__":
quiz_cis = QuizBot()
quiz_cis.run()
import sublime
import time
class Printer(object):
""" Based on printer of Mavensmate
"""
printers = {}
def __init__(self, name):
self.name = name
self.visible = False
@classmethod
def get(cls, name, window_id=None):
if not window_id: window_id = sublime.active_window().id()
printer = cls.printers.get(str(window_id)+name)
if not printer:
printer = Printer(name)
printer.window_id = window_id
printer.init()
cls.printers[str(window_id)+name] = printer
package_info = sublime.load_settings("package.sublime-settings")
version_info = "Copyright © 2013-2015 By %s, Dev Channel, Build v%s\n" % (
package_info.get("author"),
package_info.get("version")
)
printer.write(version_info, False)
return printer
def init(self):
if not hasattr(self, "panel"):
self.window = sublime.active_window()
self.panel = self.window.get_output_panel(self.name)
self.panel.settings().set('syntax', 'Packages/Java/Java.tmLanguage')
self.panel.settings().set('color_scheme', 'Packages/Color Scheme - Default/Monokai.tmTheme')
self.panel.settings().set('word_wrap', True)
self.panel.settings().set('gutter', True)
self.panel.settings().set('line_numbers', True)
def hide_panel(self):
self.visible = False
self.window.run_command('hide_panel', {
'panel': 'output.' + self.name
})
return self
def show_panel(self):
self.visible = True
self.window.run_command('show_panel', {
'panel': 'output.' + self.name
})
return self
def scroll_to_bottom(self):
size = self.panel.size()
sublime.set_timeout(lambda : self.panel.show(size, True), 2)
def write_start(self, message="-"*150):
return self.write(message, False)
def write(self, message, prefix=True):
# Show Panel
self.show_panel()
# Append message to panel
panel_message = message + "\n" if not prefix else "[%s] [%s] %s\n" % (
time.strftime("%Y.%m.%d %H:%M:%S", time.localtime(time.time())),
self.name.upper(),
message
)
self.panel.run_command("append", {
"characters": panel_message
})
# Scroll to bottom
self.scroll_to_bottom()
return self
    def write_end(self, message="-"*150):
        return self.write(message, False)
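# Hedged usage sketch (the panel name is an assumption):
#
#   printer = Printer.get("build")
#   printer.write_start()                 # separator line, no prefix
#   printer.write("Compilation started")  # timestamped, prefixed line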
import unittest
from honeybee.radiance.analysisgrid import AnalysisGrid
from honeybee.radiance.recipe.solaraccess.gridbased import SolarAccessGridBased
from ladybug.analysisperiod import AnalysisPeriod
from ladybug.epw import EPW
from honeybee.futil import bat_to_sh
import os
class SunlighthoursTestCase(unittest.TestCase):
"""Test for (honeybee/radiance/recipe/sunlighthours.py)."""
# preparing to test
def setUp(self):
"""Set up the test case by initiating the class."""
self.test_pts = [(0, 0, 0), (1, 1, 0), (3, 2, 0)]
self.test_vec = [(0, 0, 1), (0, 0, 1), (0, 0, 1)]
self.analysis_grid = AnalysisGrid.from_points_and_vectors(
self.test_pts, self.test_vec, 'test_grid')
self.hoys = [1908, 2000, 2530, 3254, 3658, 4000, 4116, 6324, 8508]
self.sun_vectors = [
(-0.810513, 0.579652, -0.084093), (-0.67166, 0.702357, -0.235729),
(-0.487065, 0.798284, -0.354275), (-0.269301, 0.8609, -0.431657),
(-0.033196, 0.885943, -0.462605), (0.20517, 0.871705, -0.445013),
(0.429563, 0.819156, -0.380077), (0.624703, 0.731875, -0.272221),
(0.777301, 0.615806, -0.128788)]
self.base_folder = os.path.abspath("tests/room/testrun")
self.run_folder = os.path.abspath("tests/room/testrun/test/solaraccess")
self.command = os.path.abspath(
"tests/room/testrun/test/solaraccess/commands.bat")
self.epwfile = os.path.abspath("tests/room/test.epw")
# ending the test
def tearDown(self):
"""Cleaning up after the test."""
pass
# test default values
def test_init_func(self):
"""Test normal init function."""
slh = SolarAccessGridBased(self.sun_vectors, self.hoys, [self.analysis_grid])
bat = slh.write(self.base_folder, project_name="test")
sh = bat_to_sh(bat)
# start to run the subprocess
if os.name == 'nt':
success = slh.run(bat)
else:
success = slh.run(sh)
if success:
assert slh.results()[0].ToString() == "AnalysisGrid::test_grid::#3::[*]"
def test_cls_method_analysis_period(self):
"""Make sure default values are set correctly."""
location = EPW(self.epwfile).location
ap = AnalysisPeriod(st_month=1, end_month=3)
slh = SolarAccessGridBased.from_location_and_analysis_period(
location, ap, [self.test_pts], [self.test_vec])
bat = slh.write(self.base_folder, project_name="test")
sh = bat_to_sh(bat)
# start to run the subprocess
if os.name == 'nt':
success = slh.run(bat)
else:
success = slh.run(sh)
if success:
ag = slh.results()[0]
for sensor in ag:
value = sum(v[0] for v in sensor.combined_values_by_id(sensor.hoys))
assert value == 978
def test_cls_method_hoy(self):
"""Make sure default values are set correctly."""
location = EPW(self.epwfile).location
hoys = range(1, 24)
slh = SolarAccessGridBased.from_location_and_hoys(
location, hoys, [self.test_pts], [self.test_vec])
bat = slh.write(self.base_folder, project_name="test")
sh = bat_to_sh(bat)
# start to run the subprocess
if os.name == 'nt':
success = slh.run(bat)
else:
success = slh.run(sh)
if success:
ag = slh.results()[0]
for sensor in ag:
value = sum(v[0] for v in sensor.combined_values_by_id(sensor.hoys))
assert value == 10
if __name__ == '__main__':
unittest.main()
"""add fields for meetup
Revision ID: 2fb6ec55051f
Revises: 37f73d8e6963
Create Date: 2014-09-15 02:49:23.106092
"""
# revision identifiers, used by Alembic.
revision = '2fb6ec55051f'
down_revision = '37f73d8e6963'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('calendar', sa.Column('eventbrite_disabled', sa.Boolean(), nullable=True))
op.add_column('calendar', sa.Column('google_disabled', sa.Boolean(), nullable=True))
op.add_column('calendar', sa.Column('meetup_disabled', sa.Boolean(), nullable=True))
op.add_column('event', sa.Column('to_eventbrite', sa.Boolean(), nullable=True))
op.add_column('event', sa.Column('to_google', sa.Boolean(), nullable=True))
op.add_column('event', sa.Column('to_meetup', sa.Boolean(), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('event', 'to_meetup')
op.drop_column('event', 'to_google')
op.drop_column('event', 'to_eventbrite')
op.drop_column('calendar', 'meetup_disabled')
op.drop_column('calendar', 'google_disabled')
op.drop_column('calendar', 'eventbrite_disabled')
### end Alembic commands ###
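# Hedged usage note: apply this migration with `alembic upgrade 2fb6ec55051f`
# and roll it back with `alembic downgrade 37f73d8e6963`.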
#!/usr/bin/env python
# pylint: disable=missing-docstring
# flake8: noqa: T001
# ___ ___ _ _ ___ ___ _ _____ ___ ___
# / __| __| \| | __| _ \ /_\_ _| __| \
# | (_ | _|| .` | _|| / / _ \| | | _|| |) |
# \___|___|_|\_|___|_|_\/_/_\_\_|_|___|___/_ _____
# | \ / _ \ | \| |/ _ \_ _| | __| \_ _|_ _|
# | |) | (_) | | .` | (_) || | | _|| |) | | | |
# |___/ \___/ |_|\_|\___/ |_| |___|___/___| |_|
#
# Copyright 2016 Red Hat, Inc. and/or its affiliates
# and other contributors as indicated by the @author tags.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# -*- -*- -*- Begin included fragment: lib/import.py -*- -*- -*-
'''
OpenShiftCLI class that wraps the oc commands in a subprocess
'''
# pylint: disable=too-many-lines
from __future__ import print_function
import atexit
import copy
import json
import os
import re
import shutil
import subprocess
import tempfile
# pylint: disable=import-error
try:
import ruamel.yaml as yaml
except ImportError:
import yaml
from ansible.module_utils.basic import AnsibleModule
# -*- -*- -*- End included fragment: lib/import.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: doc/serviceaccount_secret -*- -*- -*-
DOCUMENTATION = '''
---
module: oc_serviceaccount_secret
short_description: Module to manage openshift service account secrets
description:
- Manage openshift service account secrets programmatically.
options:
state:
description:
- If present, the service account will be linked with the secret if it is not already. If absent, the service account will be unlinked from the secret if it is already linked. If list, information about the service account secrets will be gathered and returned as part of the Ansible call results.
required: false
default: present
choices: ["present", "absent", "list"]
aliases: []
kubeconfig:
description:
- The path for the kubeconfig file to use for authentication
required: false
default: /etc/origin/master/admin.kubeconfig
aliases: []
debug:
description:
- Turn on debug output.
required: false
default: false
aliases: []
service_account:
description:
- Name of the service account.
required: true
default: None
aliases: []
namespace:
description:
- Namespace of the service account and secret.
required: true
default: None
aliases: []
secret:
description:
- The secret that should be linked to the service account.
required: false
default: None
aliases: []
author:
- "Kenny Woodson <kwoodson@redhat.com>"
extends_documentation_fragment: []
'''
EXAMPLES = '''
- name: get secrets of a service account
oc_serviceaccount_secret:
state: list
service_account: builder
namespace: default
register: sasecretout
- name: Link a service account to a specific secret
oc_serviceaccount_secret:
service_account: builder
secret: mynewsecret
namespace: default
register: sasecretout
'''
# -*- -*- -*- End included fragment: doc/serviceaccount_secret -*- -*- -*-
# -*- -*- -*- Begin included fragment: ../../lib_utils/src/class/yedit.py -*- -*- -*-
class YeditException(Exception): # pragma: no cover
''' Exception class for Yedit '''
pass
# pylint: disable=too-many-public-methods
class Yedit(object): # pragma: no cover
''' Class to modify yaml files '''
    # Both patterns are completed via .format() with the separators that
    # are *not* currently active, so use {} (not %s) as the placeholder.
    re_valid_key = r"(((\[-?\d+\])|([0-9a-zA-Z{}/_-]+)).?)+$"
re_key = r"(?:\[(-?\d+)\])|([0-9a-zA-Z{}/_-]+)"
com_sep = set(['.', '#', '|', ':'])
# pylint: disable=too-many-arguments
def __init__(self,
filename=None,
content=None,
content_type='yaml',
separator='.',
backup=False):
self.content = content
self._separator = separator
self.filename = filename
self.__yaml_dict = content
self.content_type = content_type
self.backup = backup
self.load(content_type=self.content_type)
if self.__yaml_dict is None:
self.__yaml_dict = {}
@property
def separator(self):
''' getter method for separator '''
return self._separator
@separator.setter
def separator(self, inc_sep):
''' setter method for separator '''
self._separator = inc_sep
@property
def yaml_dict(self):
''' getter method for yaml_dict '''
return self.__yaml_dict
@yaml_dict.setter
def yaml_dict(self, value):
''' setter method for yaml_dict '''
self.__yaml_dict = value
@staticmethod
def parse_key(key, sep='.'):
'''parse the key allowing the appropriate separator'''
common_separators = list(Yedit.com_sep - set([sep]))
return re.findall(Yedit.re_key.format(''.join(common_separators)), key)
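    # Hedged examples for parse_key with the default separator:
    #   Yedit.parse_key('a.b.c')    -> [('', 'a'), ('', 'b'), ('', 'c')]
    #   Yedit.parse_key('a.b[1].c') -> [('', 'a'), ('', 'b'), ('1', ''), ('', 'c')]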
@staticmethod
def valid_key(key, sep='.'):
'''validate the incoming key'''
common_separators = list(Yedit.com_sep - set([sep]))
if not re.match(Yedit.re_valid_key.format(''.join(common_separators)), key):
return False
return True
@staticmethod
def remove_entry(data, key, sep='.'):
''' remove data at location key '''
if key == '' and isinstance(data, dict):
data.clear()
return True
elif key == '' and isinstance(data, list):
del data[:]
return True
if not (key and Yedit.valid_key(key, sep)) and \
isinstance(data, (list, dict)):
return None
key_indexes = Yedit.parse_key(key, sep)
for arr_ind, dict_key in key_indexes[:-1]:
if dict_key and isinstance(data, dict):
data = data.get(dict_key)
elif (arr_ind and isinstance(data, list) and
int(arr_ind) <= len(data) - 1):
data = data[int(arr_ind)]
else:
return None
# process last index for remove
# expected list entry
if key_indexes[-1][0]:
if isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1: # noqa: E501
del data[int(key_indexes[-1][0])]
return True
# expected dict entry
elif key_indexes[-1][1]:
if isinstance(data, dict):
del data[key_indexes[-1][1]]
return True
@staticmethod
def add_entry(data, key, item=None, sep='.'):
        ''' Add an item to a dictionary with key notation a.b.c
            d = {'a': {'b': 'c'}}
key = a#b
return c
'''
if key == '':
pass
elif (not (key and Yedit.valid_key(key, sep)) and
isinstance(data, (list, dict))):
return None
key_indexes = Yedit.parse_key(key, sep)
for arr_ind, dict_key in key_indexes[:-1]:
if dict_key:
if isinstance(data, dict) and dict_key in data and data[dict_key]: # noqa: E501
data = data[dict_key]
continue
elif data and not isinstance(data, dict):
raise YeditException("Unexpected item type found while going through key " +
"path: {} (at key: {})".format(key, dict_key))
data[dict_key] = {}
data = data[dict_key]
elif (arr_ind and isinstance(data, list) and
int(arr_ind) <= len(data) - 1):
data = data[int(arr_ind)]
else:
raise YeditException("Unexpected item type found while going through key path: {}".format(key))
if key == '':
data = item
# process last index for add
# expected list entry
elif key_indexes[-1][0] and isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1: # noqa: E501
data[int(key_indexes[-1][0])] = item
# expected dict entry
elif key_indexes[-1][1] and isinstance(data, dict):
data[key_indexes[-1][1]] = item
# didn't add/update to an existing list, nor add/update key to a dict
# so we must have been provided some syntax like a.b.c[<int>] = "data" for a
# non-existent array
else:
raise YeditException("Error adding to object at path: {}".format(key))
return data
@staticmethod
def get_entry(data, key, sep='.'):
''' Get an item from a dictionary with key notation a.b.c
            d = {'a': {'b': 'c'}}
key = a.b
return c
'''
if key == '':
pass
elif (not (key and Yedit.valid_key(key, sep)) and
isinstance(data, (list, dict))):
return None
key_indexes = Yedit.parse_key(key, sep)
for arr_ind, dict_key in key_indexes:
if dict_key and isinstance(data, dict):
data = data.get(dict_key)
elif (arr_ind and isinstance(data, list) and
int(arr_ind) <= len(data) - 1):
data = data[int(arr_ind)]
else:
return None
return data
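    # Hedged example for get_entry:
    #   Yedit.get_entry({'a': {'b': {'c': 'd'}}}, 'a.b.c') -> 'd'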
@staticmethod
def _write(filename, contents):
''' Actually write the file contents to disk. This helps with mocking. '''
tmp_filename = filename + '.yedit'
with open(tmp_filename, 'w') as yfd:
yfd.write(contents)
os.rename(tmp_filename, filename)
def write(self):
''' write to file '''
if not self.filename:
raise YeditException('Please specify a filename.')
if self.backup and self.file_exists():
shutil.copy(self.filename, self.filename + '.orig')
# Try to set format attributes if supported
try:
self.yaml_dict.fa.set_block_style()
except AttributeError:
pass
# Try to use RoundTripDumper if supported.
try:
Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
except AttributeError:
Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False))
return (True, self.yaml_dict)
def read(self):
''' read from file '''
# check if it exists
if self.filename is None or not self.file_exists():
return None
contents = None
with open(self.filename) as yfd:
contents = yfd.read()
return contents
def file_exists(self):
''' return whether file exists '''
if os.path.exists(self.filename):
return True
return False
def load(self, content_type='yaml'):
''' return yaml file '''
contents = self.read()
if not contents and not self.content:
return None
if self.content:
if isinstance(self.content, dict):
self.yaml_dict = self.content
return self.yaml_dict
elif isinstance(self.content, str):
contents = self.content
# check if it is yaml
try:
if content_type == 'yaml' and contents:
# Try to set format attributes if supported
try:
self.yaml_dict.fa.set_block_style()
except AttributeError:
pass
# Try to use RoundTripLoader if supported.
try:
                    self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
except AttributeError:
self.yaml_dict = yaml.safe_load(contents)
# Try to set format attributes if supported
try:
self.yaml_dict.fa.set_block_style()
except AttributeError:
pass
elif content_type == 'json' and contents:
self.yaml_dict = json.loads(contents)
except yaml.YAMLError as err:
# Error loading yaml or json
raise YeditException('Problem with loading yaml file. {}'.format(err))
return self.yaml_dict
def get(self, key):
''' get a specified key'''
try:
entry = Yedit.get_entry(self.yaml_dict, key, self.separator)
except KeyError:
entry = None
return entry
def pop(self, path, key_or_item):
''' remove a key, value pair from a dict or an item for a list'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if entry is None:
return (False, self.yaml_dict)
if isinstance(entry, dict):
# AUDIT:maybe-no-member makes sense due to fuzzy types
# pylint: disable=maybe-no-member
if key_or_item in entry:
entry.pop(key_or_item)
return (True, self.yaml_dict)
return (False, self.yaml_dict)
elif isinstance(entry, list):
# AUDIT:maybe-no-member makes sense due to fuzzy types
# pylint: disable=maybe-no-member
ind = None
try:
ind = entry.index(key_or_item)
except ValueError:
return (False, self.yaml_dict)
entry.pop(ind)
return (True, self.yaml_dict)
return (False, self.yaml_dict)
def delete(self, path):
''' remove path from a dict'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if entry is None:
return (False, self.yaml_dict)
result = Yedit.remove_entry(self.yaml_dict, path, self.separator)
if not result:
return (False, self.yaml_dict)
return (True, self.yaml_dict)
def exists(self, path, value):
''' check if value exists at path'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if isinstance(entry, list):
if value in entry:
return True
return False
elif isinstance(entry, dict):
if isinstance(value, dict):
rval = False
for key, val in value.items():
if entry[key] != val:
rval = False
break
else:
rval = True
return rval
return value in entry
return entry == value
def append(self, path, value):
'''append value to a list'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if entry is None:
self.put(path, [])
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
if not isinstance(entry, list):
return (False, self.yaml_dict)
# AUDIT:maybe-no-member makes sense due to loading data from
# a serialized format.
# pylint: disable=maybe-no-member
entry.append(value)
return (True, self.yaml_dict)
# pylint: disable=too-many-arguments
def update(self, path, value, index=None, curr_value=None):
        ''' update the entry at path: merge into dicts, replace or append in lists '''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if isinstance(entry, dict):
# AUDIT:maybe-no-member makes sense due to fuzzy types
# pylint: disable=maybe-no-member
if not isinstance(value, dict):
raise YeditException('Cannot replace key, value entry in dict with non-dict type. ' +
'value=[{}] type=[{}]'.format(value, type(value)))
entry.update(value)
return (True, self.yaml_dict)
elif isinstance(entry, list):
# AUDIT:maybe-no-member makes sense due to fuzzy types
# pylint: disable=maybe-no-member
ind = None
if curr_value:
try:
ind = entry.index(curr_value)
except ValueError:
return (False, self.yaml_dict)
elif index is not None:
ind = index
if ind is not None and entry[ind] != value:
entry[ind] = value
return (True, self.yaml_dict)
# see if it exists in the list
try:
ind = entry.index(value)
except ValueError:
# doesn't exist, append it
entry.append(value)
return (True, self.yaml_dict)
# already exists, return
if ind is not None:
return (False, self.yaml_dict)
return (False, self.yaml_dict)
def put(self, path, value):
''' put path, value into a dict '''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if entry == value:
return (False, self.yaml_dict)
        # A plain deepcopy loses ruamel.yaml format metadata, so round-trip
        # through ruamel.yaml when available and fall back to pyyaml
try:
tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
default_flow_style=False),
yaml.RoundTripLoader)
except AttributeError:
tmp_copy = copy.deepcopy(self.yaml_dict)
# set the format attributes if available
try:
tmp_copy.fa.set_block_style()
except AttributeError:
pass
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if result is None:
return (False, self.yaml_dict)
        # When path equals "" it is a special case:
        # "" refers to the root of the document.
        # Only replace the root (the entire document) when the result is a list or dict.
if path == '':
if isinstance(result, list) or isinstance(result, dict):
self.yaml_dict = result
return (True, self.yaml_dict)
return (False, self.yaml_dict)
self.yaml_dict = tmp_copy
return (True, self.yaml_dict)
def create(self, path, value):
''' create a yaml file '''
if not self.file_exists():
            # A plain deepcopy loses ruamel.yaml format metadata, so round-trip
            # through ruamel.yaml when available and fall back to pyyaml
try:
tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
default_flow_style=False),
yaml.RoundTripLoader)
except AttributeError:
tmp_copy = copy.deepcopy(self.yaml_dict)
# set the format attributes if available
try:
tmp_copy.fa.set_block_style()
except AttributeError:
pass
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if result is not None:
self.yaml_dict = tmp_copy
return (True, self.yaml_dict)
return (False, self.yaml_dict)
@staticmethod
def get_curr_value(invalue, val_type):
'''return the current value'''
if invalue is None:
return None
curr_value = invalue
if val_type == 'yaml':
curr_value = yaml.load(invalue)
elif val_type == 'json':
curr_value = json.loads(invalue)
return curr_value
@staticmethod
def parse_value(inc_value, vtype=''):
'''determine value type passed'''
true_bools = ['y', 'Y', 'yes', 'Yes', 'YES', 'true', 'True', 'TRUE',
'on', 'On', 'ON', ]
false_bools = ['n', 'N', 'no', 'No', 'NO', 'false', 'False', 'FALSE',
'off', 'Off', 'OFF']
        # The value arrived as a string; when the caller requests a bool it
        # must match one of the accepted boolean spellings above
if isinstance(inc_value, str) and 'bool' in vtype:
if inc_value not in true_bools and inc_value not in false_bools:
raise YeditException('Not a boolean type. str=[{}] vtype=[{}]'.format(inc_value, vtype))
elif isinstance(inc_value, bool) and 'str' in vtype:
inc_value = str(inc_value)
        # Special case: yaml loads '' as None, so skip empty strings
if isinstance(inc_value, str) and inc_value == '':
pass
# If vtype is not str then go ahead and attempt to yaml load it.
elif isinstance(inc_value, str) and 'str' not in vtype:
try:
inc_value = yaml.safe_load(inc_value)
except Exception:
raise YeditException('Could not determine type of incoming value. ' +
'value=[{}] vtype=[{}]'.format(type(inc_value), vtype))
return inc_value
@staticmethod
def process_edits(edits, yamlfile):
'''run through a list of edits and process them one-by-one'''
results = []
for edit in edits:
value = Yedit.parse_value(edit['value'], edit.get('value_type', ''))
if edit.get('action') == 'update':
# pylint: disable=line-too-long
curr_value = Yedit.get_curr_value(
Yedit.parse_value(edit.get('curr_value')),
edit.get('curr_value_format'))
rval = yamlfile.update(edit['key'],
value,
edit.get('index'),
curr_value)
elif edit.get('action') == 'append':
rval = yamlfile.append(edit['key'], value)
else:
rval = yamlfile.put(edit['key'], value)
if rval[0]:
results.append({'key': edit['key'], 'edit': rval[1]})
return {'changed': len(results) > 0, 'results': results}
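    # Editor's note: each edit consumed above is a dict of the following
    # illustrative shape (inferred from the code, not a formal schema):
    #   {'key': 'a.b.c', 'value': <new value>,
    #    'value_type': 'str'|'bool'|'' (optional),
    #    'action': 'update'|'append' (optional; anything else means put),
    #    'index'/'curr_value'/'curr_value_format': update-only selectors}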
# pylint: disable=too-many-return-statements,too-many-branches
@staticmethod
def run_ansible(params):
'''perform the idempotent crud operations'''
yamlfile = Yedit(filename=params['src'],
backup=params['backup'],
separator=params['separator'])
state = params['state']
if params['src']:
rval = yamlfile.load()
if yamlfile.yaml_dict is None and state != 'present':
return {'failed': True,
                        'msg': 'Error opening file [{}]. Verify that the '.format(params['src']) +
                               'file exists, that it has correct permissions, and is valid yaml.'}
if state == 'list':
if params['content']:
content = Yedit.parse_value(params['content'], params['content_type'])
yamlfile.yaml_dict = content
if params['key']:
rval = yamlfile.get(params['key']) or {}
return {'changed': False, 'result': rval, 'state': state}
elif state == 'absent':
if params['content']:
content = Yedit.parse_value(params['content'], params['content_type'])
yamlfile.yaml_dict = content
if params['update']:
rval = yamlfile.pop(params['key'], params['value'])
else:
rval = yamlfile.delete(params['key'])
if rval[0] and params['src']:
yamlfile.write()
return {'changed': rval[0], 'result': rval[1], 'state': state}
elif state == 'present':
# check if content is different than what is in the file
if params['content']:
content = Yedit.parse_value(params['content'], params['content_type'])
# We had no edits to make and the contents are the same
if yamlfile.yaml_dict == content and \
params['value'] is None:
return {'changed': False, 'result': yamlfile.yaml_dict, 'state': state}
yamlfile.yaml_dict = content
            # If we were passed a key and value, we
            # encapsulate them in a list and process them
            # Key, Value passed to the module : Converted to Edits list #
edits = []
_edit = {}
if params['value'] is not None:
_edit['value'] = params['value']
_edit['value_type'] = params['value_type']
_edit['key'] = params['key']
if params['update']:
_edit['action'] = 'update'
_edit['curr_value'] = params['curr_value']
_edit['curr_value_format'] = params['curr_value_format']
_edit['index'] = params['index']
elif params['append']:
_edit['action'] = 'append'
edits.append(_edit)
elif params['edits'] is not None:
edits = params['edits']
if edits:
results = Yedit.process_edits(edits, yamlfile)
# if there were changes and a src provided to us we need to write
if results['changed'] and params['src']:
yamlfile.write()
return {'changed': results['changed'], 'result': results['results'], 'state': state}
# no edits to make
if params['src']:
# pylint: disable=redefined-variable-type
rval = yamlfile.write()
return {'changed': rval[0],
'result': rval[1],
'state': state}
# We were passed content but no src, key or value, or edits. Return contents in memory
return {'changed': False, 'result': yamlfile.yaml_dict, 'state': state}
        return {'failed': True, 'msg': 'Unknown state passed'}
# -*- -*- -*- End included fragment: ../../lib_utils/src/class/yedit.py -*- -*- -*-
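# Editor's note: a minimal, illustrative sketch of driving the Yedit class above
# entirely in memory. It assumes the constructor accepts content=..., as the
# call sites later in this file do, and it is never invoked here.
def _yedit_usage_sketch():
    yed = Yedit(content={'a': {'b': {'c': 'd'}}})
    assert yed.get('a.b.c') == 'd'                # dotted-key lookup
    changed, doc = yed.put('a.b.e', 'f')          # create a new nested key
    assert changed and doc['a']['b']['e'] == 'f'
    changed, doc = yed.append('a.items', 'x')     # create the list, then append
    assert changed and doc['a']['items'] == ['x']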
# -*- -*- -*- Begin included fragment: lib/base.py -*- -*- -*-
# pylint: disable=too-many-lines
# noqa: E301,E302,E303,T001
class OpenShiftCLIError(Exception):
'''Exception class for openshiftcli'''
pass
ADDITIONAL_PATH_LOOKUPS = ['/usr/local/bin', os.path.expanduser('~/bin')]
def locate_oc_binary():
''' Find and return oc binary file '''
# https://github.com/openshift/openshift-ansible/issues/3410
# oc can be in /usr/local/bin in some cases, but that may not
# be in $PATH due to ansible/sudo
paths = os.environ.get("PATH", os.defpath).split(os.pathsep) + ADDITIONAL_PATH_LOOKUPS
oc_binary = 'oc'
# Use shutil.which if it is available, otherwise fallback to a naive path search
try:
which_result = shutil.which(oc_binary, path=os.pathsep.join(paths))
if which_result is not None:
oc_binary = which_result
except AttributeError:
for path in paths:
if os.path.exists(os.path.join(path, oc_binary)):
oc_binary = os.path.join(path, oc_binary)
break
return oc_binary
# pylint: disable=too-few-public-methods
class OpenShiftCLI(object):
''' Class to wrap the command line tools '''
def __init__(self,
namespace,
kubeconfig='/etc/origin/master/admin.kubeconfig',
verbose=False,
all_namespaces=False):
''' Constructor for OpenshiftCLI '''
self.namespace = namespace
self.verbose = verbose
self.kubeconfig = Utils.create_tmpfile_copy(kubeconfig)
self.all_namespaces = all_namespaces
self.oc_binary = locate_oc_binary()
# Pylint allows only 5 arguments to be passed.
# pylint: disable=too-many-arguments
def _replace_content(self, resource, rname, content, force=False, sep='.'):
''' replace the current object with the content '''
res = self._get(resource, rname)
if not res['results']:
return res
fname = Utils.create_tmpfile(rname + '-')
yed = Yedit(fname, res['results'][0], separator=sep)
changes = []
for key, value in content.items():
changes.append(yed.put(key, value))
if any([change[0] for change in changes]):
yed.write()
atexit.register(Utils.cleanup, [fname])
return self._replace(fname, force)
return {'returncode': 0, 'updated': False}
def _replace(self, fname, force=False):
'''replace the current object with oc replace'''
# We are removing the 'resourceVersion' to handle
# a race condition when modifying oc objects
yed = Yedit(fname)
results = yed.delete('metadata.resourceVersion')
if results[0]:
yed.write()
cmd = ['replace', '-f', fname]
if force:
cmd.append('--force')
return self.openshift_cmd(cmd)
def _create_from_content(self, rname, content):
'''create a temporary file and then call oc create on it'''
fname = Utils.create_tmpfile(rname + '-')
yed = Yedit(fname, content=content)
yed.write()
atexit.register(Utils.cleanup, [fname])
return self._create(fname)
def _create(self, fname):
'''call oc create on a filename'''
return self.openshift_cmd(['create', '-f', fname])
def _delete(self, resource, name=None, selector=None):
'''call oc delete on a resource'''
cmd = ['delete', resource]
if selector is not None:
cmd.append('--selector={}'.format(selector))
elif name is not None:
cmd.append(name)
else:
raise OpenShiftCLIError('Either name or selector is required when calling delete.')
return self.openshift_cmd(cmd)
def _process(self, template_name, create=False, params=None, template_data=None): # noqa: E501
'''process a template
template_name: the name of the template to process
create: whether to send to oc create after processing
params: the parameters for the template
template_data: the incoming template's data; instead of a file
'''
cmd = ['process']
if template_data:
cmd.extend(['-f', '-'])
else:
cmd.append(template_name)
if params:
param_str = ["{}={}".format(key, str(value).replace("'", r'"')) for key, value in params.items()]
cmd.append('-v')
cmd.extend(param_str)
results = self.openshift_cmd(cmd, output=True, input_data=template_data)
if results['returncode'] != 0 or not create:
return results
fname = Utils.create_tmpfile(template_name + '-')
yed = Yedit(fname, results['results'])
yed.write()
atexit.register(Utils.cleanup, [fname])
return self.openshift_cmd(['create', '-f', fname])
def _get(self, resource, name=None, selector=None):
'''return a resource by name '''
cmd = ['get', resource]
if selector is not None:
cmd.append('--selector={}'.format(selector))
elif name is not None:
cmd.append(name)
cmd.extend(['-o', 'json'])
rval = self.openshift_cmd(cmd, output=True)
        # Ensure results are returned in an array
if 'items' in rval:
rval['results'] = rval['items']
elif not isinstance(rval['results'], list):
rval['results'] = [rval['results']]
return rval
def _schedulable(self, node=None, selector=None, schedulable=True):
        ''' perform oadm manage-node schedulable '''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector={}'.format(selector))
cmd.append('--schedulable={}'.format(schedulable))
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw') # noqa: E501
def _list_pods(self, node=None, selector=None, pod_selector=None):
''' perform oadm list pods
node: the node in which to list pods
selector: the label selector filter if provided
pod_selector: the pod selector filter if provided
'''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector={}'.format(selector))
if pod_selector:
cmd.append('--pod-selector={}'.format(pod_selector))
cmd.extend(['--list-pods', '-o', 'json'])
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
# pylint: disable=too-many-arguments
def _evacuate(self, node=None, selector=None, pod_selector=None, dry_run=False, grace_period=None, force=False):
''' perform oadm manage-node evacuate '''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector={}'.format(selector))
if dry_run:
cmd.append('--dry-run')
if pod_selector:
cmd.append('--pod-selector={}'.format(pod_selector))
if grace_period:
cmd.append('--grace-period={}'.format(int(grace_period)))
if force:
cmd.append('--force')
cmd.append('--evacuate')
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
def _version(self):
''' return the openshift version'''
return self.openshift_cmd(['version'], output=True, output_type='raw')
def _import_image(self, url=None, name=None, tag=None):
''' perform image import '''
cmd = ['import-image']
image = '{0}'.format(name)
if tag:
image += ':{0}'.format(tag)
cmd.append(image)
if url:
cmd.append('--from={0}/{1}'.format(url, image))
cmd.append('-n{0}'.format(self.namespace))
cmd.append('--confirm')
return self.openshift_cmd(cmd)
def _run(self, cmds, input_data):
''' Actually executes the command. This makes mocking easier. '''
curr_env = os.environ.copy()
curr_env.update({'KUBECONFIG': self.kubeconfig})
proc = subprocess.Popen(cmds,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=curr_env)
stdout, stderr = proc.communicate(input_data)
return proc.returncode, stdout.decode('utf-8'), stderr.decode('utf-8')
# pylint: disable=too-many-arguments,too-many-branches
def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
'''Base command for oc '''
cmds = [self.oc_binary]
if oadm:
cmds.append('adm')
cmds.extend(cmd)
if self.all_namespaces:
cmds.extend(['--all-namespaces'])
        elif self.namespace is not None and self.namespace.lower() not in ['none', 'empty']:  # E501
cmds.extend(['-n', self.namespace])
rval = {}
results = ''
err = None
if self.verbose:
print(' '.join(cmds))
try:
returncode, stdout, stderr = self._run(cmds, input_data)
except OSError as ex:
returncode, stdout, stderr = 1, '', 'Failed to execute {}: {}'.format(subprocess.list2cmdline(cmds), ex)
rval = {"returncode": returncode,
"results": results,
"cmd": ' '.join(cmds)}
if returncode == 0:
if output:
if output_type == 'json':
try:
rval['results'] = json.loads(stdout)
except ValueError as verr:
if "No JSON object could be decoded" in verr.args:
err = verr.args
elif output_type == 'raw':
rval['results'] = stdout
if self.verbose:
print("STDOUT: {0}".format(stdout))
print("STDERR: {0}".format(stderr))
if err:
rval.update({"err": err,
"stderr": stderr,
"stdout": stdout,
"cmd": cmds})
else:
rval.update({"stderr": stderr,
"stdout": stdout,
"results": {}})
return rval
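# Editor's note: an illustrative sketch of how callers drive openshift_cmd()
# above; the resource name is hypothetical and the function is never invoked
# here (constructing OpenShiftCLI copies the kubeconfig to a temp file).
def _oc_usage_sketch():
    cli = OpenShiftCLI(namespace='default', verbose=True)
    rval = cli._get('pods')      # runs: oc get pods -o json -n default
    if rval['returncode'] == 0:
        return rval['results']   # list of resource definitions
    return []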
class Utils(object): # pragma: no cover
''' utilities for openshiftcli modules '''
@staticmethod
def _write(filename, contents):
''' Actually write the file contents to disk. This helps with mocking. '''
with open(filename, 'w') as sfd:
sfd.write(contents)
@staticmethod
def create_tmp_file_from_contents(rname, data, ftype='yaml'):
''' create a file in tmp with name and contents'''
tmp = Utils.create_tmpfile(prefix=rname)
if ftype == 'yaml':
# AUDIT:no-member makes sense here due to ruamel.YAML/PyYAML usage
# pylint: disable=no-member
if hasattr(yaml, 'RoundTripDumper'):
Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
else:
Utils._write(tmp, yaml.safe_dump(data, default_flow_style=False))
elif ftype == 'json':
Utils._write(tmp, json.dumps(data))
else:
Utils._write(tmp, data)
# Register cleanup when module is done
atexit.register(Utils.cleanup, [tmp])
return tmp
@staticmethod
def create_tmpfile_copy(inc_file):
'''create a temporary copy of a file'''
tmpfile = Utils.create_tmpfile('lib_openshift-')
Utils._write(tmpfile, open(inc_file).read())
# Cleanup the tmpfile
atexit.register(Utils.cleanup, [tmpfile])
return tmpfile
@staticmethod
def create_tmpfile(prefix='tmp'):
''' Generates and returns a temporary file name '''
with tempfile.NamedTemporaryFile(prefix=prefix, delete=False) as tmp:
return tmp.name
@staticmethod
def create_tmp_files_from_contents(content, content_type=None):
'''Turn an array of dict: filename, content into a files array'''
if not isinstance(content, list):
content = [content]
files = []
for item in content:
path = Utils.create_tmp_file_from_contents(item['path'] + '-',
item['data'],
ftype=content_type)
files.append({'name': os.path.basename(item['path']),
'path': path})
return files
@staticmethod
def cleanup(files):
'''Clean up on exit '''
for sfile in files:
if os.path.exists(sfile):
if os.path.isdir(sfile):
shutil.rmtree(sfile)
elif os.path.isfile(sfile):
os.remove(sfile)
@staticmethod
def exists(results, _name):
''' Check to see if the results include the name '''
if not results:
return False
if Utils.find_result(results, _name):
return True
return False
@staticmethod
def find_result(results, _name):
''' Find the specified result by name'''
rval = None
for result in results:
if 'metadata' in result and result['metadata']['name'] == _name:
rval = result
break
return rval
@staticmethod
def get_resource_file(sfile, sfile_type='yaml'):
''' return the service file '''
contents = None
with open(sfile) as sfd:
contents = sfd.read()
if sfile_type == 'yaml':
# AUDIT:no-member makes sense here due to ruamel.YAML/PyYAML usage
# pylint: disable=no-member
if hasattr(yaml, 'RoundTripLoader'):
contents = yaml.load(contents, yaml.RoundTripLoader)
else:
contents = yaml.safe_load(contents)
elif sfile_type == 'json':
contents = json.loads(contents)
return contents
@staticmethod
def filter_versions(stdout):
''' filter the oc version output '''
version_dict = {}
version_search = ['oc', 'openshift', 'kubernetes']
for line in stdout.strip().split('\n'):
for term in version_search:
if not line:
continue
if line.startswith(term):
version_dict[term] = line.split()[-1]
        # Horrible hack to get the openshift version in OpenShift 3.2:
        # by default "oc version" in 3.2 does not return an "openshift" version
if "openshift" not in version_dict:
version_dict["openshift"] = version_dict["oc"]
return version_dict
@staticmethod
def add_custom_versions(versions):
''' create custom versions strings '''
versions_dict = {}
for tech, version in versions.items():
# clean up "-" from version
if "-" in version:
version = version.split("-")[0]
if version.startswith('v'):
versions_dict[tech + '_numeric'] = version[1:].split('+')[0]
# "v3.3.0.33" is what we have, we want "3.3"
versions_dict[tech + '_short'] = version[1:4]
return versions_dict
@staticmethod
def openshift_installed():
''' check if openshift is installed '''
import yum
yum_base = yum.YumBase()
if yum_base.rpmdb.searchNevra(name='atomic-openshift'):
return True
return False
# Disabling too-many-branches. This is a yaml dictionary comparison function
# pylint: disable=too-many-branches,too-many-return-statements,too-many-statements
@staticmethod
def check_def_equal(user_def, result_def, skip_keys=None, debug=False):
''' Given a user defined definition, compare it with the results given back by our query. '''
# Currently these values are autogenerated and we do not need to check them
skip = ['metadata', 'status']
if skip_keys:
skip.extend(skip_keys)
for key, value in result_def.items():
if key in skip:
continue
# Both are lists
if isinstance(value, list):
if key not in user_def:
if debug:
print('User data does not have key [%s]' % key)
print('User data: %s' % user_def)
return False
if not isinstance(user_def[key], list):
if debug:
print('user_def[key] is not a list key=[%s] user_def[key]=%s' % (key, user_def[key]))
return False
if len(user_def[key]) != len(value):
if debug:
print("List lengths are not equal.")
print("key=[%s]: user_def[%s] != value[%s]" % (key, len(user_def[key]), len(value)))
print("user_def: %s" % user_def[key])
print("value: %s" % value)
return False
for values in zip(user_def[key], value):
if isinstance(values[0], dict) and isinstance(values[1], dict):
if debug:
print('sending list - list')
print(type(values[0]))
print(type(values[1]))
result = Utils.check_def_equal(values[0], values[1], skip_keys=skip_keys, debug=debug)
if not result:
print('list compare returned false')
return False
elif value != user_def[key]:
if debug:
print('value should be identical')
print(user_def[key])
print(value)
return False
# recurse on a dictionary
elif isinstance(value, dict):
if key not in user_def:
if debug:
print("user_def does not have key [%s]" % key)
return False
if not isinstance(user_def[key], dict):
if debug:
print("dict returned false: not instance of dict")
return False
# before passing ensure keys match
api_values = set(value.keys()) - set(skip)
user_values = set(user_def[key].keys()) - set(skip)
if api_values != user_values:
if debug:
print("keys are not equal in dict")
print(user_values)
print(api_values)
return False
result = Utils.check_def_equal(user_def[key], value, skip_keys=skip_keys, debug=debug)
if not result:
if debug:
print("dict returned false")
print(result)
return False
# Verify each key, value pair is the same
else:
if key not in user_def or value != user_def[key]:
if debug:
print("value not equal; user_def does not have key")
print(key)
print(value)
if key in user_def:
print(user_def[key])
return False
if debug:
print('returning true')
return True
class OpenShiftCLIConfig(object):
'''Generic Config'''
def __init__(self, rname, namespace, kubeconfig, options):
self.kubeconfig = kubeconfig
self.name = rname
self.namespace = namespace
self._options = options
@property
def config_options(self):
''' return config options '''
return self._options
def to_option_list(self, ascommalist=''):
'''return all options as a string
if ascommalist is set to the name of a key, and
the value of that key is a dict, format the dict
as a list of comma delimited key=value pairs'''
return self.stringify(ascommalist)
def stringify(self, ascommalist=''):
''' return the options hash as cli params in a string
if ascommalist is set to the name of a key, and
the value of that key is a dict, format the dict
as a list of comma delimited key=value pairs '''
rval = []
for key in sorted(self.config_options.keys()):
data = self.config_options[key]
if data['include'] \
and (data['value'] or isinstance(data['value'], int)):
if key == ascommalist:
val = ','.join(['{}={}'.format(kk, vv) for kk, vv in sorted(data['value'].items())])
else:
val = data['value']
rval.append('--{}={}'.format(key.replace('_', '-'), val))
return rval
# -*- -*- -*- End included fragment: lib/base.py -*- -*- -*-
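# Editor's note: illustrative use of OpenShiftCLIConfig.stringify() above with a
# hypothetical options hash; never invoked here.
def _config_options_sketch():
    opts = {'replicas': {'value': 3, 'include': True},
            'labels': {'value': {'app': 'web', 'tier': 'front'}, 'include': True}}
    cfg = OpenShiftCLIConfig('web', 'default',
                             '/etc/origin/master/admin.kubeconfig', opts)
    # With ascommalist='labels' the dict value is flattened to k=v pairs:
    # ['--labels=app=web,tier=front', '--replicas=3']
    return cfg.to_option_list(ascommalist='labels')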
# -*- -*- -*- Begin included fragment: lib/serviceaccount.py -*- -*- -*-
class ServiceAccountConfig(object):
'''Service account config class
This class stores the options and returns a default service account
'''
# pylint: disable=too-many-arguments
def __init__(self, sname, namespace, kubeconfig, secrets=None, image_pull_secrets=None):
self.name = sname
self.kubeconfig = kubeconfig
self.namespace = namespace
self.secrets = secrets or []
self.image_pull_secrets = image_pull_secrets or []
self.data = {}
self.create_dict()
def create_dict(self):
''' instantiate a properly structured volume '''
self.data['apiVersion'] = 'v1'
self.data['kind'] = 'ServiceAccount'
self.data['metadata'] = {}
self.data['metadata']['name'] = self.name
self.data['metadata']['namespace'] = self.namespace
self.data['secrets'] = []
if self.secrets:
for sec in self.secrets:
self.data['secrets'].append({"name": sec})
self.data['imagePullSecrets'] = []
if self.image_pull_secrets:
for sec in self.image_pull_secrets:
self.data['imagePullSecrets'].append({"name": sec})
class ServiceAccount(Yedit):
''' Class to wrap the oc command line tools '''
image_pull_secrets_path = "imagePullSecrets"
secrets_path = "secrets"
def __init__(self, content):
'''ServiceAccount constructor'''
super(ServiceAccount, self).__init__(content=content)
self._secrets = None
self._image_pull_secrets = None
@property
def image_pull_secrets(self):
''' property for image_pull_secrets '''
if self._image_pull_secrets is None:
self._image_pull_secrets = self.get(ServiceAccount.image_pull_secrets_path) or []
return self._image_pull_secrets
@image_pull_secrets.setter
def image_pull_secrets(self, secrets):
''' property for secrets '''
self._image_pull_secrets = secrets
@property
def secrets(self):
''' property for secrets '''
if not self._secrets:
self._secrets = self.get(ServiceAccount.secrets_path) or []
return self._secrets
@secrets.setter
def secrets(self, secrets):
''' property for secrets '''
self._secrets = secrets
def delete_secret(self, inc_secret):
''' remove a secret '''
remove_idx = None
for idx, sec in enumerate(self.secrets):
if sec['name'] == inc_secret:
remove_idx = idx
break
        if remove_idx is not None:
del self.secrets[remove_idx]
return True
return False
def delete_image_pull_secret(self, inc_secret):
''' remove a image_pull_secret '''
remove_idx = None
for idx, sec in enumerate(self.image_pull_secrets):
if sec['name'] == inc_secret:
remove_idx = idx
break
        if remove_idx is not None:
del self.image_pull_secrets[remove_idx]
return True
return False
def find_secret(self, inc_secret):
'''find secret'''
for secret in self.secrets:
if secret['name'] == inc_secret:
return secret
return None
def find_image_pull_secret(self, inc_secret):
'''find secret'''
for secret in self.image_pull_secrets:
if secret['name'] == inc_secret:
return secret
return None
def add_secret(self, inc_secret):
'''add secret'''
if self.secrets:
self.secrets.append({"name": inc_secret}) # pylint: disable=no-member
else:
self.put(ServiceAccount.secrets_path, [{"name": inc_secret}])
def add_image_pull_secret(self, inc_secret):
'''add image_pull_secret'''
if self.image_pull_secrets:
self.image_pull_secrets.append({"name": inc_secret}) # pylint: disable=no-member
else:
self.put(ServiceAccount.image_pull_secrets_path, [{"name": inc_secret}])
# -*- -*- -*- End included fragment: lib/serviceaccount.py -*- -*- -*-
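# Editor's note: a small illustrative sketch of the two classes above; the
# secret names are hypothetical and the function is never invoked here.
def _service_account_sketch():
    cfg = ServiceAccountConfig('builder', 'default',
                               '/etc/origin/master/admin.kubeconfig',
                               secrets=['builder-token'])
    sa = ServiceAccount(content=cfg.data)
    assert sa.find_secret('builder-token') is not None
    sa.add_secret('extra-token')    # appends to the cached secrets list
    assert sa.find_secret('extra-token') is not None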
# -*- -*- -*- Begin included fragment: class/oc_serviceaccount_secret.py -*- -*- -*-
class OCServiceAccountSecret(OpenShiftCLI):
''' Class to wrap the oc command line tools '''
kind = 'sa'
def __init__(self, config, verbose=False):
''' Constructor for OpenshiftOC '''
super(OCServiceAccountSecret, self).__init__(config.namespace, kubeconfig=config.kubeconfig, verbose=verbose)
self.config = config
self.verbose = verbose
self._service_account = None
@property
def service_account(self):
''' Property for the service account '''
if not self._service_account:
self.get()
return self._service_account
@service_account.setter
def service_account(self, data):
''' setter for the service account '''
self._service_account = data
def exists(self, in_secret):
''' verifies if secret exists in the service account '''
result = self.service_account.find_secret(in_secret)
if not result:
return False
return True
def get(self):
''' get the service account definition from the master '''
sao = self._get(OCServiceAccountSecret.kind, self.config.name)
if sao['returncode'] == 0:
self.service_account = ServiceAccount(content=sao['results'][0])
sao['results'] = self.service_account.get('secrets')
return sao
def delete(self):
''' delete secrets '''
modified = []
for rem_secret in self.config.secrets:
modified.append(self.service_account.delete_secret(rem_secret))
if any(modified):
return self._replace_content(OCServiceAccountSecret.kind, self.config.name, self.service_account.yaml_dict)
return {'returncode': 0, 'changed': False}
def put(self):
''' place secrets into sa '''
modified = False
for add_secret in self.config.secrets:
if not self.service_account.find_secret(add_secret):
self.service_account.add_secret(add_secret)
modified = True
if modified:
return self._replace_content(OCServiceAccountSecret.kind, self.config.name, self.service_account.yaml_dict)
return {'returncode': 0, 'changed': False}
@staticmethod
# pylint: disable=too-many-return-statements,too-many-branches
# TODO: This function should be refactored into its individual parts.
def run_ansible(params, check_mode):
''' run the ansible idempotent code '''
sconfig = ServiceAccountConfig(params['service_account'],
params['namespace'],
params['kubeconfig'],
[params['secret']],
None)
oc_sa_sec = OCServiceAccountSecret(sconfig, verbose=params['debug'])
state = params['state']
api_rval = oc_sa_sec.get()
#####
# Get
#####
if state == 'list':
return {'changed': False, 'results': api_rval['results'], 'state': "list"}
########
# Delete
########
if state == 'absent':
if oc_sa_sec.exists(params['secret']):
if check_mode:
                    return {'changed': True, 'msg': 'Would have removed the ' + \
                        'secret from the service account.'}
api_rval = oc_sa_sec.delete()
return {'changed': True, 'results': api_rval, 'state': "absent"}
return {'changed': False, 'state': "absent"}
if state == 'present':
########
# Create
########
if not oc_sa_sec.exists(params['secret']):
if check_mode:
return {'changed': True, 'msg': 'Would have added the ' + \
'secret to the service account.'}
# Create it here
api_rval = oc_sa_sec.put()
if api_rval['returncode'] != 0:
return {'failed': True, 'msg': api_rval}
# return the created object
api_rval = oc_sa_sec.get()
if api_rval['returncode'] != 0:
return {'failed': True, 'msg': api_rval}
return {'changed': True, 'results': api_rval, 'state': "present"}
return {'changed': False, 'results': api_rval, 'state': "present"}
return {'failed': True,
'changed': False,
'msg': 'Unknown state passed. %s' % state,
'state': 'unknown'}
# -*- -*- -*- End included fragment: class/oc_serviceaccount_secret.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: ansible/oc_serviceaccount_secret.py -*- -*- -*-
def main():
'''
ansible oc module to manage service account secrets.
'''
module = AnsibleModule(
argument_spec=dict(
kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'),
state=dict(default='present', type='str',
choices=['present', 'absent', 'list']),
debug=dict(default=False, type='bool'),
namespace=dict(default=None, required=True, type='str'),
secret=dict(default=None, type='str'),
service_account=dict(required=True, type='str'),
),
supports_check_mode=True,
)
rval = OCServiceAccountSecret.run_ansible(module.params, module.check_mode)
if 'failed' in rval:
module.fail_json(**rval)
module.exit_json(**rval)
if __name__ == '__main__':
main()
# -*- -*- -*- End included fragment: ansible/oc_serviceaccount_secret.py -*- -*- -*-
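# Editor's note: an illustrative params dict for run_ansible() above, mirroring
# the argument_spec in main(); the values are hypothetical.
_EXAMPLE_PARAMS = {
    'kubeconfig': '/etc/origin/master/admin.kubeconfig',
    'state': 'present',
    'debug': False,
    'namespace': 'default',
    'secret': 'builder-token',
    'service_account': 'builder',
}
# OCServiceAccountSecret.run_ansible(_EXAMPLE_PARAMS, check_mode=False)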
|
import unittest
from mock import patch
from exporters.exporter_config import ExporterConfig
from exporters.persistence.base_persistence import BasePersistence
from exporters.persistence.pickle_persistence import PicklePersistence
from exporters.utils import remove_if_exists
from .utils import valid_config_with_updates, meta
class BasePersistenceTest(unittest.TestCase):
def setUp(self):
self.config = valid_config_with_updates({
'exporter_options': {
'log_level': 'DEBUG',
'logger_name': 'export-pipeline',
'resume': False,
}
})
def test_get_last_position(self):
exporter_config = ExporterConfig(self.config)
with self.assertRaises(NotImplementedError):
persistence = BasePersistence(exporter_config.persistence_options, meta())
persistence.get_last_position()
def test_commit_position(self):
exporter_config = ExporterConfig(self.config)
with self.assertRaises(NotImplementedError):
persistence = BasePersistence(exporter_config.persistence_options, meta())
persistence.commit_position(1)
def test_generate_new_job(self):
exporter_config = ExporterConfig(self.config)
with self.assertRaises(NotImplementedError):
persistence = BasePersistence(exporter_config.persistence_options, meta())
persistence.generate_new_job()
def test_delete_instance(self):
exporter_config = ExporterConfig(self.config)
with self.assertRaises(NotImplementedError):
persistence = BasePersistence(exporter_config.persistence_options, meta())
persistence.close()
class PicklePersistenceTest(unittest.TestCase):
def setUp(self):
self.config = valid_config_with_updates({
'exporter_options': {
'log_level': 'DEBUG',
'logger_name': 'export-pipeline',
'resume': False,
},
'persistence': {
'name': 'exporters.persistence.pickle_persistence.PicklePersistence',
'options': {'file_path': '/tmp'}
}
})
@patch('pickle.dump')
@patch('uuid.uuid4')
def test_create_persistence_job(self, mock_uuid, mock_pickle):
file_name = '1'
        mock_pickle.return_value = True
mock_uuid.return_value = file_name
exporter_config = ExporterConfig(self.config)
try:
persistence = PicklePersistence(
exporter_config.persistence_options, meta())
self.assertIsInstance(persistence, PicklePersistence)
persistence.close()
finally:
remove_if_exists('/tmp/'+file_name)
@patch('os.path.isfile', autospec=True)
@patch('__builtin__.open', autospec=True)
@patch('pickle.dump', autospec=True)
@patch('pickle.load', autospec=True)
def test_get_last_position(self, mock_load_pickle, mock_dump_pickle, mock_open, mock_is_file):
mock_dump_pickle.return_value = True
mock_is_file.return_value = True
mock_load_pickle.return_value = {'last_position': {'last_key': 10}}
exporter_config = ExporterConfig(self.config)
persistence = PicklePersistence(exporter_config.persistence_options, meta())
self.assertEqual({'last_key': 10}, persistence.get_last_position())
@patch('__builtin__.open', autospec=True)
@patch('pickle.dump', autospec=True)
@patch('uuid.uuid4', autospec=True)
def test_commit(self, mock_uuid, mock_dump_pickle, mock_open):
mock_dump_pickle.return_value = True
mock_uuid.return_value = 1
exporter_config = ExporterConfig(self.config)
persistence = PicklePersistence(exporter_config.persistence_options, meta())
self.assertEqual(None, persistence.commit_position(10))
self.assertEqual(persistence.get_metadata('commited_positions'), 1)
|
'''
:copyright: Copyright 2013 by christian.meichsner@informatik.tu-chemnitz.de, see AUTHORS.
:license: BSD, see LICENSE for details.
'''
from distutils.core import setup
setup(name='ComparareEtPendere',
version='0.9',
packages=['MultisourceHtmlFormatter'],
provides=['MultisourceHtmlFormatter'],
description='Multisource Code Formatter outputting HTML. Based on pygments',
author='Christian Meichsner',
author_email='christian.meichsner@informatik.tu-chemnitz.de',
url='https://github.com/gixxi/comparareetpendere',
license="BSD",
platforms="Tested with Python 3.1",
keywords='sourcecode formatting html output pygments python documentation',
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: BSD License',
'Topic :: Software Development :: Documentation'
],
)
|
__author__ = "Radical.Utils Development Team (Andre Merzky, Ole Weidner)"
__copyright__ = "Copyright 2013, RADICAL@Rutgers"
__license__ = "MIT"
# import utility classes
from object_cache import ObjectCache
from plugin_manager import PluginManager
from singleton import Singleton
from threads import Thread, RLock, NEW, RUNNING, DONE, FAILED
from url import Url
from dict_mixin import DictMixin, dict_merge, dict_stringexpand
from lockable import Lockable
from registry import Registry, READONLY, READWRITE
from regex import ReString, ReSult
from reporter import Reporter
from benchmark import Benchmark
from lease_manager import LeaseManager
# import utility methods
from ids import generate_id, ID_SIMPLE, ID_UNIQUE
from read_json import read_json
from read_json import read_json_str
from read_json import parse_json
from read_json import parse_json_str
from tracer import trace, untrace
from which import which
from misc import split_dburl, mongodb_connect
from misc import parse_file_staging_directives
from misc import time_diff
from get_version import get_version
# import sub-modules
# from config import Configuration, Configurable, ConfigOption, getConfig
# ------------------------------------------------------------------------------
import os
_mod_root = os.path.dirname (__file__)
with open (_mod_root + "/VERSION", "r") as _f:
    version = _f.readline ().strip ()
with open (_mod_root + "/VERSION.git", "r") as _f:
    version_detail = _f.readline ().strip ()
# ------------------------------------------------------------------------------
|
"""foodapp URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from food import views
from django.contrib.auth import views as auth_views
from django.views.generic.base import TemplateView
from django.conf.urls.static import static
from django.conf import settings
urlpatterns = [
    url(r'^$', views.server_list, name='server_list'),
    # NOTE: shadowed by the r'^$' pattern above; the first match wins.
    # url(r'^$', views.server_list1, name='server_list'),
    url(r'^log/$', auth_views.login, {'template_name': 'login.html'}, name='login'),
    url(r'^logout/$', auth_views.logout, {'template_name': 'server_list.html'}, name='logout'),
    # NOTE: shadowed by the r'^logout/$' pattern above; the first match wins.
    # url(r'^logout/$', auth_views.logout, {'next_page': '/'}, name='logout'),
url(r'^register/$', views.register, name='register'),
url(r'^pfbusi/$', views.pfbusi, name='pfbusi'),
url(r'^rest/$', views.rest, name='rest'),
]
if settings.DEBUG:
urlpatterns+= static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
# Twitter API Test
# Execute: $ python twitter_test_02.py
from twython import TwythonStreamer
# Search terms (Note: hashtags only right now!)
terms = ['#fml', '#blessed', '#diy', '#guncontrol', '#obamacare']
# Twitter authentication
APP_KEY = 'li8wn8Tb7xBifCnNIgyqUw'
APP_SECRET = 'vcwq36w4C4VXamlqWBDKM2E8etsOoangDoMhxNDU'
OAUTH_TOKEN = '1969690717-rGw3VkRQ8IyL4OcPWtv5Y2CeBdVn8ndJrjGKraI'
OAUTH_TOKEN_SECRET = 'KO7YIFMKWKaYTtz2zEyaSy044ixj5kIbWrDtZZL96ly0H'
# Twitter streamer class
class MyStreamer(TwythonStreamer):
def on_success(self, data):
if 'text' in data:
msg = data['text'].encode('utf-8')
print msg
for hashtag in data['entities']['hashtags']:
for term in score_terms:
if term == hashtag['text'].lower():
print term
i = score_terms.index(term)
scoreboard[1][i] += 1
print scoreboard
print '\n'
def on_error(self, status_code, data):
print status_code, data
# Setup tracking and scoring term arrays
track_terms = [''.join([x,' ']) for x in terms]
print 'Tracking: ', track_terms
score_terms = [x[1:].lower() for x in terms]
print 'Scoring: ', score_terms
# Setup scoreboard
scoreboard = [terms, [0]*len(terms)]
print scoreboard
print '\n'
# Setup streamer and filter(s)
stream = MyStreamer(APP_KEY, APP_SECRET,
OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
stream.statuses.filter(track=track_terms)
|
from unittest import mock
from .. import *
from bfg9000.platforms import platform_name, target, posix
class TestTargetPlatform(TestCase):
def setUp(self):
platform_name._reset()
def tearDown(self):
platform_name._reset()
def test_default(self):
with mock.patch('platform.system', return_value='Linux'), \
mock.patch('platform.machine', return_value='i686'): # noqa
platform = target.platform_info()
self.assertEqual(platform.name, 'linux')
self.assertEqual(platform.species, 'linux')
self.assertEqual(platform.genus, 'linux')
self.assertEqual(platform.family, 'posix')
self.assertEqual(platform.triplet, 'i686-pc-linux-gnu')
self.assertEqual(platform.object_format, 'elf')
def test_cygwin(self):
with mock.patch('platform.machine', return_value='x86_64'):
platform = target.platform_info('cygwin')
self.assertEqual(platform.name, 'cygwin')
self.assertEqual(platform.species, 'cygwin')
self.assertEqual(platform.genus, 'cygwin')
self.assertEqual(platform.family, 'posix')
self.assertEqual(platform.triplet, 'x86_64-unknown-windows-cygnus')
self.assertEqual(platform.object_format, 'coff')
windows = target.platform_info('cygwin')
posix = target.platform_info('linux')
for i in ('object_format', 'executable_ext', 'shared_library_ext',
'has_import_library', 'has_versioned_library'):
self.assertEqual(getattr(platform, i), getattr(windows, i))
for i in ('has_frameworks', 'install_dirs'):
self.assertEqual(getattr(platform, i), getattr(posix, i))
def test_darwin(self):
with mock.patch('platform.machine', return_value='x86_64'):
platform = target.platform_info('macos')
self.assertEqual(platform.name, 'macos')
self.assertEqual(platform.species, 'macos')
self.assertEqual(platform.genus, 'darwin')
self.assertEqual(platform.family, 'posix')
self.assertEqual(platform.triplet, 'x86_64-apple-darwin')
self.assertEqual(platform.object_format, 'mach-o')
def test_linux(self):
with mock.patch('platform.machine', return_value='x86_64'):
platform = target.platform_info('linux')
self.assertEqual(platform.name, 'linux')
self.assertEqual(platform.species, 'linux')
self.assertEqual(platform.genus, 'linux')
self.assertEqual(platform.family, 'posix')
self.assertEqual(platform.triplet, 'x86_64-unknown-linux-gnu')
self.assertEqual(platform.object_format, 'elf')
def test_android(self):
with mock.patch('platform.machine', return_value='arm'):
platform = target.platform_info('android')
self.assertEqual(platform.name, 'android')
self.assertEqual(platform.species, 'android')
self.assertEqual(platform.genus, 'linux')
self.assertEqual(platform.family, 'posix')
self.assertEqual(platform.triplet, 'arm-unknown-linux-android')
self.assertEqual(platform.object_format, 'elf')
def test_windows(self):
with mock.patch('platform.machine', return_value='x86_64'):
platform = target.platform_info('winnt')
self.assertEqual(platform.name, 'winnt')
self.assertEqual(platform.species, 'winnt')
self.assertEqual(platform.genus, 'winnt')
self.assertEqual(platform.family, 'windows')
self.assertEqual(platform.triplet, 'x86_64-unknown-win32')
self.assertEqual(platform.object_format, 'coff')
def test_unknown(self):
with mock.patch('platform.machine', return_value='x86_64'):
platform = target.platform_info('onosendai')
self.assertEqual(platform.name, 'onosendai')
self.assertEqual(platform.species, 'onosendai')
self.assertEqual(platform.genus, 'onosendai')
self.assertEqual(platform.family, 'posix')
self.assertEqual(platform.object_format, 'elf')
self.assertEqual(platform.triplet, 'x86_64-unknown-onosendai')
def test_equality(self):
a = posix.PosixTargetPlatform('linux', 'linux', 'x86_64')
b = posix.PosixTargetPlatform('linux', 'linux', 'x86_64')
c = posix.PosixTargetPlatform('linux', 'android', 'arm')
self.assertTrue(a == b)
self.assertFalse(a != b)
self.assertFalse(a == c)
self.assertTrue(a != c)
def test_json(self):
plat = posix.PosixTargetPlatform('linux', 'linux', 'x86_64')
json = plat.to_json()
self.assertEqual(target.from_json(json), plat)
|
import os
import base64
import time
from random import randrange
from mock import patch, MagicMock
from twisted.trial import unittest
from twisted.internet.defer import inlineCallbacks, returnValue
from twisted.internet import reactor, protocol
from twisted.internet.udp import Port
from twistar.registry import Registry
from floranet.lora.wan import LoraWAN, Rxpk
from floranet.netserver import NetServer
import floranet.lora.mac as lora_mac
from floranet.models.model import Model
from floranet.models.config import Config
from floranet.models.gateway import Gateway
from floranet.models.device import Device
from floranet.models.application import Application
import floranet.test.unit.mock_dbobject as mockDBObject
import floranet.test.unit.mock_model as mockModel
from floranet.test.unit.mock_reactor import reactorCall
class NetServerTest(unittest.TestCase):
@inlineCallbacks
def setUp(self):
"""Test setup. Creates a new NetServer
Use factory default configuration.
"""
Registry.getConfig = MagicMock(return_value=None)
# Get factory default configuration
with patch.object(Model, 'save', MagicMock()):
config = yield Config.loadFactoryDefaults()
self.server = NetServer(config)
def _test_device(self):
"""Create a test device object """
return Device(
deveui=int('0x0F0E0E0D00010209', 16),
devaddr=int('0x06000001', 16),
appeui=int('0x0A0B0C0D0A0B0C0D', 16),
nwkskey=int('0xAEB48D4C6E9EA5C48C37E4F132AA8516', 16),
appskey=int('0x7987A96F267F0A86B739EED480FC2B3C', 16),
adr= True,
tx_chan=3,
tx_datr='SF7BW125',
gw_addr='192.168.1.125',
enabled = True)
def test_checkDevaddr(self):
"""Test checkDevaddr method"""
# Check valid address
device = self._test_device()
result = self.server.checkDevaddr(device.devaddr)
self.assertTrue(result)
# Check invalid address
devaddr = int('0x11223344', 16)
result = self.server.checkDevaddr(devaddr)
self.assertFalse(result)
@inlineCallbacks
def test_getOTAADevAddrs(self):
"""Test getOTAADevAddrs method"""
device = self._test_device()
mockDBObject.return_value = device
expected = [[], [device.devaddr]]
results = []
# Test when no devices are found
with patch.object(Device, 'find', classmethod(mockDBObject.findFail)):
result = yield self.server._getOTAADevAddrs()
results.append(result)
# Test when one device is found
with patch.object(Device, 'find', classmethod(mockDBObject.findOne)):
result = yield self.server._getOTAADevAddrs()
results.append(result)
self.assertEqual(expected, results)
@inlineCallbacks
def test_getFreeOTAAddress(self):
expected = [self.server.config.otaastart,
self.server.config.otaastart+1,
self.server.config.otaaend, None]
results = []
# Test with empty OTA device list
# Mock the server method to return the devaddr list
with patch.object(self.server, '_getOTAADevAddrs',
MagicMock(return_value=[])):
result = yield self.server._getFreeOTAAddress()
results.append(result)
# Test with one OTA device
with patch.object(self.server, '_getOTAADevAddrs', MagicMock(
return_value=[self.server.config.otaastart])):
result = yield self.server._getFreeOTAAddress()
results.append(result)
# Test with last address only available
with patch.object(self.server, '_getOTAADevAddrs',MagicMock(
return_value=xrange(self.server.config.otaastart,
self.server.config.otaaend))):
result = yield self.server._getFreeOTAAddress()
results.append(result)
# Test with no address available
with patch.object(self.server, '_getOTAADevAddrs',MagicMock(
return_value=xrange(self.server.config.otaastart,
self.server.config.otaaend + 1))):
result = yield self.server._getFreeOTAAddress()
results.append(result)
self.assertEqual(expected, results)
@inlineCallbacks
def test_getActiveDevice(self):
# Include for coverage. We are essentially testing a returnValue() call.
device = self._test_device()
mockDBObject.return_value = device
expected = device.deveui
with patch.object(Device, 'find', classmethod(mockDBObject.findSuccess)):
result = yield self.server._getActiveDevice(device.devaddr)
self.assertEqual(expected, result.deveui)
def test_checkDuplicateMessage(self):
m = lora_mac.MACDataMessage()
m.mic = 1111
self.server.config.duplicateperiod = 10
expected = [True, False]
result = []
now = time.time()
# Test a successful find of the duplicate
for i in (1,10):
self.server.message_cache.append((randrange(1,1000), now - i))
self.server.message_cache.append(
(m.mic, now - self.server.config.duplicateperiod + 1))
result.append(self.server._checkDuplicateMessage(m))
# Test an unsuccessful find of the duplicate - the message's
# cache period has expired.
self.server.message_cache.remove(
(m.mic, now - self.server.config.duplicateperiod + 1))
self.server.message_cache.append(
(m.mic, now - self.server.config.duplicateperiod - 1))
result.append(self.server._checkDuplicateMessage(m))
self.assertEqual(expected, result)
def test_cleanMessageCache(self):
self.server.config.duplicateperiod = 10
# Create 10 cache entries, remove 5
now = time.time()
for i in range(1,21,2):
self.server.message_cache.append((i, now - i))
expected = 5
self.server._cleanMessageCache()
result = len(self.server.message_cache)
self.assertEqual(expected, result)
def test_manageMACCommandQueue(self):
self.server.config.macqueuelimit = 10
# Create 10 cache entries, remove 5
now = time.time()
for i in range(1,21,2):
self.server.commands.append((int(now - i), i, lora_mac.LinkCheckAns()))
expected = 5
self.server._manageMACCommandQueue()
result = len(self.server.commands)
self.assertEqual(expected, result)
@inlineCallbacks
def test_processADRRequests(self):
device = self._test_device()
device.snr_average = 3.5
device.adr_datr = None
# Test we set adr_datr device attribute properly
expected = ['SF9BW125', False]
results = []
mockDBObject.return_value = [device]
mockModel.mock_object = device
with patch.object(Device, 'all', classmethod(mockDBObject.all)), \
patch.object(device, 'update', mockModel.update), \
patch.object(self.server, '_sendLinkADRRequest', MagicMock()):
# Remove any delays
self.server.config.adrmessagetime = 0
yield self.server._processADRRequests()
results.append(device.adr_datr)
results.append(self.server.adrprocessing)
self.assertEqual(expected, results)
def _createCommands(self):
datarate = 'SF7BW125'
chmask = int('FF', 16)
return [lora_mac.LinkCheckAns(), lora_mac.LinkADRReq(datarate, 0, chmask, 6, 0)]
def test_queueMACCommand(self):
device = self._test_device()
commands = self._createCommands()
expected = [2, lora_mac.LINKCHECKANS, lora_mac.LINKADRREQ]
for c in commands:
self.server._queueMACCommand(device.deveui, c)
result = [len(self.server.commands), self.server.commands[0][2].cid,
self.server.commands[1][2].cid]
self.assertEqual(expected, result)
def test_dequeueMACCommand(self):
device = self._test_device()
commands = self._createCommands()
for c in commands:
self.server._queueMACCommand(device.deveui, c)
self.server._dequeueMACCommand(device.deveui, commands[1])
expected = [1, lora_mac.LINKCHECKANS]
result = [len(self.server.commands), self.server.commands[0][2].cid]
self.assertEqual(expected, result)
def test_scheduleDownlinkTime(self):
offset = 10
tmst = randrange(0, 4294967295 - 10000000)
expected = [tmst + 10000000, 5000000]
result = []
result.append(self.server._scheduleDownlinkTime(tmst, offset))
tmst = 4294967295 - 5000000
result.append(self.server._scheduleDownlinkTime(tmst, offset))
self.assertEqual(expected, result)
def test_txpkResponse(self):
self.server.lora = LoraWAN(self)
self.server.lora.addGateway(Gateway(host='192.168.1.125', name='Test',
enabled=True, power=26))
tmst = randrange(0, 4294967295)
rxpk = Rxpk(tmst=tmst, chan=3, freq=915.8, datr='SF7BW125',
data="n/uSwM0LIED8X6QV0mJMjC6oc2HOWFpCfmTry", size=54)
device = self._test_device()
device.rx = self.server.band.rxparams((rxpk.chan, rxpk.datr), join=False)
gateway = self.server.lora.gateway(device.gw_addr)
expected = [(True, device.rx[1]['freq'], device.rx[1]['datr']),
(True, device.rx[2]['freq'], device.rx[2]['datr']),
(tmst + 1000000, device.rx[1]['freq'], device.rx[1]['datr']),
(tmst + 2000000, device.rx[2]['freq'], device.rx[2]['datr'])]
result = []
txpk = self.server._txpkResponse(device, rxpk.data, gateway, tmst, immediate=True)
for i in range(1,3):
result.append((txpk[i].imme, txpk[i].freq, txpk[i].datr))
txpk = self.server._txpkResponse(device, rxpk.data, gateway, tmst, immediate=False)
for i in range(1,3):
result.append((txpk[i].tmst, txpk[i].freq, txpk[i].datr))
self.assertEqual(expected, result)
    @inlineCallbacks
    def _processJoinRequest(self, request):
"""Called by test_processJoinRequest_pass and
test_processJoinRequest_fail"""
device = self._test_device()
app = self.server.config.apps[0]
# Passing join request
request = base64.b64decode("AA0MCwoNDAsKAwIBAA0ODg9IklIgzCM=")
msg = lora_mac.MACMessage.decode(request)
result = yield self.server._processJoinRequest(msg, app, device)
self.assertTrue(result)
# Failing join request
request = base64.b64decode("AA0MCwoNDAsKAwIBAA0ODg9IklIgzCX=")
msg = lora_mac.MACMessage.decode(request)
result = yield self.server._processJoinRequest(msg, app, device)
self.assertFalse(result)
|
#coding:utf-8
"""
Django settings for mywebsite project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'mx=_n)ji!d!+llfrhkwljbh9*0$l=4io@u0mchg4w#1w77xvk#'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'suit', # Django admin theme
#'django_admin_bootstrapped', # a Bootstrap-styled admin backend
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
#apps
'blog',
#'markdown_deux', #markdown support
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'mywebsite.urls'
WSGI_APPLICATION = 'mywebsite.wsgi.application'
'''
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
# MySQL configuration
# production database settings
MYSQL_HOST = 'w.rdc.sae.sina.com.cn'
MYSQL_PORT = '3307'
MYSQL_USER = '02z4loxk1y'
MYSQL_PASS = 'iky3xmxxz4jwk1j401lzmzlzmhmykyll05kxkwmx'
MYSQL_DB = 'app_zhuzhezhe'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': MYSQL_DB,
'USER': MYSQL_USER,
'PASSWORD': MYSQL_PASS,
'HOST': MYSQL_HOST,
'PORT': MYSQL_PORT,
}
}
'''
# sqlite3 configuration
'''
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# MySQL configuration
'''
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'mywebsite',
'HOST': '127.0.0.1',
'PORT': '3306',
'USER': 'root',
'PASSWORD': 'password',
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'zh-hans'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
# settings related to static files
STATIC_URL = '/static/'
STATIC_ROOT = 'static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, "static"),
'/blog/static/',
)
#STATIC_ROOT = "/blog/static/"
TEMPLATE_DIRS = (os.path.join(BASE_DIR, 'templates'),)
#other
DAB_FIELD_RENDERER = 'django_admin_bootstrapped.renderers.BootstrapFieldRenderer'
# admin backend configuration
from django.conf.global_settings import TEMPLATE_CONTEXT_PROCESSORS as TCP
TEMPLATE_CONTEXT_PROCESSORS = TCP + (
'django.core.context_processors.request',
)
|
"""
pyparticleprobe.dsd_calcs.attenuation
=====================================
A grouping of functions that calculates attenuation characteristics
7 Feb 2014 - Adapted by Nick Guy NOAA/NSSL/WRDD, NRC
"""
# HISTORY::
# 3 Feb 2014 - Nick Guy. NOAA/NSSL, NRC (nick.guy@noaa.gov)
#
# NOTES::
# Arrays seem to be able to be passed, but make sure they are float arrays
# (e.g. created with numpy) and not lists
#
# FUNCTIONS::
# abs_coeff - Absorption coefficient
# scat_coeff - Scattering coefficient
# ext_coeff - Extinction coefficient
# spec_atten - Specific attenuation
#-------------------------------------------------------------------
# Load the needed packages
import numpy as np
#===============================================================
# BEGIN FUNCTIONS
#===============================================================
def abs_coeff(D,lam,m):
"""Absorption coefficient of a spherical particle
From Doviak and Zrnic (1993), Eqn 3.14a or Battan (1973), Eqn 6.6
INPUT::
D = Particle diameter [m]
lam = Radar wavelength [m]
m = Complex refractive index [unitless]
OUTPUT::
Qa = Absorption coefficient
USAGE::
Qa = abs_coeff(D,lam,m)
NOTES::
The default is for a dielectric factor value for water. This can be
changed by the user, e.g. K=0.208 for particle sizes of equivalent melted
diameters or K=0.176 for particle sizes of equivalent ice spheres.
"""
#---------------------------------------
Km = (m**2 - 1) / (m**2 + 2)
Qa = (np.pi**2 * D**3 / lam) * np.imag(-1 * Km)
return Qa
#====================================================
def scat_coeff(D,lam,m):
"""Scattering coefficient of a spherical particle
From Doviak and Zrnic (1993), Eqn 3.14b or Battan (1973), Eqn 6.5
INPUT::
D = Particle diameter [m]
lam = Radar wavelength [m]
m = Complex refractive index [unitless]
OUTPUT::
Qs = Scattering coefficient
USAGE::
Qs = scat_coeff(D,lam,m)
"""
#---------------------------------------
Km = (m**2 - 1) / (m**2 + 2)
Qs = (2 * np.pi**5 * D**6 / (3 * lam**4) * (np.absolute(Km))**2)
return Qs
#====================================================
def ext_coeff(D,lam,m):
"""Extinction coefficient of a spherical particle
From Doviak and Zrnic (1993), Eqn 3.14b or Battan (1973), Eqn 6.5
INPUT::
D = Particle diameter [m]
lam = Radar wavelength [m]
m = Complex refractive index [unitless]
OUTPUT::
Qe = Extinction coefficient
USAGE::
Qe = ext_coeff(D,lam,m)
NOTES::
The default is for a dielectric factor value for water. This can be
changed by the user, e.g. K=0.208 for particle sizes of equivalent melted
diameters or K=0.176 for particle sizes of equivalent ice spheres.
"""
#---------------------------------------
Qa = abs_coeff(D,lam,m)
Qs = scat_coeff(D,lam,m)
Qe = Qa + Qs
return Qe
#====================================================
def spec_atten(Nd,Diam,lam,m):
"""Extinction coefficient of a spherical particle
From Doviak and Zrnic (1993), Eqn 3.15
INPUT::
Nd = Drop concentration as a function of drop size [m^-3]
Diam = Drop size diameter [m]
lam = Radar wavelength [m]
m = Complex refractive index [unitless]
OUTPUT::
K = Specific attenuation [dB/km]
USAGE::
K = spec_atten(Nd,Diam,lam,m)
NOTES::
The default is for a dielectric factor value for water. This can be
changed by the user, e.g. K=0.208 for particle sizes of equivalent melted
diameters or K=0.176 for particle sizes of equivalent ice spheres.
"""
#---------------------------------------
Qa = abs_coeff(Diam,lam,m)
Qs = scat_coeff(Diam,lam,m)
Qe = Qa + Qs
# Calculate specific attenuation
K = 4.34e3 * Nd * Qe
return K
#====================================================
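# Example usage (a minimal sketch; the wavelength, refractive index and
# drop spectrum below are illustrative values only, not from any dataset):
if __name__ == '__main__':
    D = np.array([0.5e-3, 1.0e-3, 2.0e-3])  # drop diameters [m], as the Q functions expect
    Nd = np.array([1000., 500., 10.])  # drop concentrations [m^-3]
    lam = 0.1  # radar wavelength [m] (~S-band)
    m = 8.0 + 1.0j  # complex refractive index of water (illustrative)
    print(spec_atten(Nd, D, lam, m))  # specific attenuation [dB/km]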
|
"""
This example shows the trade-off (Pareto frontier) of deficit against cost by altering a reservoir control curve.
Two types of control curve are possible. The first is a monthly control curve containing one value for each
month. The second is a harmonic control curve with cosine terms around a mean. Both Parameter objects
are part of pywr.parameters.
Inspyred is used in this example to perform a multi-objective optimisation using the NSGA-II algorithm. The
script should be run twice (once with --harmonic) to generate results for both types of control curve. Following
this, --plot can be used to generate an animation and PNG of the Pareto frontier.
"""
import os
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import datetime
import inspyred
from pywr.core import Model, Input, Output, Link, Storage
from pywr.parameters import ArrayIndexedParameter, MonthlyProfileParameter, AnnualHarmonicSeriesParameter
from pywr.parameters.control_curves import ControlCurveParameter
from pywr.recorders import TotalDeficitNodeRecorder, TotalFlowNodeRecorder, AggregatedRecorder
from pywr.optimisation.moea import InspyredOptimisationModel
def create_model(harmonic=True):
# import flow timeseries for catchments
flow = pd.read_csv(os.path.join('data', 'thames_stochastic_flow.gz'))
flow['Date'] = flow['Date'].apply(pd.to_datetime)
flow.set_index('Date', inplace=True)
# resample input to weekly average
flow = flow.resample('7D', how='mean')
model = InspyredOptimisationModel(
solver='glpk',
start=flow.index[0],
end=flow.index[365*10], # roughly 10 years
timestep=datetime.timedelta(7), # weekly time-step
)
flow_parameter = ArrayIndexedParameter(model, flow['flow'].values)
catchment1 = Input(model, 'catchment1', min_flow=flow_parameter, max_flow=flow_parameter)
catchment2 = Input(model, 'catchment2', min_flow=flow_parameter, max_flow=flow_parameter)
reservoir1 = Storage(model, 'reservoir1', min_volume=3000, max_volume=20000, initial_volume=16000)
reservoir2 = Storage(model, 'reservoir2', min_volume=3000, max_volume=20000, initial_volume=16000)
if harmonic:
control_curve = AnnualHarmonicSeriesParameter(model, 0.5, [0.5], [0.0], mean_upper_bounds=1.0, amplitude_upper_bounds=1.0)
else:
control_curve = MonthlyProfileParameter(model, np.array([0.0]*12), lower_bounds=0.0, upper_bounds=1.0)
control_curve.is_variable = True
controller = ControlCurveParameter(model, reservoir1, control_curve, [0.0, 10.0])
transfer = Link(model, 'transfer', max_flow=controller, cost=-500)
demand1 = Output(model, 'demand1', max_flow=45.0, cost=-101)
demand2 = Output(model, 'demand2', max_flow=20.0, cost=-100)
river1 = Link(model, 'river1')
river2 = Link(model, 'river2')
# compensation flows from reservoirs
compensation1 = Link(model, 'compensation1', max_flow=5.0, cost=-9999)
compensation2 = Link(model, 'compensation2', max_flow=5.0, cost=-9998)
terminator = Output(model, 'terminator', cost=1.0)
catchment1.connect(reservoir1)
catchment2.connect(reservoir2)
reservoir1.connect(demand1)
reservoir2.connect(demand2)
reservoir2.connect(transfer)
transfer.connect(reservoir1)
reservoir1.connect(river1)
reservoir2.connect(river2)
river1.connect(terminator)
river2.connect(terminator)
reservoir1.connect(compensation1)
reservoir2.connect(compensation2)
compensation1.connect(terminator)
compensation2.connect(terminator)
r1 = TotalDeficitNodeRecorder(model, demand1)
r2 = TotalDeficitNodeRecorder(model, demand2)
r3 = AggregatedRecorder(model, [r1, r2], agg_func="mean")
r3.is_objective = 'minimise'
r4 = TotalFlowNodeRecorder(model, transfer)
r4.is_objective = 'minimise'
return model
def moea_main(prng=None, display=False, harmonic=False):
from random import Random
from time import time
if prng is None:
prng = Random()
prng.seed(time())
script_name = os.path.splitext(os.path.basename(__file__))[0]
stats_file = open('{}-{}-statistics-file.csv'.format(script_name, 'harmonic' if harmonic else 'monthly'), 'w')
individuals_file = open('{}-{}-individuals-file.csv'.format(script_name, 'harmonic' if harmonic else 'monthly'), 'w')
problem = create_model(harmonic=harmonic)
problem.setup()
ea = inspyred.ec.emo.NSGA2(prng)
ea.variator = [inspyred.ec.variators.blend_crossover,
inspyred.ec.variators.gaussian_mutation]
ea.terminator = inspyred.ec.terminators.generation_termination
ea.observer = [
inspyred.ec.observers.file_observer,
]
final_pop = ea.evolve(generator=problem.generator,
evaluator=problem.evaluator,
pop_size=25,
bounder=problem.bounder,
maximize=False,
max_generations=50,
statistics_file=stats_file,
individuals_file=individuals_file)
# Save the final population archive to CSV files
stats_file = open('{}-{}-final-statistics-file.csv'.format(script_name, 'harmonic' if harmonic else 'monthly'), 'w')
individuals_file = open('{}-{}-final-individuals-file.csv'.format(script_name, 'harmonic' if harmonic else 'monthly'), 'w')
inspyred.ec.observers.file_observer(ea.archive, 'final', None,
args={'statistics_file': stats_file, 'individuals_file': individuals_file})
if display:
final_arc = ea.archive
print('Best Solutions: \n')
for f in final_arc:
print(f)
x = []
y = []
for f in final_arc:
x.append(f.fitness[0])
y.append(f.fitness[1])
plt.scatter(x, y, c='b')
plt.xlabel('Total demand deficit [Ml/d]')
plt.ylabel('Total Transferred volume [Ml/d]')
title = 'Harmonic Control Curve' if harmonic else 'Monthly Control Curve'
plt.savefig('{0} Example ({1}).pdf'.format(ea.__class__.__name__, title), format='pdf')
plt.show()
return ea
def load_individuals(filename):
""" Read an inspyred individuals file in to two pandas.DataFrame objects.
There is one DataFrame for the objectives and another for the variables.
"""
import ast
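# Each row is expected to be a literal tuple written by inspyred's
# file_observer, roughly: (generation, individual_id, [objectives...],
# [variables...]) -- a sketch; the exact layout is defined by inspyred.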
index = []
all_objs = []
all_vars = []
with open(filename, 'r') as f:
for row in f.readlines():
gen, pop_id, objs, vars = ast.literal_eval(row.strip())
index.append((gen, pop_id))
all_objs.append(objs)
all_vars.append(vars)
index = pd.MultiIndex.from_tuples(index, names=['generation', 'individual'])
return pd.DataFrame(all_objs, index=index), pd.DataFrame(all_vars, index=index)
def animate_generations(objective_data, colors):
"""
Animate the Pareto frontier plot over the saved generations.
"""
import matplotlib.animation as animation
def update_line(gen, dfs, ax, xmax, ymax):
ax.cla()
artists = []
for i in range(gen+1):
for c, key in zip(colors, sorted(dfs.keys())):
df = dfs[key]
scat = ax.scatter(df.loc[i][0], df.loc[i][1], alpha=0.8**(gen-i), color=c,
label=key if i == gen else None, clip_on=True, zorder=100)
artists.append(scat)
ax.set_title('Generation: {:d}'.format(gen))
ax.set_xlabel('Total demand deficit [Ml/d]')
ax.set_ylabel('Total Transferred volume [Ml/d]')
ax.set_xlim(0, xmax)
ax.set_ylim(0, ymax)
ax.legend()
ax.grid()
return artists
fig, ax = plt.subplots(figsize=(10, 10))
last_gen = list(objective_data.values())[0].index[-1][0]
last_gen = int(last_gen)
xmax = max(df.loc[last_gen][0].max() for df in objective_data.values())
ymax = max(df.loc[last_gen][1].max() for df in objective_data.values())
line_ani = animation.FuncAnimation(fig, update_line, last_gen+1,
fargs=(objective_data, ax, xmax, ymax), interval=400, repeat=False)
line_ani.save('generations.mp4', bitrate=1024,)
fig.savefig('generations.png')
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--harmonic', action='store_true', help='Use a harmonic control curve.')
parser.add_argument('--plot', action='store_true', help='Plot the Pareto frontier.')
args = parser.parse_args()
if args.plot:
objs, vars = {}, {}
for cctype in ('monthly', 'harmonic'):
objs[cctype], vars[cctype] = load_individuals('two_reservoir_moea-{}-individuals-file.csv'.format(cctype))
animate_generations(objs, ('b', 'r'))
plt.show()
else:
moea_main(display=True, harmonic=args.harmonic)
|
from __future__ import print_function
'''
qobuz.exception
~~~~~~~~~~~~~~~
:part_of: xbmc-qobuz
:copyright: (c) 2012 by Joachim Basmaison, Cyril Leclerc
:license: GPLv3, see LICENSE for more details.
'''
import sys
import pprint
import traceback
class QobuzXbmcError(Exception):
def __init__(self, **ka):
if 'additional' not in ka or ka['additional'] is None:
ka['additional'] = ''
if ('who' not in ka) or ('what' not in ka):
raise Exception(
'QobuzXbmcError', 'Missing constructor arguments (who|what)')
nl = "\n"
msg = "[QobuzXbmcError]" + nl
msg += " - who : " + pprint.pformat(ka['who']) + nl
msg += " - what : " + ka['what'] + nl
msg += " - additional : " + repr(ka['additional']) + nl
# msg += " - type : " + self.exc_type + nl
# msg += " - value : " + self.exc_value + nl
msg += " - Stack : " + nl
print("%s" % msg, file=sys.stderr)
print("%s" % traceback.print_exc(10), file=sys.stderr)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import django.utils.timezone
import django_extensions.db.fields
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('void', '0009_auto_20151102_2135'),
]
operations = [
migrations.CreateModel(
name='ProxyMapping',
fields=[
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True, serialize=False)),
('created', django_extensions.db.fields.CreationDateTimeField(default=django.utils.timezone.now, verbose_name='created', blank=True, editable=False)),
('modified', django_extensions.db.fields.ModificationDateTimeField(default=django.utils.timezone.now, verbose_name='modified', blank=True, editable=False)),
('user_uri', models.URLField()),
('proxy_resource_uri', models.URLField()),
('skos_concept_uri', models.URLField()),
('object_id', models.PositiveIntegerField(blank=True, null=True)),
('mapping_type', models.ForeignKey(blank=True, to='contenttypes.ContentType', null=True)),
],
options={
'verbose_name': 'Proxy Mapping',
'verbose_name_plural': 'Proxy Mappings',
},
),
migrations.CreateModel(
name='ProxyResource',
fields=[
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True, serialize=False)),
('created', django_extensions.db.fields.CreationDateTimeField(default=django.utils.timezone.now, verbose_name='created', blank=True, editable=False)),
('modified', django_extensions.db.fields.ModificationDateTimeField(default=django.utils.timezone.now, verbose_name='modified', blank=True, editable=False)),
('proxy_uri', models.URLField(unique=True)),
('frequency', models.IntegerField(default=0)),
('label', models.TextField()),
('language', models.CharField(max_length=26, blank=True, null=True)),
],
options={
'verbose_name': 'Proxy Resource',
'verbose_name_plural': 'Proxy Resources',
},
),
migrations.CreateModel(
name='ProxyResourceField',
fields=[
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True, serialize=False)),
('created', django_extensions.db.fields.CreationDateTimeField(default=django.utils.timezone.now, verbose_name='created', blank=True, editable=False)),
('modified', django_extensions.db.fields.ModificationDateTimeField(default=django.utils.timezone.now, verbose_name='modified', blank=True, editable=False)),
('property_uri', models.URLField()),
('search_label', models.CharField(max_length=56)),
('dataset_uri', models.URLField()),
],
options={
'verbose_name': 'Proxy Resource Field',
'verbose_name_plural': 'Proxy Resource Field',
},
),
migrations.AlterField(
model_name='dataset',
name='created',
field=django_extensions.db.fields.CreationDateTimeField(default=django.utils.timezone.now, verbose_name='created', blank=True, editable=False),
),
migrations.AlterField(
model_name='dataset',
name='modified',
field=django_extensions.db.fields.ModificationDateTimeField(default=django.utils.timezone.now, verbose_name='modified', blank=True, editable=False),
),
migrations.AlterField(
model_name='edmrecord',
name='created',
field=django_extensions.db.fields.CreationDateTimeField(default=django.utils.timezone.now, verbose_name='created', blank=True, editable=False),
),
migrations.AlterField(
model_name='edmrecord',
name='modified',
field=django_extensions.db.fields.ModificationDateTimeField(default=django.utils.timezone.now, verbose_name='modified', blank=True, editable=False),
),
migrations.AddField(
model_name='proxyresourcefield',
name='dataset',
field=models.ForeignKey(blank=True, to='void.DataSet', null=True),
),
migrations.AddField(
model_name='proxyresource',
name='dataset',
field=models.ForeignKey(to='void.DataSet'),
),
migrations.AddField(
model_name='proxyresource',
name='proxy_field',
field=models.ForeignKey(to='void.ProxyResourceField'),
),
migrations.AddField(
model_name='proxymapping',
name='proxy_resource',
field=models.ForeignKey(blank=True, to='void.ProxyResource', null=True),
),
migrations.AddField(
model_name='proxymapping',
name='user',
field=models.ForeignKey(blank=True, to=settings.AUTH_USER_MODEL, null=True),
),
migrations.AddField(
model_name='edmrecord',
name='proxy_resources',
field=models.ManyToManyField(to='void.ProxyResource'),
),
migrations.AlterUniqueTogether(
name='proxyresourcefield',
unique_together=set([('property_uri', 'dataset_uri')]),
),
]
|
"""Manage assets.
Usage:
./manage.py assets rebuild
Rebuild all known assets; this requires tracking to be enabled:
Only assets that have previously been built and tracked are
considered "known".
./manage.py assets rebuild --parse-templates
Try to find as many of the project's templates (hopefully all),
and check them for the use of assets. Rebuild all the assets
discovered in this way. If tracking is enabled, the tracking
database will be replaced by the newly found assets.
"""
import os
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from django import template
from courant.core.assets.conf import settings
from courant.core.assets.templatetags.assets import AssetsNode as AssetsNodeO
from django.templatetags.assets import AssetsNode as AssetsNodeMapped
from courant.core.assets.merge import merge
from courant.core.assets.tracker import get_tracker
try:
import jinja2
except:
jinja2 = None
else:
from django_assets.jinja.extension import AssetsExtension
# Prepare a Jinja2 environment we can later use for parsing.
# If not specified by the user, put in there at least our own
# extension, which we will need most definitely to achieve anything.
_jinja2_extensions = getattr(settings, 'ASSETS_JINJA2_EXTENSIONS', None)
if not _jinja2_extensions:
_jinja2_extensions = [AssetsExtension.identifier]
jinja2_env = jinja2.Environment(extensions=_jinja2_extensions)
def _shortpath(abspath):
"""Make an absolute path relative to the project's settings module,
which would usually be the project directory."""
b = os.path.dirname(
os.path.normpath(
os.sys.modules[settings.SETTINGS_MODULE].__file__))
p = os.path.normpath(abspath)
return p[len(os.path.commonprefix([b, p])):]
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--parse-templates', action='store_true',
help='Rebuild assets found by parsing project templates '
'instead of using the tracking database.'),
make_option('--verbosity', action='store', dest='verbosity',
default='1', type='choice', choices=['0', '1', '2'],
help='Verbosity; 0=minimal output, 1=normal output, 2=all output'),
)
help = 'Manage assets.'
args = 'subcommand'
requires_model_validation = True
def handle(self, *args, **options):
if len(args) == 0:
raise CommandError('You need to specify a subcommand')
elif len(args) > 1:
raise CommandError('Invalid number of subcommands passed: %s' %
", ".join(args))
else:
command = args[0]
options['verbosity'] = int(options['verbosity'])
if command == 'rebuild':
if options.get('parse_templates') or not get_tracker():
assets = self._parse_templates(options)
else:
assets = dict()
self._rebuild_assets(options, assets)
else:
raise CommandError('Unknown subcommand: %s' % command)
def _rebuild_assets(self, options, assets):
for output, data in assets.items():
if options.get('verbosity') >= 1:
print "Building asset: %s" % output
try:
merge(data['sources'], output, data['filter'])
except Exception, e:
print self.style.ERROR("Failed, error was: %s" % e)
def _parse_templates(self, options):
# build a list of template directories based on configured loaders
template_dirs = []
if 'django.template.loaders.filesystem.load_template_source' in settings.TEMPLATE_LOADERS:
template_dirs.extend(settings.TEMPLATE_DIRS)
if 'django.template.loaders.app_directories.load_template_source' in settings.TEMPLATE_LOADERS:
from django.template.loaders.app_directories import app_template_dirs
template_dirs.extend(app_template_dirs)
found_assets = {}
# find all template files
if options.get('verbosity') >= 1:
print "Searching templates..."
total_count = 0
for template_dir in template_dirs:
for directory, _ds, files in os.walk(template_dir):
for filename in files:
if filename.endswith('.html'):
total_count += 1
tmpl_path = os.path.join(directory, filename)
self._parse_template(options, tmpl_path, found_assets)
if options.get('verbosity') >= 1:
print "Parsed %d templates, found %d valid assets." % (
total_count, len(found_assets))
return found_assets
def _parse_template(self, options, tmpl_path, found_assets):
def try_django(contents):
# parse the template for asset nodes
try:
t = template.Template(contents)
except template.TemplateSyntaxError, e:
if options.get('verbosity') >= 2:
print self.style.ERROR('\tdjango parser failed, error was: %s'%e)
return False
else:
result = []
def _recurse_node(node):
# depending on whether the template tag is added to
# builtins, or loaded via {% load %}, it will be
# available in a different module
if isinstance(node, (AssetsNodeMapped, AssetsNodeO)):
# try to resolve this node's data; if we fail,
# then it depends on view data and we cannot
# manually rebuild it.
try:
output, files, filter = node.resolve()
except template.VariableDoesNotExist:
if options.get('verbosity') >= 2:
print self.style.ERROR('\tskipping asset %s, depends on runtime data.' % node.output)
else:
result.append((output, files, filter))
# see Django #7430
for subnode in hasattr(node, 'nodelist') \
and node.nodelist\
or []:
_recurse_node(subnode)
for node in t: # don't move into _recurse_node, ``Template`` has a .nodelist attribute
_recurse_node(node)
return result
def try_jinja(contents):
try:
t = jinja2_env.parse(contents.decode(settings.DEFAULT_CHARSET))
except jinja2.exceptions.TemplateSyntaxError, e:
if options.get('verbosity') >= 2:
print self.style.ERROR('\tjinja parser failed, error was: %s'%e)
return False
else:
result = []
def _recurse_node(node):
for node in node.iter_child_nodes():
if isinstance(node, jinja2.nodes.Call):
if isinstance(node.node, jinja2.nodes.ExtensionAttribute)\
and node.node.identifier == AssetsExtension.identifier:
filter, output, files = node.args
result.append((output.as_const(),
files.as_const(),
filter.as_const()))
for node in t.iter_child_nodes():
_recurse_node(node)
return result
if options.get('verbosity') >= 2:
print "Parsing template: %s" % _shortpath(tmpl_path)
file = open(tmpl_path, 'rb')
try:
contents = file.read()
finally:
file.close()
result = try_django(contents)
if result is False and jinja2:
result = try_jinja(contents)
if result:
for output, files, filter in result:
if output not in found_assets:
if options.get('verbosity') >= 2:
print self.style.NOTICE('\tfound asset: %s' % output)
found_assets[output] = {
'sources': files,
'filter': filter,
}
|
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Copyright (C) 2013-2014 Steven Boeing, ETHZ
# script for running a set of cases
# requires cip!
import glob
import os
import getpass
myusername=getpass.getuser()
headdir='/users/'+myusername+'/csim/fromtempl'
mycases=['bomex']
myconfs=['c1']
expglob='20150112exp_000' # which experiments to select
def intersect(a, b):
return list(set(a) & set(b))
for case in mycases:
for conf in myconfs:
# find underlying experiments
curdir=headdir+'/'+case+'/'+conf+'/'
exps=glob.glob(curdir+expglob)
subdirs=[curdir+ i for i in os.walk(curdir).next()[1]]
# make sure experiment corresponds to actual working case
for exper in intersect(exps,subdirs):
os.chdir(exper)
os.system('cip clean')
os.system('cip start')
|
# coding=utf-8
# Copyright 2021 The Trax Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""RNNs (recursive neural networks)."""
from trax import layers as tl
from trax.fastmath import numpy as jnp
def RNNLM(vocab_size,
d_model=512,
n_layers=2,
rnn_cell=tl.LSTMCell,
rnn_cell_d_state_multiplier=2,
dropout=0.1,
mode='train'):
"""Returns an RNN language model.
This model performs autoregressive language modeling:
- input: rank 2 tensor representing a batch of text strings via token IDs
plus padding markers; shape is (batch_size, sequence_length). The tensor
elements are integers in `range(vocab_size)`, and `0` values mark padding
positions.
- output: rank 3 tensor representing a batch of log-probability
distributions for each sequence position over possible token IDs;
shape is (batch_size, sequence_length, `vocab_size`).
Args:
vocab_size: Input vocabulary size -- each element of the input tensor
should be an integer in `range(vocab_size)`. These integers typically
represent token IDs from a vocabulary-based tokenizer.
d_model: Embedding depth throughout the model.
n_layers: Number of RNN layers.
rnn_cell: Type of RNN cell; must be a subclass of `Layer`.
rnn_cell_d_state_multiplier: Multiplier for feature depth of RNN cell
state.
dropout: Stochastic rate (probability) for dropping an activation value
when applying dropout.
mode: If `'predict'`, use fast inference; if `'train'` apply dropout.
Returns:
An RNN language model as a layer that maps from a tensor of tokens
to activations over a vocab set.
"""
if n_layers != 2: # TODO(jonni): Remove n_layers arg, if it can't vary?
raise ValueError(f'Number of layers must be set to 2; instead got'
f' {n_layers}.')
def MultiRNNCell():
"""Multi-layer RNN cell."""
return tl.Serial(
tl.Parallel([], tl.Split(n_items=n_layers)),
tl.SerialWithSideOutputs(
[rnn_cell(n_units=d_model) for _ in range(n_layers)]),
tl.Parallel([], tl.Concatenate(n_items=n_layers))
)
zero_state = tl.MakeZeroState( # pylint: disable=no-value-for-parameter
depth_multiplier=n_layers * rnn_cell_d_state_multiplier
)
return tl.Serial(
tl.ShiftRight(mode=mode),
tl.Embedding(vocab_size, d_model),
tl.Dropout(rate=dropout, mode=mode),
tl.Branch([], zero_state),
tl.Scan(MultiRNNCell(), axis=1, mode=mode),
tl.Select([0], n_in=2), # Drop RNN state.
tl.Dense(vocab_size),
)
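# Example (a minimal sketch): a character-level language model over a
# 256-symbol vocabulary; 'train' mode enables dropout, 'predict' mode
# switches to fast autoregressive inference.
#
#   model = RNNLM(vocab_size=256, d_model=128, mode='train')
#   print(model)  # shows the Serial combinator structure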
def GRULM(vocab_size=256,
d_model=512,
n_layers=1,
mode='train'):
"""Returns a GRU (gated recurrent unit) language model.
This model performs autoregressive language modeling:
- input: rank 2 tensor representing a batch of text strings via token IDs
plus padding markers; shape is (batch_size, sequence_length). The tensor
elements are integers in `range(vocab_size)`, and `0` values mark padding
positions.
- output: rank 3 tensor representing a batch of log-probability
distributions for each sequence position over possible token IDs;
shape is (batch_size, sequence_length, `vocab_size`).
Args:
vocab_size: Input vocabulary size -- each element of the input tensor
should be an integer in `range(vocab_size)`. These integers typically
represent token IDs from a vocabulary-based tokenizer.
d_model: Embedding depth throughout the model.
n_layers: Number of GRU layers.
mode: If `'predict'`, use fast inference (and omit the right shift).
Returns:
A GRU language model as a layer that maps from a tensor of tokens
to activations over a vocab set.
"""
return tl.Serial(
tl.ShiftRight(mode=mode),
tl.Embedding(vocab_size, d_model),
[tl.GRU(d_model, mode=mode) for _ in range(n_layers)],
tl.Dense(vocab_size),
)
# TODO(jonni): Decide names (here and Transformer): input/source, output/target
# TODO(jonni): Align with Transformer: (attention-)dropout, n-(attention-)heads
def LSTMSeq2SeqAttn(input_vocab_size=256,
target_vocab_size=256,
d_model=512,
n_encoder_layers=2,
n_decoder_layers=2,
n_attention_heads=1,
attention_dropout=0.0,
mode='train'):
"""Returns an LSTM sequence-to-sequence model with attention.
This model is an encoder-decoder that performs tokenized string-to-string
("source"-to-"target") transduction:
- inputs (2):
- source: rank 2 tensor representing a batch of text strings via token
IDs plus padding markers; shape is (batch_size, sequence_length). The
tensor elements are integers in `range(input_vocab_size)`, and `0`
values mark padding positions.
- target: rank 2 tensor representing a batch of text strings via token
IDs plus padding markers; shape is (batch_size, sequence_length). The
tensor elements are integers in `range(target_vocab_size)`, and `0`
values mark padding positions.
- output: rank 3 tensor representing a batch of log-probability
distributions for each sequence position over possible token IDs;
shape is (batch_size, sequence_length, `target_vocab_size`).
An example use would be to translate (tokenized) sentences from English to
German.
The model works as follows:
* Input encoder runs on the input tokens and creates activations that
are used as both keys and values in attention.
* Pre-attention decoder runs on the targets and creates
activations that are used as queries in attention.
* Attention runs on the queries, keys and values masking out input padding.
* Decoder runs on the result, followed by a cross-entropy loss.
Args:
input_vocab_size: Input vocabulary size -- each element of the input tensor
should be an integer in `range(vocab_size)`. These integers typically
represent token IDs from a vocabulary-based tokenizer.
target_vocab_size: Target vocabulary size.
d_model: Final dimension of tensors at most points in the model, including
the initial embedding output.
n_encoder_layers: Number of LSTM layers in the encoder.
n_decoder_layers: Number of LSTM layers in the decoder after attention.
n_attention_heads: Number of attention heads.
attention_dropout: Stochastic rate (probability) for dropping an activation
value when applying dropout within an attention block.
mode: If `'predict'`, use fast inference. If `'train'`, each attention block
will include dropout; else, it will pass all values through unaltered.
Returns:
An LSTM sequence-to-sequence model as a layer that maps from a
source-target tokenized text pair to activations over a vocab set.
"""
input_encoder = tl.Serial(
tl.Embedding(input_vocab_size, d_model),
[tl.LSTM(d_model) for _ in range(n_encoder_layers)],
)
pre_attention_decoder = tl.Serial(
tl.ShiftRight(mode=mode),
tl.Embedding(target_vocab_size, d_model),
tl.LSTM(d_model, mode=mode),
)
def PrepareAttentionInputs():
"""Layer that prepares queries, keys, values and mask for attention."""
def F(encoder_activations, decoder_activations, input_tokens):
keys = values = encoder_activations
queries = decoder_activations
# Mask is 1 where inputs are not padding (0) and 0 where they are padding.
mask = (input_tokens != 0)
# We need to add axes to the mask for attention heads and decoder length.
mask = jnp.reshape(mask, (mask.shape[0], 1, 1, mask.shape[1]))
# Broadcast so mask is [batch, 1 for heads, decoder-len, encoder-len].
mask = mask + jnp.zeros((1, 1, decoder_activations.shape[1], 1))
mask = mask.astype(jnp.float32)
return queries, keys, values, mask
return tl.Fn('PrepareAttentionInputs', F, n_out=4)
return tl.Serial( # in-toks, target-toks
tl.Select([0, 1, 0, 1]), # in-toks, target-toks, in-toks, target-toks
tl.Parallel(input_encoder, pre_attention_decoder),
PrepareAttentionInputs(), # q, k, v, mask, target-toks
tl.Residual(
tl.AttentionQKV(d_model, n_heads=n_attention_heads,
dropout=attention_dropout, mode=mode,
cache_KV_in_predict=True)
), # decoder-vecs, mask, target-toks
tl.Select([0, 2]), # decoder-vecs, target-toks
[tl.LSTM(d_model, mode=mode) for _ in range(n_decoder_layers)],
tl.Dense(target_vocab_size),
tl.LogSoftmax()
)
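# Example (a minimal sketch): an encoder-decoder for tokenized
# string-to-string transduction, with illustrative vocabulary sizes:
#
#   model = LSTMSeq2SeqAttn(input_vocab_size=32000,
#                           target_vocab_size=32000,
#                           d_model=512, mode='train')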
|
#!/usr/bin/env python
"""
This is the testing unit for MUSiCC
"""
# to comply with both Py2 and Py3
from __future__ import absolute_import, division, print_function
import unittest
import os
import pandas as pd
import musicc
from musicc.core import correct_and_normalize
class MUSiCCTestCase(unittest.TestCase):
"""Tests for `musicc.py`."""
path_to_data = os.path.dirname(musicc.__file__)
def test_is_output_correct_for_normalization_only(self):
"""Does MUSiCC produce the correct output for normalization of the example case?"""
print(MUSiCCTestCase.path_to_data)
# define the arguments needed by MUSiCC
musicc_args = {'input_file': MUSiCCTestCase.path_to_data + '/examples/simulated_ko_relative_abundance.tab',
'output_file': MUSiCCTestCase.path_to_data + '/examples/test1.tab',
'input_format': 'tab', 'output_format': 'tab', 'musicc_inter': True,
'musicc_intra': 'None', 'compute_scores': True, 'verbose': False}
# run the MUSiCC correction
correct_and_normalize(musicc_args)
# assert that the result is equal to the example (up to small difference due to OS/Other)
example = pd.read_table(MUSiCCTestCase.path_to_data + '/examples/simulated_ko_MUSiCC_Normalized.tab', index_col=0)
output = pd.read_table(MUSiCCTestCase.path_to_data + '/examples/test1.tab', index_col=0)
example_vals = example.values
output_vals = output.values
self.assertTrue(example_vals.shape[0] == output_vals.shape[0])
self.assertTrue(example_vals.shape[1] == output_vals.shape[1])
for i in range(example_vals.shape[0]):
for j in range(example_vals.shape[1]):
self.assertTrue(abs(example_vals[i, j] - output_vals[i, j]) < 1)
os.remove(MUSiCCTestCase.path_to_data + '/examples/test1.tab')
def test_is_output_correct_for_normalization_correction_use_generic(self):
"""Does MUSiCC produce the correct output for normalization and correction of the example case?"""
# define the arguments needed by MUSiCC
musicc_args = {'input_file': MUSiCCTestCase.path_to_data + '/examples/simulated_ko_relative_abundance.tab',
'output_file': MUSiCCTestCase.path_to_data + '/examples/test2.tab',
'input_format': 'tab', 'output_format': 'tab', 'musicc_inter': True,
'musicc_intra': 'use_generic', 'compute_scores': True, 'verbose': False}
# run the MUSiCC correction
correct_and_normalize(musicc_args)
# assert that the result is equal to the example (up to small difference due to OS/Other)
example = pd.read_table(MUSiCCTestCase.path_to_data + '/examples/simulated_ko_MUSiCC_Normalized_Corrected_use_generic.tab', index_col=0)
output = pd.read_table(MUSiCCTestCase.path_to_data + '/examples/test2.tab', index_col=0)
example_vals = example.values
output_vals = output.values
self.assertTrue(example_vals.shape[0] == output_vals.shape[0])
self.assertTrue(example_vals.shape[1] == output_vals.shape[1])
for i in range(example_vals.shape[0]):
for j in range(example_vals.shape[1]):
self.assertTrue(abs(example_vals[i, j] - output_vals[i, j]) < 1)
os.remove(MUSiCCTestCase.path_to_data + '/examples/test2.tab')
def test_is_output_correct_for_normalization_correction_learn_model(self):
"""Does MUSiCC produce the correct output for normalization and correction of the example case?"""
# define the arguments needed by MUSiCC
musicc_args = {'input_file': MUSiCCTestCase.path_to_data + '/examples/simulated_ko_relative_abundance.tab',
'output_file': MUSiCCTestCase.path_to_data + '/examples/test3.tab',
'input_format': 'tab', 'output_format': 'tab', 'musicc_inter': True,
'musicc_intra': 'learn_model', 'compute_scores': True, 'verbose': False}
# run the MUSiCC correction
correct_and_normalize(musicc_args)
# assert that the result is equal to the example (up to small difference due to de novo learning)
example = pd.read_table(MUSiCCTestCase.path_to_data + '/examples/simulated_ko_MUSiCC_Normalized_Corrected_learn_model.tab', index_col=0)
output = pd.read_table(MUSiCCTestCase.path_to_data + '/examples/test3.tab', index_col=0)
example_vals = example.values
output_vals = output.values
self.assertTrue(example_vals.shape[0] == output_vals.shape[0])
self.assertTrue(example_vals.shape[1] == output_vals.shape[1])
for i in range(example_vals.shape[0]):
for j in range(example_vals.shape[1]):
self.assertTrue(abs(example_vals[i, j] - output_vals[i, j]) < 1)
os.remove(MUSiCCTestCase.path_to_data + '/examples/test3.tab')
################################################
if __name__ == '__main__':
unittest.main()
|
"""
A script to obtain the Ashlock Fingerprints of all strategies in the Axelrod
library.
This writes a hash of the source code of each strategy to file: db.csv.
If the source code of a strategy changes **or** a new strategy is introduced
then the fingerprint is regenerated for that strategy.
"""
import inspect
import hashlib
import csv
import string
import numpy as np
import matplotlib.pyplot as plt
import axelrod as axl
def hash_strategy(strategy):
"""
Hash the source code of a strategy
"""
try:
source_code = "".join(inspect.getsourcelines(strategy)[0])
except OSError: # Some classes are dynamically created
source_code = "".join(inspect.getsourcelines(strategy.strategy)[0])
hash_object = hashlib.md5(source_code.encode("utf-8"))
hashed_source = hash_object.hexdigest()
return hashed_source
def write_strategy_to_db(strategy, filename="db.csv", fingerprint="Ashlock"):
"""
Write the hash of a strategy to the db
"""
hashed_source = hash_strategy(strategy)
with open(filename, "a") as db:
try:
db.write(
"{},{},{}\n".format(
strategy.original_name, fingerprint, hashed_source
)
)
except AttributeError:
db.write(
"{},{},{}\n".format(strategy.name, fingerprint, hashed_source)
)
def read_db(filename="db.csv"):
"""
Read filename and return a dictionary mapping string names to hash of source
code of a strategy
"""
with open(filename, "r") as db:
csvreader = csv.reader(db)
str_to_hash = {(row[0], row[1]): row[2] for row in csvreader}
return str_to_hash
def create_db(filename="db.csv"):
"""
Creates an empty db.csv file
"""
with open(filename, "w"):
pass
def write_data_to_file(fp, filename):
"""
Write the fingerprint data to a file.
"""
columns = ["x", "y", "score"]
with open(filename, "w") as f:
w = csv.writer(f)
w.writerow(columns)
for key, value in fp.data.items():
w.writerow([key.x, key.y, value])
def obtain_fingerprint(
strategy, turns, repetitions, probe=axl.TitForTat, processes=1
):
"""
Obtain the fingerprint for a given strategy and save the figure to the
assets dir
"""
fp = axl.AshlockFingerprint(strategy, probe)
fp.fingerprint(
turns=turns,
repetitions=repetitions,
progress_bar=False,
processes=processes,
)
plt.figure()
fp.plot()
try:
name = strategy.original_name
except AttributeError:
name = strategy.name
plt.tight_layout()
plt.savefig(
"assets/{}.png".format(format_filename(name)), bbox_inches="tight"
)
write_data_to_file(fp, "assets/{}.csv".format(format_filename(name)))
def obtain_transitive_fingerprint(strategy, turns, repetitions, processes=1):
"""
Obtain the transitive fingerprint
for a given strategy and save the figure to the assets dir
"""
fp = axl.TransitiveFingerprint(strategy, number_of_opponents=30)
fp.fingerprint(
turns=turns,
repetitions=repetitions,
progress_bar=False,
processes=processes,
)
plt.figure()
fp.plot()
try:
name = strategy.original_name
except AttributeError:
name = strategy.name
plt.tight_layout()
plt.savefig(
"assets/transitive_{}.png".format(format_filename(name)),
bbox_inches="tight",
)
np.savetxt(
"assets/transitive_{}.csv".format(format_filename(name)), fp.data
)
def obtain_transitive_fingerprint_v_short(
strategy, turns, repetitions, processes=1
):
"""
Obtain the transitive fingerprint against short run time
for a given strategy and save the figure to the assets dir
"""
short_run_time = [s() for s in axl.short_run_time_strategies]
fp = axl.TransitiveFingerprint(strategy, opponents=short_run_time)
fp.fingerprint(
turns=turns,
repetitions=repetitions,
progress_bar=False,
processes=processes,
)
plt.figure()
fp.plot(display_names=True)
try:
name = strategy.original_name
except AttributeError:
name = strategy.name
plt.tight_layout()
plt.savefig(
"assets/transitive_v_short_{}.png".format(format_filename(name)),
bbox_inches="tight",
)
np.savetxt(
"assets/transitive_v_short_{}.csv".format(format_filename(name)),
fp.data,
)
def format_filename(s):
"""
Take a string and return a valid filename constructed from the string.
Uses a whitelist approach: any characters not present in valid_chars are
removed. Also spaces are replaced with underscores.
Note: this method may produce invalid filenames such as ``, `.` or `..`
When I use this method I prepend a date string like '2009_01_15_19_46_32_'
and append a file extension like '.txt', so I avoid the potential of using
an invalid filename.
Borrowed from https://gist.github.com/seanh/93666
"""
valid_chars = "-_.() {}{}".format(string.ascii_letters, string.digits)
filename = "".join(c for c in s if c in valid_chars)
filename = filename.replace(" ", "_")
return filename
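# Example: format_filename('Tit For Tat?') returns 'Tit_For_Tat' -- the
# '?' is not in valid_chars and is dropped, and the space becomes '_'.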
def write_markdown(strategy):
"""
Write a markdown section of a strategy.
"""
try:
name = strategy.original_name
except AttributeError:
name = strategy.name
markdown = """
## {0}

[data (csv)](./assets/{1}.csv)

[data (csv)](./assets/transitive_{1}.csv)

[data (csv)](./assets/transitive_v_short_{1}.csv)
""".format(
name, format_filename(name)
)
return markdown
def main(
turns,
repetitions,
transitive_turns,
transitive_repetitions,
transitive_v_short_turns,
transitive_v_short_repetitions,
processes,
):
"""
Fingerprint all strategies, if a strategy has already been fingerprinted it
does not get rerun.
"""
version = axl.__version__
markdown = """# Ashlock and transitive fingerprints
See:
[axelrod.readthedocs.io/en/latest/tutorials/further_topics/fingerprinting.html#fingerprinting](http://axelrod.readthedocs.io/en/latest/tutorials/further_topics/fingerprinting.html#fingerprinting)
All strategies included from Axelrod version {}.
This README.md file is autogenerated by running:
```
$ python update_fingerprints.py
```
Each individual fingerprint can be obtained by running:
```python
import axelrod as axl
fp = axl.AshlockFingerprint(strategy, probe)
fp.fingerprint(turns={}, repetitions={})
fp.plot()
```
# Axelrod library fingerprints
""".format(
version, turns, repetitions
)
try:
db = read_db()
except FileNotFoundError:
create_db()
db = read_db()
for strategy in axl.short_run_time_strategies:
name = strategy.name
signature = hash_strategy(strategy)
fp = "Ashlock"
if (name, fp) not in db or db[name, fp] != signature:
obtain_fingerprint(
strategy, turns, repetitions, processes=processes
)
write_strategy_to_db(strategy, fingerprint=fp)
fp = "Transitive"
if (name, fp) not in db or db[name, fp] != signature:
obtain_transitive_fingerprint(
strategy,
transitive_turns,
transitive_repetitions,
processes=processes,
)
write_strategy_to_db(strategy, fingerprint=fp)
fp = "Transitive_v_short"
if (name, fp) not in db or db[name, fp] != signature:
obtain_transitive_fingerprint_v_short(
strategy,
transitive_v_short_turns,
transitive_v_short_repetitions,
processes=processes,
)
write_strategy_to_db(strategy, fingerprint=fp)
markdown += write_markdown(strategy)
with open("README.md", "w") as outfile:
outfile.write(markdown)
if __name__ == "__main__":
turns, repetitions = 200, 20
transitive_turns, transitive_repetitions = 200, 20
transitive_v_short_turns, transitive_v_short_repetitions = 200, 20
processes = 20
main(
turns=turns,
repetitions=repetitions,
transitive_turns=transitive_turns,
transitive_repetitions=transitive_repetitions,
transitive_v_short_turns=transitive_v_short_turns,
transitive_v_short_repetitions=transitive_v_short_repetitions,
processes=processes,
)
|
##########################################################################
# This file is part of WTFramework.
#
# WTFramework is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# WTFramework is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with WTFramework. If not, see <http://www.gnu.org/licenses/>.
##########################################################################
from __future__ import print_function
import os
from six import u
# This file takes the files in the /tests directory, then converts them
# into strings in wtframework/wtf/_devtools_/filetemplates/examples.py
# These are the files that are generated when the user does --withexamples
# in the project generator
if __name__ == '__main__':
example_path = os.path.join('wtframework', 'wtf', '_devtools_', 'filetemplates', '_examples_.py')
print(example_path)
examples_file = open(example_path,
"w")
examples_file.write(u("""##########################################################################
#This file is part of WTFramework.
#
# WTFramework is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# WTFramework is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with WTFramework. If not, see <http://www.gnu.org/licenses/>.
##########################################################################
from six import u
examples = {}
"""))
for root, dirs, files in os.walk('tests'):
for example_file in files:
if not example_file.endswith(".py"):
continue
fpath = os.path.join(root, example_file)
print("processing ", fpath)
the_file = open(fpath)
examples_file.write(u("examples['" + fpath + "'] = u('''"))
examples_file.write(u(the_file.read().replace("'''", '"""')))
examples_file.write(u("\n''')\n\n"))
examples_file.close()
|
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
try:
basestring
except NameError:
basestring = str
from compas_rhino.forms.base import BaseForm
from System.Windows.Forms import PictureBox
from System.Windows.Forms import PictureBoxSizeMode
from System.Windows.Forms import DockStyle
from System.Drawing import Image
from System.Net import WebClient
from System.IO import MemoryStream
__all__ = ['ImageForm', 'image_from_remote', 'image_from_local']
def image_from_remote(source):
"""Construct an image from a remote source.
Parameters
----------
source : str
The url of the remote source.
Returns
-------
System.Drawing.Image
Representation of an image in memory.
Examples
--------
.. code-block:: python
image = image_from_remote('http://block.arch.ethz.ch/brg/images/cache/dsc02360_ni-2_cropped_1528706473_624x351.jpg')
"""
w = WebClient()
d = w.DownloadData(source)
m = MemoryStream(d)
return Image.FromStream(m)
def image_from_local(source):
"""Construct an image from a local source.
Parameters
----------
source : str
The path to the local source file.
Returns
-------
System.Drawing.Image
Representation of an image in memory.
Examples
--------
.. code-block:: python
image = image_from_local('theblock.jpg')
"""
return Image.FromFile(source)
class ImageForm(BaseForm):
"""A form for displaying images.
Parameters
----------
image : {str, Image}
The image that should be displayed.
This can be a url of a remote image file,
or a local file path,
or an instance of ``System.Drawing.Image``.
title : str, optional
Title of the form.
Default is ``ImageForm``.
width : int, optional
Width of the form.
Default is ``None``.
height : int, optional
Height of the form.
Default is ``None``.
Examples
--------
.. code-block:: python
from compas_rhino.forms import ImageForm
form = ImageForm('http://block.arch.ethz.ch/brg/images/cache/dsc02360_ni-2_cropped_1528706473_624x351.jpg')
form.show()
"""
def __init__(self, image, title='Image', width=None, height=None):
self._image = None
self.image = image
super(ImageForm, self).__init__(title, width, height)
@property
def image(self):
"""System.Drawing.Image: An instance of ``System.Drawing.Image``.
"""
return self._image
@image.setter
def image(self, image):
if isinstance(image, basestring):
if image.startswith('http'):
self._image = image_from_remote(image)
else:
self._image = image_from_local(image)
elif isinstance(image, Image):
self._image = image
else:
raise NotImplementedError
def init(self):
box = PictureBox()
box.Dock = DockStyle.Fill
box.SizeMode = PictureBoxSizeMode.AutoSize
box.Image = self.image
self.image = box.Image
self.Controls.Add(box)
self.ClientSize = box.Size
def on_form_closed(self, sender, e):
self.image.Dispose()
|
# -*- coding: utf-8 -*-
#
# Copyright © 2013-2016 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions
# of the GNU General Public License v.2, or (at your option) any later
# version. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY expressed or implied, including the
# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details. You
# should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# Any Red Hat trademarks that are incorporated in the source
# code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission
# of Red Hat, Inc.
#
'''
pkgdb tests for the Flask API regarding collections.
'''
__requires__ = ['SQLAlchemy >= 0.8']
import pkg_resources
import json
import unittest
import sys
import os
from mock import patch
sys.path.insert(0, os.path.join(os.path.dirname(
os.path.abspath(__file__)), '..'))
import pkgdb2
from pkgdb2 import lib as pkgdblib
from pkgdb2.lib import model
from tests import (Modeltests, FakeFasUser, FakeFasUserAdmin,
create_collection, create_package_acl,
create_package_critpath, user_set)
class FlaskApiPackagesTest(Modeltests):
""" Flask API Packages tests. """
def setUp(self):
""" Set up the environnment, ran before every tests. """
super(FlaskApiPackagesTest, self).setUp()
pkgdb2.APP.config['TESTING'] = True
pkgdb2.SESSION = self.session
pkgdb2.api.packages.SESSION = self.session
pkgdb2.ui.SESSION = self.session
self.app = pkgdb2.APP.test_client()
@patch('pkgdb2.lib.utils')
@patch('pkgdb2.is_admin')
def test_api_package_new(self, login_func, mock_func):
""" Test the api_package_new function. """
login_func.return_value = None
# Redirect as you are not admin
user = FakeFasUser()
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/new/')
self.assertEqual(output.status_code, 302)
user = FakeFasUserAdmin()
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/new/')
self.assertEqual(output.status_code, 500)
data = json.loads(output.data)
self.assertEqual(
sorted(data),
['error', 'error_detail', 'output']
)
self.assertEqual(
data['error'], "Invalid input submitted")
self.assertEqual(
data['output'], "notok")
self.assertEqual(
sorted(data['error_detail']),
[
"branches: This field is required.",
"pkgname: This field is required.",
"poc: This field is required.",
"review_url: This field is required.",
"status: Not a valid choice",
"summary: This field is required.",
]
)
data = {
'pkgname': 'gnome-terminal',
'summary': 'Terminal emulator for GNOME',
'description': 'Terminal for GNOME...',
'review_url': 'http://bugzilla.redhat.com/1234',
'status': '',
'critpath': '',
'branches': '',
'poc': '',
'upstream_url': '',
'monitoring_status': '1',
'namespace': 'foo',
}
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/new/', data=data)
self.assertEqual(output.status_code, 500)
data = json.loads(output.data)
self.assertEqual(
data,
{
"error": "Invalid input submitted",
"error_detail": [
"status: This field is required.",
"namespace: Not a valid choice",
"monitoring_status: Not a valid choice",
"branches: '' is not a valid choice for this field",
"poc: This field is required.",
],
"output": "notok"
}
)
data = {
'pkgname': 'gnome-terminal',
'summary': 'Terminal emulator for GNOME',
'description': 'Terminal for GNOME...',
'review_url': 'http://bugzilla.redhat.com/1234',
'status': 'Approved',
'branches': 'master',
'poc': 'mclasen',
'upstream_url': 'http://www.gnome.org/',
'critpath': False,
'namespace': 'rpms',
}
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/new/', data=data)
self.assertEqual(output.status_code, 500)
data = json.loads(output.data)
self.assertEqual(
data,
{
"error": "Invalid input submitted",
"error_detail": [
"branches: 'master' is not a valid choice for this "
"field"
],
"output": "notok"
}
)
create_collection(self.session)
data = {
'pkgname': 'gnome-terminal',
'summary': 'Terminal emulator for GNOME',
'description': 'Terminal for GNOME...',
'review_url': 'http://bugzilla.redhat.com/1234',
'status': 'Approved',
'branches': 'master',
'poc': 'mclasen',
'upstream_url': 'http://www.gnome.org/',
'critpath': False,
'namespace': 'rpms',
}
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/new/', data=data)
self.assertEqual(output.status_code, 500)
data = json.loads(output.data)
self.assertEqual(
data,
{
"error": "User \"mclasen\" is not in the packager group",
"output": "notok"
}
)
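        # Pretend 'mclasen' is in the packager group and silence the
        # notification log so that package creation can succeed.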
mock_func.get_packagers.return_value = ['mclasen']
mock_func.log.return_value = ''
data = {
'pkgname': 'gnome-terminal',
'summary': 'Terminal emulator for GNOME',
'description': 'Terminal for GNOME...',
'review_url': 'http://bugzilla.redhat.com/1234',
'status': 'Approved',
'branches': 'master',
'poc': 'mclasen',
'upstream_url': 'http://www.gnome.org/',
'critpath': False,
'monitoring_status': 'nobuild',
'koschei': True,
'namespace': 'rpms',
}
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/new/', data=data)
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
data,
{
"messages": [
"Package created"
],
"output": "ok"
}
)
        # Check that the created package has the properties we asked for
output = self.app.get('/api/package/gnome-terminal/')
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
data['packages'][0]['package']['koschei_monitor'], True)
self.assertEqual(
data['packages'][0]['package']['monitor'], 'nobuild')
@patch('pkgdb2.lib.utils')
@patch('pkgdb2.packager_login_required')
def test_api_package_orphan(self, login_func, mock_func):
""" Test the api_package_orphan function. """
login_func.return_value = None
# Redirect as you are not a packager
user = FakeFasUser()
user.groups = []
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/orphan/')
self.assertEqual(output.status_code, 302)
user = FakeFasUser()
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/orphan/')
self.assertEqual(output.status_code, 500)
data = json.loads(output.data)
self.assertEqual(
data,
{
"error": "Invalid input submitted",
"error_detail": [
"pkgnames: This field is required.",
"branches: This field is required.",
],
"output": "notok"
}
)
data = {
'namespace': 'rpms',
'pkgnames': 'guake',
'branches': ['f18', 'master'],
'poc': 'test',
}
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/orphan/', data=data)
self.assertEqual(output.status_code, 500)
data = json.loads(output.data)
self.assertEqual(
data,
{
"error": "No package found by this name",
"output": "notok"
}
)
create_package_acl(self.session)
mock_func.log.return_value = ''
data = {
'namespace': 'rpms',
'pkgnames': 'guake',
'branches': ['el4', 'f18'],
'poc': 'test',
}
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/orphan/', data=data)
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
data,
{
'error': 'The package rpms/guake could not be found in '
'the collection el4.',
'messages': [''],
'output': 'ok'
}
)
pkg_acl = pkgdblib.get_acl_package(
self.session, 'rpms', 'guake')
self.assertEqual(pkg_acl[0].collection.branchname, 'f18')
self.assertEqual(pkg_acl[0].package.name, 'guake')
self.assertEqual(pkg_acl[0].point_of_contact, 'orphan')
self.assertEqual(pkg_acl[0].status, 'Orphaned')
self.assertEqual(pkg_acl[1].collection.branchname, 'master')
self.assertEqual(pkg_acl[1].package.name, 'guake')
self.assertEqual(pkg_acl[1].point_of_contact, 'pingou')
data = {
'namespace': 'rpms',
'pkgnames': 'guake',
'branches': ['f18', 'master'],
'poc': 'test',
}
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/orphan/', data=data)
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
data,
{
"messages": ["", ""],
"output": "ok"
}
)
pkg_acl = pkgdblib.get_acl_package(
self.session, 'rpms', 'guake')
self.assertEqual(pkg_acl[0].collection.branchname, 'f18')
self.assertEqual(pkg_acl[0].package.name, 'guake')
self.assertEqual(pkg_acl[0].point_of_contact, 'orphan')
self.assertEqual(pkg_acl[0].status, 'Orphaned')
self.assertEqual(pkg_acl[1].collection.branchname, 'master')
self.assertEqual(pkg_acl[1].package.name, 'guake')
self.assertEqual(pkg_acl[1].point_of_contact, 'orphan')
self.assertEqual(pkg_acl[1].status, 'Orphaned')
@patch('pkgdb2.lib.utils')
@patch('pkgdb2.packager_login_required')
def test_api_package_unorphan(self, login_func, mock_func):
""" Test the api_package_unorphan function. """
login_func.return_value = None
# Redirect as you are not a packager
user = FakeFasUser()
user.groups = []
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/unorphan/')
self.assertEqual(output.status_code, 302)
user = FakeFasUser()
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/unorphan/')
self.assertEqual(output.status_code, 500)
data = json.loads(output.data)
self.assertEqual(
data,
{
"error": "Invalid input submitted",
"error_detail": [
"pkgnames: This field is required.",
"branches: This field is required.",
"poc: This field is required.",
],
"output": "notok"
}
)
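        # Pretend 'test' is in the packager group for the unorphan request.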
mock_func.get_packagers.return_value = ['test']
data = {
'namespace': 'rpms',
'pkgnames': 'guake',
'branches': ['f18', 'master'],
'poc': 'test',
}
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/unorphan/', data=data)
self.assertEqual(output.status_code, 500)
data = json.loads(output.data)
self.assertEqual(
data,
{
"error": "No package found by this name",
"output": "notok"
}
)
create_package_acl(self.session)
mock_func.log.return_value = ''
# Unorphan a not-orphaned package
data = {
'namespace': 'rpms',
'pkgnames': 'guake',
'branches': ['f18', 'master'],
'poc': 'test',
}
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/unorphan/', data=data)
self.assertEqual(output.status_code, 500)
data = json.loads(output.data)
self.assertEqual(
data,
{
"error": [
'Package "rpms/guake" is not orphaned on master',
'Package "rpms/guake" is not orphaned on f18',
],
"output": "notok"
}
)
# Orphan the package
data = {
'namespace': 'rpms',
'pkgnames': 'guake',
'branches': ['f18', 'master'],
'poc': 'test',
}
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/orphan/', data=data)
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
data,
{
"messages": ["", ""],
"output": "ok"
}
)
pkg_acl = pkgdblib.get_acl_package(
self.session, 'rpms', 'guake')
self.assertEqual(pkg_acl[0].collection.branchname, 'f18')
self.assertEqual(pkg_acl[0].package.name, 'guake')
self.assertEqual(pkg_acl[0].point_of_contact, 'orphan')
self.assertEqual(pkg_acl[0].status, 'Orphaned')
self.assertEqual(pkg_acl[1].collection.branchname, 'master')
self.assertEqual(pkg_acl[1].package.name, 'guake')
self.assertEqual(pkg_acl[1].point_of_contact, 'orphan')
self.assertEqual(pkg_acl[1].status, 'Orphaned')
# Unorphan the package for someone else
data = {
'namespace': 'rpms',
'pkgnames': 'guake',
'branches': ['f18', 'master'],
'poc': 'test',
}
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/unorphan/', data=data)
self.assertEqual(output.status_code, 500)
data = json.loads(output.data)
self.assertEqual(
data,
{
"error": "You are not allowed to update ACLs of someone "
"else.",
"output": "notok"
}
)
mock_func.get_packagers.return_value = ['pingou']
# Unorphan the package on a branch where it is not
data = {
'namespace': 'rpms',
'pkgnames': 'guake',
'branches': ['el4', 'f18'],
'poc': 'pingou',
}
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/unorphan/', data=data)
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
data,
{
'error':
'Package "rpms/guake" is not in the collection el4',
"messages": [
"Package rpms/guake has been unorphaned on f18 by pingou"
],
'output': 'ok'
}
)
pkg_acl = pkgdblib.get_acl_package(
self.session, 'rpms', 'guake')
self.assertEqual(pkg_acl[0].collection.branchname, 'f18')
self.assertEqual(pkg_acl[0].package.name, 'guake')
self.assertEqual(pkg_acl[0].point_of_contact, 'pingou')
self.assertEqual(pkg_acl[0].status, 'Approved')
self.assertEqual(pkg_acl[1].collection.branchname, 'master')
self.assertEqual(pkg_acl[1].package.name, 'guake')
self.assertEqual(pkg_acl[1].point_of_contact, 'orphan')
self.assertEqual(pkg_acl[1].status, 'Orphaned')
# Unorphan the package
data = {
'namespace': 'rpms',
'pkgnames': 'guake',
'branches': ['f18', 'master'],
'poc': 'pingou',
}
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/unorphan/', data=data)
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
data,
{
"error": 'Package "rpms/guake" is not orphaned on f18',
"messages": [
"Package rpms/guake has been unorphaned on master by pingou"
],
"output": "ok"
}
)
pkg_acl = pkgdblib.get_acl_package(
self.session, 'rpms', 'guake')
self.assertEqual(pkg_acl[0].collection.branchname, 'f18')
self.assertEqual(pkg_acl[0].package.name, 'guake')
self.assertEqual(pkg_acl[0].point_of_contact, 'pingou')
self.assertEqual(pkg_acl[0].status, 'Approved')
self.assertEqual(pkg_acl[1].collection.branchname, 'master')
self.assertEqual(pkg_acl[1].package.name, 'guake')
self.assertEqual(pkg_acl[1].point_of_contact, 'pingou')
self.assertEqual(pkg_acl[1].status, 'Approved')
@patch('pkgdb2.lib.utils.set_bugzilla_owner')
@patch('pkgdb2.packager_login_required')
def test_api_package_retire(self, login_func, mock_func):
""" Test the api_package_retire function. """
login_func.return_value = None
# Redirect as you are not a packager
user = FakeFasUser()
user.groups = []
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/retire/')
self.assertEqual(output.status_code, 302)
user = FakeFasUser()
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/retire/')
self.assertEqual(output.status_code, 500)
data = json.loads(output.data)
self.assertEqual(
data,
{
"error": "Invalid input submitted",
"error_detail": [
"pkgnames: This field is required.",
"branches: This field is required.",
],
"output": "notok"
}
)
data = {
'pkgnames': 'guake',
'branches': ['f18', 'master'],
'poc': 'test',
}
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/retire/', data=data)
self.assertEqual(output.status_code, 500)
data = json.loads(output.data)
self.assertEqual(
data,
{
"error": "No package found by this name",
"output": "notok"
}
)
create_package_acl(self.session)
mock_func.log.return_value = ''
data = {
'pkgnames': 'guake',
'branches': ['f18', 'master'],
'poc': 'test',
}
# User is not an admin
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/retire/', data=data)
self.assertEqual(output.status_code, 500)
data = json.loads(output.data)
self.assertEqual(
data,
{
"error": "You are not allowed to retire the package: "
"rpms/guake on branch f18.",
"output": "notok"
}
)
data = {
'pkgnames': 'guake',
'branches': ['master'],
'poc': 'test',
}
# User is not the poc
user.username = 'toshio'
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/retire/', data=data)
self.assertEqual(output.status_code, 500)
data = json.loads(output.data)
self.assertEqual(
data,
{
"error": "You are not allowed to retire this package.",
"output": "notok"
}
)
        # Retire the package on a non-existent branch
user = FakeFasUser()
data = {
'pkgnames': 'guake',
'branches': ['el6'],
}
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/retire/', data=data)
self.assertEqual(output.status_code, 500)
data = json.loads(output.data)
self.assertEqual(
data,
{
"error": "No package rpms/guake found in collection el6",
"output": "notok"
}
)
# Check before
output = self.app.get('/api/package/guake/')
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(data.keys(), ['output', 'packages'])
self.assertEqual(len(data['packages']), 2)
self.assertEqual(data['output'], 'ok')
self.assertEqual(data['packages'][0]['collection']['branchname'],
'f18')
self.assertEqual(data['packages'][0]['point_of_contact'],
'pingou')
self.assertEqual(data['packages'][1]['collection']['branchname'],
'master')
self.assertEqual(data['packages'][1]['point_of_contact'],
'pingou')
for acl in data['packages'][0]['acls']:
self.assertEqual(acl['status'], 'Approved')
# Retire the package
user = FakeFasUserAdmin()
data = {
'pkgnames': 'guake',
'branches': ['f18', 'master'],
}
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/retire/', data=data)
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
data,
{
"messages": [
"user: admin updated package: guake status from: "
"Approved to Retired on branch: f18",
"user: admin updated package: guake status from: "
"Approved to Retired on branch: master",
],
"output": "ok"
}
)
# Check after retiring
output = self.app.get('/api/package/guake/')
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(data.keys(), ['output', 'packages'])
self.assertEqual(len(data['packages']), 2)
self.assertEqual(data['output'], 'ok')
self.assertEqual(data['packages'][0]['collection']['branchname'],
'f18')
self.assertEqual(data['packages'][0]['point_of_contact'],
'orphan')
self.assertEqual(data['packages'][1]['collection']['branchname'],
'master')
self.assertEqual(data['packages'][1]['point_of_contact'],
'orphan')
for acl in data['packages'][0]['acls']:
if acl['fas_name'] == 'group::provenpackager':
continue
self.assertEqual(acl['status'], 'Obsolete')
@patch('pkgdb2.lib.utils')
@patch('pkgdb2.packager_login_required')
def test_api_package_retire2(self, login_func, mock_func):
""" Test a second time the api_package_retire function. """
login_func.return_value = None
create_package_acl(self.session)
mock_func.log.return_value = ''
# Redirect as you are not a packager
user = FakeFasUser()
user.groups = []
# Add the EPEL 7 collection
collection = model.Collection(
name='Fedora EPEL',
version='7',
status='Active',
owner='kevin',
branchname='epel7',
dist_tag='.el7',
allow_retire=True,
)
self.session.add(collection)
self.session.commit()
# Add guake to epel7
guake_pkg = model.Package.by_name(self.session, 'rpms', 'guake')
el7_collec = model.Collection.by_name(self.session, 'epel7')
pkgltg = model.PackageListing(
point_of_contact='pingou',
status='Approved',
package_id=guake_pkg.id,
collection_id=el7_collec.id,
)
self.session.add(pkgltg)
self.session.commit()
# Check before
output = self.app.get('/api/package/guake/')
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(data.keys(), ['output', 'packages'])
self.assertEqual(len(data['packages']), 3)
self.assertEqual(data['output'], 'ok')
self.assertEqual(data['packages'][0]['collection']['branchname'],
'f18')
self.assertEqual(data['packages'][0]['point_of_contact'],
'pingou')
self.assertEqual(data['packages'][1]['collection']['branchname'],
'master')
self.assertEqual(data['packages'][1]['point_of_contact'],
'pingou')
self.assertEqual(data['packages'][2]['collection']['branchname'],
'epel7')
self.assertEqual(data['packages'][2]['point_of_contact'],
'pingou')
for acl in data['packages'][1]['acls']:
            self.assertIn(acl['status'], ['Awaiting Review', 'Approved'])
self.assertFalse('acls' in data['packages'][2])
# Retire the package on an EPEL branch
user = FakeFasUser()
data = {
'pkgnames': 'guake',
'branches': ['master', 'epel7'],
}
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/retire/', data=data)
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
data,
{
"messages": ["", ""],
"output": "ok"
}
)
# Check after retiring
output = self.app.get('/api/package/guake/')
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(data.keys(), ['output', 'packages'])
self.assertEqual(len(data['packages']), 3)
self.assertEqual(data['output'], 'ok')
self.assertEqual(data['packages'][0]['collection']['branchname'],
'f18')
self.assertEqual(data['packages'][0]['point_of_contact'],
'pingou')
self.assertEqual(data['packages'][1]['collection']['branchname'],
'master')
self.assertEqual(data['packages'][1]['point_of_contact'],
'orphan')
self.assertEqual(data['packages'][2]['collection']['branchname'],
'epel7')
self.assertEqual(data['packages'][2]['point_of_contact'],
'orphan')
for acl in data['packages'][1]['acls']:
if acl['fas_name'] == 'group::provenpackager':
continue
self.assertEqual(acl['status'], 'Obsolete')
self.assertFalse('acls' in data['packages'][2])
@patch('pkgdb2.lib.utils')
@patch('pkgdb2.packager_login_required')
def test_api_package_retire3(self, login_func, mock_func):
""" Test a third time the api_package_retire function. """
login_func.return_value = None
create_package_acl(self.session)
mock_func.log.return_value = ''
# Redirect as you are not a packager
user = FakeFasUser()
user.groups = []
# Add the EPEL 7 collection
collection = model.Collection(
name='Fedora EPEL',
version='7',
status='Active',
owner='kevin',
branchname='epel7',
dist_tag='.el7',
allow_retire=True,
)
self.session.add(collection)
self.session.commit()
# Add guake to epel7
guake_pkg = model.Package.by_name(self.session, 'rpms', 'guake')
el7_collec = model.Collection.by_name(self.session, 'epel7')
pkgltg = model.PackageListing(
point_of_contact='orphan',
status='Orphaned',
package_id=guake_pkg.id,
collection_id=el7_collec.id,
)
self.session.add(pkgltg)
self.session.commit()
# Retire an orphaned package on an EPEL branch
user = FakeFasUser()
data = {
'pkgnames': 'guake',
'branches': ['epel7'],
}
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/retire/', data=data)
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
data,
{
"messages": [""],
"output": "ok"
}
)
@patch('pkgdb2.lib.utils')
@patch('pkgdb2.packager_login_required')
def test_api_package_retire4(self, login_func, mock_func):
""" Test a fourth time the api_package_retire function. """
login_func.return_value = None
create_package_acl(self.session)
mock_func.log.return_value = ''
# Redirect as you are not a packager
user = FakeFasUser()
user.groups = []
# Add the EPEL 7 collection
collection = model.Collection(
name='Fedora EPEL',
version='7',
status='Active',
owner='kevin',
branchname='epel7',
dist_tag='.el7',
allow_retire=True,
)
self.session.add(collection)
self.session.commit()
# Add guake to epel7
guake_pkg = model.Package.by_name(self.session, 'rpms', 'guake')
el7_collec = model.Collection.by_name(self.session, 'epel7')
pkgltg = model.PackageListing(
point_of_contact='kevin',
status='Approved',
package_id=guake_pkg.id,
collection_id=el7_collec.id,
)
self.session.add(pkgltg)
self.session.commit()
        # Accessing pkgltg.id later fails with:
        # DetachedInstanceError: Instance <PackageListing at ...> is not
        # bound to a Session; attribute refresh operation cannot proceed
        # so cache the id while the instance is still bound to the session.
        pkgltg_id = pkgltg.id
user = FakeFasUser()
data = {
'namespace': 'rpms',
'pkgnames': 'guake',
'branches': ['epel7'],
}
# User does not have approveacls and is not PoC on that branch
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/retire/', data=data)
self.assertEqual(output.status_code, 500)
data = json.loads(output.data)
self.assertEqual(
data,
{
"error": "You are not allowed to retire this package.",
"output": "notok"
}
)
# Give approveacls to pingou on guake branch epel7:
packager = model.PackageListingAcl(
fas_name='pingou',
packagelisting_id=pkgltg_id,
acl='approveacls',
status='Approved',
)
self.session.add(packager)
self.session.commit()
data = {
'namespace': 'rpms',
'pkgnames': 'guake',
'branches': ['epel7'],
}
# Retire a package where user has `approveacls` but is not PoC
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/retire/', data=data)
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
data,
{
"messages": [""],
"output": "ok"
}
)
@patch('pkgdb2.lib.utils')
@patch('pkgdb2.packager_login_required')
def test_api_package_retire5(self, login_func, mock_func):
""" Test a fifth time the api_package_retire function. """
login_func.return_value = None
create_package_acl(self.session)
mock_func.log.return_value = ''
# Redirect as you are not a packager
user = FakeFasUser()
user.groups = []
# Add the EPEL 7 collection where we block retiring packages
collection = model.Collection(
name='Fedora EPEL',
version='7',
status='Active',
owner='kevin',
branchname='epel7',
dist_tag='.el7',
allow_retire=False,
)
self.session.add(collection)
self.session.commit()
# Add guake to epel7
guake_pkg = model.Package.by_name(self.session, 'rpms', 'guake')
el7_collec = model.Collection.by_name(self.session, 'epel7')
pkgltg = model.PackageListing(
point_of_contact='kevin',
status='Approved',
package_id=guake_pkg.id,
collection_id=el7_collec.id,
)
self.session.add(pkgltg)
self.session.commit()
        # Accessing pkgltg.id later fails with:
        # DetachedInstanceError: Instance <PackageListing at ...> is not
        # bound to a Session; attribute refresh operation cannot proceed
        # so cache the id while the instance is still bound to the session.
        pkgltg_id = pkgltg.id
# Give approveacls to pingou on guake branch epel7:
packager = model.PackageListingAcl(
fas_name='pingou',
packagelisting_id=pkgltg_id,
acl='approveacls',
status='Approved',
)
self.session.add(packager)
self.session.commit()
user = FakeFasUser()
data = {
'pkgnames': 'guake',
'branches': ['epel7'],
}
# Collection does not support retiring a package
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/retire/', data=data)
self.assertEqual(output.status_code, 500)
data = json.loads(output.data)
self.assertEqual(
data,
{
"error": "You are not allowed to retire the package: "
"rpms/guake on branch epel7.",
"output": "notok"
}
)
@patch('pkgdb2.lib.utils')
@patch('pkgdb2.packager_login_required')
def test_api_package_unretire(self, login_func, mock_func):
""" Test the api_package_unretire function. """
login_func.return_value = None
# Redirect as you are not a packager
user = FakeFasUser()
user.groups = []
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/unretire/')
self.assertEqual(output.status_code, 302)
user = FakeFasUser()
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/unretire/')
self.assertEqual(output.status_code, 500)
data = json.loads(output.data)
self.assertEqual(
data,
{
"error": "Invalid input submitted",
"error_detail": [
"pkgnames: This field is required.",
"branches: This field is required.",
],
"output": "notok"
}
)
data = {
'pkgnames': 'guake',
'branches': ['f18', 'master'],
'poc': 'test',
}
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/unretire/', data=data)
self.assertEqual(output.status_code, 500)
data = json.loads(output.data)
self.assertEqual(
data,
{
"error": "No package found by this name",
"output": "notok"
}
)
create_package_acl(self.session)
mock_func.log.return_value = ''
# User is not an admin
data = {
'pkgnames': 'guake',
'branches': ['f18', 'master'],
}
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/unretire/', data=data)
self.assertEqual(output.status_code, 500)
data = json.loads(output.data)
self.assertEqual(
data,
{
"error": "You are not allowed to update the status of "
"the package: rpms/guake on branch f18 to "
"Approved.",
"output": "notok"
}
)
# Unretire the package
user = FakeFasUserAdmin()
data = {
'pkgnames': 'guake',
'branches': ['f18', 'master'],
}
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/unretire/', data=data)
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
data,
{
"messages": ["", ""],
"output": "ok"
}
)
def test_api_package_info(self):
""" Test the api_package_info function. """
output = self.app.get('/api/package/guake/')
self.assertEqual(output.status_code, 404)
data = json.loads(output.data)
self.assertEqual(
data,
{
"error": "Package: rpms/guake not found",
"output": "notok"
}
)
create_package_acl(self.session)
output = self.app.get('/api/package/guake/')
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(data.keys(), ['output', 'packages'])
self.assertEqual(len(data['packages']), 2)
self.assertEqual(data['output'], 'ok')
self.assertEqual(data['packages'][0]['collection']['branchname'],
'f18')
self.assertEqual(data['packages'][0]['point_of_contact'],
'pingou')
self.assertEqual(data['packages'][0]['package']['name'],
'guake')
self.assertEqual(data['packages'][1]['collection']['branchname'],
'master')
self.assertEqual(data['packages'][1]['point_of_contact'],
'pingou')
self.assertEqual(data['packages'][1]['package']['name'],
'guake')
output = self.app.get('/api/package/?pkgname=guake&branches=master')
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(data.keys(), ['output', 'packages'])
self.assertEqual(len(data['packages']), 1)
self.assertEqual(data['output'], 'ok')
self.assertEqual(
set(data['packages'][0].keys()),
set(['status', 'point_of_contact', 'package', 'collection',
'acls', 'status_change', 'critpath']))
self.assertEqual(
[acl['fas_name'] for acl in data['packages'][0]['acls']],
['pingou', 'pingou', 'pingou', 'toshio', 'ralph',
'group::provenpackager'])
self.assertEqual(data['packages'][0]['collection']['branchname'],
'master')
self.assertEqual(data['packages'][0]['point_of_contact'],
'pingou')
self.assertEqual(data['packages'][0]['package']['name'],
'guake')
output = self.app.get(
'/api/package/?pkgname=guake&branches=master&acls=0')
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(data.keys(), ['output', 'packages'])
self.assertEqual(len(data['packages']), 1)
self.assertEqual(data['output'], 'ok')
self.assertEqual(
set(data['packages'][0].keys()),
set(['status', 'point_of_contact', 'package', 'collection',
'status_change', 'critpath']))
self.assertEqual(data['packages'][0]['collection']['branchname'],
'master')
self.assertEqual(data['packages'][0]['point_of_contact'],
'pingou')
self.assertEqual(data['packages'][0]['package']['name'],
'guake')
output = self.app.get('/api/package/?pkgname=guake&branches=f19')
self.assertEqual(output.status_code, 404)
data = json.loads(output.data)
self.assertEqual(
data,
{
"error": "No package found on these branches: f19",
"output": "notok"
}
)
def test_api_package_list(self):
""" Test the api_package_list function. """
output = self.app.get('/api/packages/guake/')
self.assertEqual(output.status_code, 404)
data = json.loads(output.data)
self.assertEqual(
data,
{
"error": "No packages found for these parameters",
"packages": [],
"output": "notok",
"page_total": 1,
"page": 1,
}
)
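        # Seed the DB: guake and geany with ACLs, plus kernel as a
        # critpath package.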
create_package_acl(self.session)
create_package_critpath(self.session)
output = self.app.get('/api/packages/guake/')
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
sorted(data.keys()),
['output', 'packages', 'page', 'page_total'])
self.assertEqual(len(data['packages']), 1)
self.assertEqual(data['output'], 'ok')
self.assertEqual(
data['packages'][0]['name'], 'guake')
self.assertEqual(
data['packages'][0]['status'], 'Approved')
self.assertEqual(
data['packages'][0]['summary'], 'Top down terminal for GNOME')
output = self.app.get('/api/packages/g*/')
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(len(data['packages']), 2)
self.assertEqual(data['packages'][0]['acls'], [])
self.assertEqual(data['packages'][1]['acls'], [])
output = self.app.get('/api/packages/g*/?count=True')
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
data,
{
"output": "ok",
"packages": 2,
"page": 1,
"page_total": 1,
}
)
        # Check that we do return the ACLs when we ask for them
output = self.app.get('/api/packages/g*/?acls=True')
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
sorted(data.keys()),
['output', 'packages', 'page', 'page_total'])
self.assertEqual(len(data['packages']), 2)
self.assertEqual(data['output'], 'ok')
self.assertEqual(
data['packages'][0]['name'], 'geany')
self.assertEqual(
data['packages'][1]['name'], 'guake')
self.assertNotEqual(data['packages'][0]['acls'], [])
self.assertNotEqual(data['packages'][1]['acls'], [])
self.assertEqual(
data['packages'][0]['acls'][0]['collection']['branchname'],
'f18'
)
self.assertEqual(
data['packages'][1]['acls'][1]['collection']['branchname'],
'master'
)
self.assertEqual(
data['packages'][1]['acls'][0].keys(),
[
'status', 'point_of_contact', 'collection', 'acls',
'critpath', 'status_change',
]
)
output = self.app.get('/api/packages/g*/?limit=abc')
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
sorted(data.keys()),
['output', 'packages', 'page', 'page_total'])
self.assertEqual(len(data['packages']), 2)
self.assertEqual(data['output'], 'ok')
self.assertEqual(
data['packages'][0]['name'], 'geany')
self.assertEqual(
data['packages'][0]['status'], 'Approved')
self.assertEqual(
data['packages'][1]['name'], 'guake')
self.assertEqual(
data['packages'][1]['status'], 'Approved')
output = self.app.get('/api/packages/g*/?limit=5000')
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
sorted(data.keys()),
['output', 'packages', 'page', 'page_total'])
self.assertEqual(len(data['packages']), 2)
self.assertEqual(data['output'], 'ok')
self.assertEqual(
data['packages'][0]['name'], 'geany')
self.assertEqual(
data['packages'][0]['status'], 'Approved')
self.assertEqual(
data['packages'][1]['name'], 'guake')
self.assertEqual(
data['packages'][1]['status'], 'Approved')
output = self.app.get('/api/packages/g*/?critpath=1')
self.assertEqual(output.status_code, 404)
data = json.loads(output.data)
self.assertEqual(
sorted(data.keys()),
['error', 'output', 'packages', 'page', 'page_total'])
self.assertEqual(len(data['packages']), 0)
self.assertEqual(data['output'], 'notok')
output = self.app.get('/api/packages/k*/?critpath=1')
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
sorted(data.keys()),
['output', 'packages', 'page', 'page_total'])
self.assertEqual(len(data['packages']), 1)
self.assertEqual(data['packages'][0]['name'], 'kernel')
self.assertEqual(data['output'], 'ok')
output = self.app.get('/api/packages/g*/?critpath=0')
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
sorted(data.keys()),
['output', 'packages', 'page', 'page_total'])
self.assertEqual(len(data['packages']), 2)
self.assertEqual(data['output'], 'ok')
output = self.app.get('/api/packages/g*/?page=abc')
self.assertEqual(output.status_code, 500)
data = json.loads(output.data)
self.assertEqual(
sorted(data.keys()),
['error', 'output', 'page', 'page_total'])
self.assertEqual(data['error'], 'Wrong page provided')
self.assertEqual(data['output'], 'notok')
output = self.app.get('/api/packages/g*/?orphaned=False')
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
sorted(data.keys()),
['output', 'packages', 'page', 'page_total'])
self.assertEqual(data['output'], 'ok')
self.assertEqual(len(data['packages']), 2)
self.assertEqual(data['packages'][0]['name'], 'geany')
self.assertEqual(data['packages'][1]['name'], 'guake')
output = self.app.get('/api/packages/g*/?orphaned=True')
self.assertEqual(output.status_code, 404)
data = json.loads(output.data)
self.assertEqual(
sorted(data.keys()),
['error', 'output', 'packages', 'page', 'page_total'])
self.assertEqual(
data['error'], 'No packages found for these parameters')
self.assertEqual(data['output'], 'notok')
self.assertEqual(data['packages'], [])
output = self.app.get('/api/packages/?pattern=guake&pattern=geany')
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
sorted(data.keys()),
['output', 'packages', 'page', 'page_total'])
self.assertEqual(data['output'], 'ok')
self.assertEqual(len(data['packages']), 2)
self.assertEqual(data['packages'][0]['name'], 'geany')
self.assertEqual(data['packages'][1]['name'], 'guake')
@patch('pkgdb2.lib.utils')
@patch('pkgdb2.is_admin')
def test_api_package_edit(self, login_func, mock_func):
""" Test the api_package_edit function. """
login_func.return_value = None
# Redirect as you are not admin
user = FakeFasUser()
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/edit/')
self.assertEqual(output.status_code, 302)
user = FakeFasUserAdmin()
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/edit/')
self.assertEqual(output.status_code, 500)
data = json.loads(output.data)
self.assertEqual(
sorted(data),
['error', 'error_detail', 'output']
)
self.assertEqual(
data['error'], "Invalid input submitted")
self.assertEqual(
data['output'], "notok")
self.assertEqual(
sorted(data['error_detail']),
[
"pkgname: This field is required.",
]
)
data = {
'namespace': 'rpms',
'pkgname': 'gnome-terminal',
'summary': 'Terminal emulator for GNOME',
'description': 'Terminal for GNOME...',
'review_url': 'http://bugzilla.redhat.com/1234',
'status': 'Approved',
'upstream_url': 'http://www.gnome.org/',
}
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/edit/', data=data)
self.assertEqual(output.status_code, 500)
data = json.loads(output.data)
self.assertEqual(
data,
{
"error": "No package of this name found",
"output": "notok"
}
)
create_package_acl(self.session)
create_package_critpath(self.session)
# Before edit:
output = self.app.get('/api/package/guake/', data=data)
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
data['packages'][0]['package']['upstream_url'],
'http://guake.org'
)
data = {
'namespace': 'rpms',
'pkgname': 'guake',
'upstream_url': 'http://www.guake.org',
}
# User is not an admin
user = FakeFasUser()
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/edit/', data=data)
self.assertEqual(output.status_code, 302)
# User is an admin
user = FakeFasUserAdmin()
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/edit/', data=data)
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
data,
{
"messages": ['Package "guake" edited'],
"output": "ok"
}
)
# After edit:
output = self.app.get('/api/package/guake/', data=data)
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
data['packages'][0]['package']['upstream_url'],
'http://www.guake.org'
)
@patch('pkgdb2.lib.utils')
@patch('pkgdb2.is_admin')
def test_api_package_critpath(self, login_func, mock_func):
""" Test the api_package_critpath function. """
login_func.return_value = None
# Redirect as you are not admin
user = FakeFasUser()
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/critpath/')
self.assertEqual(output.status_code, 302)
user = FakeFasUserAdmin()
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/critpath/')
self.assertEqual(output.status_code, 500)
data = json.loads(output.data)
self.assertEqual(
sorted(data),
['error', 'error_detail', 'output']
)
self.assertEqual(
data['error'], "Invalid input submitted")
self.assertEqual(
data['output'], "notok")
self.assertEqual(
sorted(data['error_detail']),
[
'branches: This field is required.',
'pkgnames: This field is required.'
]
)
data = {
'namespace': 'rpms',
'pkgnames': 'gnome-terminal',
'branches': 'master'
}
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/critpath/', data=data)
self.assertEqual(output.status_code, 500)
data = json.loads(output.data)
self.assertEqual(
data,
{
"error": "No package found by this name: "
"rpms/gnome-terminal",
"output": "notok"
}
)
create_package_acl(self.session)
create_package_critpath(self.session)
# Before edit:
output = self.app.get('/api/package/guake/', data=data)
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
        for pkg in data['packages']:
            self.assertFalse(pkg['critpath'])
data = {
'namespace': 'rpms',
'pkgnames': 'guake',
'branches': ['master', 'f18'],
}
        # User is an admin - but the request does not update the critpath
user = FakeFasUserAdmin()
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/critpath/', data=data)
self.assertEqual(output.status_code, 500)
data = json.loads(output.data)
self.assertEqual(
data,
{
"error": "Nothing to update",
"output": "notok"
}
)
# Still no update
data = {
'namespace': 'rpms',
'pkgnames': 'guake',
'branches': ['master', 'f18'],
'critpath': False,
}
output = self.app.post('/api/package/critpath/', data=data)
self.assertEqual(output.status_code, 500)
data = json.loads(output.data)
self.assertEqual(
data,
{
"error": "Nothing to update",
"output": "notok"
}
)
data = {
'namespace': 'rpms',
'pkgnames': 'guake',
'branches': ['foobar'],
}
        # User is an admin - but the collection is invalid
user = FakeFasUserAdmin()
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/critpath/', data=data)
self.assertEqual(output.status_code, 500)
data = json.loads(output.data)
self.assertEqual(
data,
{
"error": "No collection found by the name of foobar",
"output": "notok"
}
)
data = {
'namespace': 'rpms',
'pkgnames': 'guake',
'branches': ['master', 'f18'],
'critpath': True,
}
# User is an admin and updating the critpath
user = FakeFasUserAdmin()
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/critpath/', data=data)
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
data,
{
'messages': [
'rpms/guake: critpath updated on master to True',
'rpms/guake: critpath updated on f18 to True'
],
'output': 'ok'
}
)
# After edit:
output = self.app.get('/api/package/rpms/guake/', data=data)
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
for pkg in data['packages']:
self.assertTrue(pkg['critpath'])
@patch('pkgdb2.lib.utils')
@patch('pkgdb2.is_admin')
def test_api_monitor_package(self, login_func, mock_func):
""" Test the api_monitor_package function. """
login_func.return_value = None
user = FakeFasUser()
# No package
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/rpms/guake/monitor/1')
self.assertEqual(output.status_code, 500)
data = json.loads(output.data)
self.assertEqual(
sorted(data),
['error', 'output']
)
self.assertEqual(
data['error'], "No package found by this name")
self.assertEqual(
data['output'], "notok")
create_package_acl(self.session)
create_package_critpath(self.session)
# User is not a packager
user.username = 'Toshio'
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/rpms/guake/monitor/1')
self.assertEqual(output.status_code, 500)
data = json.loads(output.data)
self.assertEqual(
sorted(data),
['error', 'output']
)
self.assertEqual(
data['error'],
"You are not allowed to update the monitor flag on this "
"package")
self.assertEqual(
data['output'], "notok")
# Works
user.username = 'pingou'
with user_set(pkgdb2.APP, user):
# Ensure that GETs show that it is *not* monitored
output = self.app.get('/api/package/rpms/guake/')
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(data['packages'][0]['package']['monitor'], False)
output = self.app.post('/api/package/rpms/guake/monitor/1')
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
sorted(data),
['messages', 'output']
)
self.assertEqual(
data['messages'],
"Monitoring status of rpms/guake set to True")
self.assertEqual(
data['output'], "ok")
output = self.app.post('/api/package/rpms/guake/monitor/1')
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
sorted(data),
['messages', 'output']
)
self.assertEqual(
data['messages'], "Monitoring status un-changed")
self.assertEqual(
data['output'], "ok")
# Ensure that subsequent GETs show that it is monitored
output = self.app.get('/api/package/rpms/guake/')
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(data['packages'][0]['package']['monitor'], True)
# User is not a packager but is admin
user = FakeFasUserAdmin()
user.username = 'Toshio'
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/rpms/guake/monitor/False')
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
sorted(data),
['messages', 'output']
)
self.assertEqual(
data['messages'],
"Monitoring status of rpms/guake set to False")
self.assertEqual(
data['output'], "ok")
@patch('pkgdb2.lib.utils')
@patch('pkgdb2.is_admin')
def test_api_koschei_package(self, login_func, mock_func):
""" Test the api_koschei_package function. """
login_func.return_value = None
user = FakeFasUser()
# No package
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/rpms/guake/koschei/1')
self.assertEqual(output.status_code, 500)
data = json.loads(output.data)
self.assertEqual(
sorted(data),
['error', 'output']
)
self.assertEqual(
data['error'], "No package found by this name")
self.assertEqual(
data['output'], "notok")
create_package_acl(self.session)
create_package_critpath(self.session)
# User is not a packager
user.username = 'Toshio'
user.groups = ['sysadmin']
with user_set(pkgdb2.APP, user):
output = self.app.post(
'/api/package/rpms/guake/koschei/1', follow_redirects=True)
self.assertEqual(output.status_code, 200)
self.assertIn(
'<li class="errors">You must be a packager</li>', output.data)
# Works
user.username = 'pingou'
user.groups = ['packager']
with user_set(pkgdb2.APP, user):
# Ensure that GETs show that it is *not* monitored
output = self.app.get('/api/package/guake/')
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
data['packages'][0]['package']['koschei_monitor'], False)
output = self.app.post('/api/package/rpms/guake/koschei/1')
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
sorted(data),
['messages', 'output']
)
self.assertEqual(
data['messages'],
"Koschei monitoring status of rpms/guake set to True")
self.assertEqual(
data['output'], "ok")
output = self.app.post('/api/package/rpms/guake/koschei/1')
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
sorted(data),
['messages', 'output']
)
self.assertEqual(
data['messages'], "Koschei monitoring status un-changed")
self.assertEqual(
data['output'], "ok")
# Ensure that subsequent GETs show that it is monitored
output = self.app.get('/api/package/rpms/guake/')
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
data['packages'][0]['package']['koschei_monitor'], True)
# User is not a packager but is admin
user = FakeFasUserAdmin()
user.username = 'Toshio'
with user_set(pkgdb2.APP, user):
output = self.app.post('/api/package/rpms/guake/koschei/False')
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
sorted(data),
['messages', 'output']
)
self.assertEqual(
data['messages'],
"Koschei monitoring status of rpms/guake set to False")
self.assertEqual(
data['output'], "ok")
@patch('pkgdb2.lib.utils')
def test_api_package_request(self, utils_mock):
""" Test the api_package_request function. """
# Ensure there are no actions before
actions = pkgdblib.search_actions(self.session)
self.assertEqual(len(actions), 0)
create_collection(self.session)
user = FakeFasUser()
with user_set(pkgdb2.APP, user):
# Incomplete request
data = {
'pkgname': 'guake',
'summary': 'Drop-down terminal for GNOME',
'branches': ['foobar'],
}
output = self.app.post('/api/request/package', data=data)
self.assertEqual(output.status_code, 400)
data = json.loads(output.data)
self.assertEqual(
data,
{
"error": "Invalid input submitted",
"error_detail": [
"branches: 'foobar' is not a valid choice for this field",
"review_url: This field is required."
],
"output": "notok"
}
)
# User not a packager
data = {
'pkgname': 'guake',
'summary': 'Drop-down terminal for GNOME',
'review_url': 'https://bugzilla.redhat.com/450189',
'branches': ['master', 'f18'],
'namespace': 'rpms',
}
output = self.app.post('/api/request/package', data=data)
self.assertEqual(output.status_code, 400)
data = json.loads(output.data)
self.assertEqual(
data,
{
'error': 'User "pingou" is not in the packager group',
'output': 'notok'
}
)
# Working - Asking for 1 branch only but getting `master` as well
utils_mock.get_packagers.return_value = ['pingou', 'ralph']
utils_mock.log.return_value = \
'user: pingou request package: guake on branch <branch>'
data = {
'pkgname': 'guake',
'summary': 'Drop-down terminal for GNOME',
'review_url': 'https://bugzilla.redhat.com/450189',
'branches': ['f18'],
'namespace': 'rpms',
}
output = self.app.post('/api/request/package', data=data)
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
data,
{
'messages': [
'user: pingou request package: guake on branch <branch>',
'user: pingou request package: guake on branch <branch>',
],
'output': 'ok'
}
)
actions = pkgdblib.search_actions(self.session)
self.assertEqual(len(actions), 2)
self.assertEqual(actions[0].action, 'request.package')
self.assertEqual(actions[1].action, 'request.package')
self.assertEqual(actions[0].collection.branchname, 'f18')
self.assertEqual(actions[1].collection.branchname, 'master')
self.assertEqual(actions[0].package, None)
self.assertEqual(actions[1].package, None)
self.assertEqual(actions[0].info_data['pkg_name'], 'guake')
self.assertEqual(actions[1].info_data['pkg_name'], 'guake')
# Check with providing a bug number instead of the full URL
with user_set(pkgdb2.APP, user):
utils_mock.log.return_value = \
'user: pingou request package: terminator on branch master'
data = {
'pkgname': 'terminator',
'summary': 'Terminal for GNOME',
'review_url': '123',
'branches': ['master'],
'namespace': 'rpms',
'monitoring_status': 'True',
}
output = self.app.post('/api/request/package', data=data)
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
data,
{
'messages': [
'user: pingou request package: terminator on branch master',
],
'output': 'ok'
}
)
actions = pkgdblib.search_actions(self.session)
self.assertEqual(len(actions), 3)
action = pkgdblib.get_admin_action(self.session, 3)
self.assertEqual(action.action, 'request.package')
self.assertEqual(action.collection.branchname, 'master')
self.assertEqual(action.package, None)
self.assertEqual(action.info_data['pkg_name'], 'terminator')
self.assertEqual(
action.info_data['pkg_review_url'],
'https://bugzilla.redhat.com/123'
)
        # Check with a URL not matching expectations
with user_set(pkgdb2.APP, user):
utils_mock.log.return_value = \
'user: pingou request package: foo on branch master'
data = {
'pkgname': 'foo',
'summary': 'bar',
'review_url': 'http://bz.rh.c/123',
'branches': ['master'],
'namespace': 'rpms',
}
output = self.app.post('/api/request/package', data=data)
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
data,
{
'messages': [
'user: pingou request package: foo on branch master',
],
'output': 'ok'
}
)
actions = pkgdblib.search_actions(self.session)
self.assertEqual(len(actions), 4)
action = pkgdblib.get_admin_action(self.session, 4)
self.assertEqual(action.action, 'request.package')
self.assertEqual(action.collection.branchname, 'master')
self.assertEqual(action.package, None)
self.assertEqual(action.info_data['pkg_name'], 'foo')
self.assertEqual(
action.info_data['pkg_review_url'], 'http://bz.rh.c/123')
@patch('pkgdb2.lib.utils')
def test_api_branch_request(self, utils_mock):
""" Test the api_branch_request function. """
# Ensure there are no actions before
actions = pkgdblib.search_actions(self.session)
self.assertEqual(len(actions), 0)
create_package_acl(self.session)
user = FakeFasUser()
with user_set(pkgdb2.APP, user):
# Invalid package
data = {
'branches': ['foobar'],
}
output = self.app.post('/api/request/branch/rpms/foo', data=data)
self.assertEqual(output.status_code, 404)
data = json.loads(output.data)
self.assertEqual(
data,
{
"error": "No package found: rpms/foo",
"output": "notok"
}
)
# Invalid request
data = {
'branches': ['foobar'],
}
output = self.app.post('/api/request/branch/rpms/guake', data=data)
self.assertEqual(output.status_code, 400)
data = json.loads(output.data)
self.assertEqual(
data,
{
"error": "Invalid input submitted",
"error_detail": [
"branches: 'foobar' is not a valid choice for this field"
],
"output": "notok"
}
)
# User not a packager
data = {
'branches': ['f17'],
}
output = self.app.post('/api/request/branch/rpms/guake', data=data)
self.assertEqual(output.status_code, 400)
data = json.loads(output.data)
self.assertEqual(
data,
{
'error': 'User "pingou" is not in the packager group',
'output': 'notok'
}
)
# Working - Fedora branches are directly created
utils_mock.get_packagers.return_value = ['pingou', 'ralph']
utils_mock.log.return_value = \
'user: pingou request package: guake on branch <branch>'
data = {
'branches': ['f17'],
}
output = self.app.post('/api/request/branch/rpms/guake', data=data)
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
data,
{
'messages': [
'Branch f17 created for user pingou',
],
'output': 'ok'
}
)
actions = pkgdblib.search_actions(self.session)
self.assertEqual(len(actions), 0)
# Working - EPEL branches go through validation
data = {
'branches': ['el6'],
}
output = self.app.post('/api/request/branch/rpms/guake', data=data)
self.assertEqual(output.status_code, 200)
data = json.loads(output.data)
self.assertEqual(
data,
{
"messages": [
"Branch el6 requested for user pingou"
],
"output": "ok"
}
)
actions = pkgdblib.search_actions(self.session)
self.assertEqual(len(actions), 1)
self.assertEqual(actions[0].action, 'request.branch')
self.assertEqual(actions[0].collection.branchname, 'el6')
self.assertEqual(actions[0].package.name, 'guake')
self.assertEqual(actions[0].info_data, {})
if __name__ == '__main__':
SUITE = unittest.TestLoader().loadTestsFromTestCase(FlaskApiPackagesTest)
unittest.TextTestRunner(verbosity=2).run(SUITE)
|
#!/usr/bin/env python
#coding=utf-8
import json
import base64
import urllib2
import ConfigParser
import subprocess
from baidu_nlu.srv import *
import rospy
'''
The Baidu ASR service requires registration at http://yuyin.baidu.com.
Create your application there; that gives you your CUID, API key and
secret key. Write all three into the nlu.cfg file like this:
[baidu]
CUID = your cuid
API_KEY = your application key
SECRET_KEY = your secret key
'''
def get_config():
    config = ConfigParser.ConfigParser()
    config.read('nlu.cfg')
    return (config.get('baidu', 'CUID'),
            config.get('baidu', 'API_KEY'),
            config.get('baidu', 'SECRET_KEY'))
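# Exchange the API key/secret for an OAuth2 access token via the
# client_credentials grant; the token is later sent as the `token`
# parameter of the ASR request.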
def get_baidu_auth():
CUID,API_KEY,SECRET_KEY = get_config()
auth_url = 'https://openapi.baidu.com/oauth/2.0/token?grant_type=client_credentials&client_id='+API_KEY+'&client_secret='+SECRET_KEY
res = json.loads(download(auth_url))
return res['access_token']
def download(link, data=None, headers=None):
    # `headers=None` avoids the shared mutable default-argument pitfall.
    try:
        req = urllib2.Request(link, data, headers or {})
        response = urllib2.urlopen(req, None, 15)
        res = response.read()
    except Exception:
        # Network error or timeout; callers must handle the None result.
        return None
    return res
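# Record 3 seconds of 16 kHz mono audio with arecord, base64-encode it and
# POST it as JSON to the Baidu speech API. Relies on the module-level
# `access_token` set in the __main__ block below.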
def asr_test(msg):
CUID,API_KEY,SECRET_KEY = get_config()
subprocess.call('''arecord -r 16000 -f S16_LE -D 'plughw:1,0' -d 3 > in.wav''',shell=True)
with open('in.wav','rb') as fr:
content = fr.read()
base_data = base64.b64encode(content)
params = {}
params['format'] = 'wav'
params['rate'] = 16000
params['channel'] = 1
params['token'] = access_token
params['cuid'] = CUID
params['len'] = len(content)
params['speech'] = base_data
data = json.dumps(params)
headers = {
"Content-Length":len(data),
"Content-Type":"application/json; charset=utf-8",
}
url ='http://vop.baidu.com/server_api'
res = json.loads(download(url,data,headers))
return res['result'][0].encode('utf-8')
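# ROS service callback: run one record/recognize cycle and wrap the
# transcript in the ASR service response.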
def handle_asr(req):
    print 'request is', req.controller_json
    return ASRResponse(asr_test(req.controller_json))
def asr_server():
rospy.init_node('asr_server')
s = rospy.Service('asr',ASR,handle_asr)
print 'ready to listen'
rospy.spin()
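# Fetch the OAuth token once at startup; asr_test() reads it as a module
# global on every request.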
if __name__ == '__main__':
access_token = get_baidu_auth()
asr_server()
|
"""
# Copyright (C) 2016 OpenMotics BV
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from __future__ import absolute_import
import time
import unittest
import mock
import gateway.hal.master_controller_classic
import master.classic.master_api
import master.classic.master_communicator
from gateway.dto import InputDTO, OutputDTO, OutputStatusDTO
from gateway.dto.input import InputStatusDTO
from gateway.hal.master_controller_classic import MasterClassicController
from gateway.hal.master_event import MasterEvent
from gateway.pubsub import PubSub
from ioc import INJECTED, Inject, Scope, SetTestMode, SetUpTestInjections
from master.classic import master_api
from master.classic.eeprom_controller import EepromController
from master.classic.eeprom_models import InputConfiguration
from master.classic.master_communicator import BackgroundConsumer
from master.classic.validationbits import ValidationBitStatus
from master.classic.master_communicator import MasterCommunicator
class MasterClassicControllerTest(unittest.TestCase):
""" Tests for MasterClassicController. """
@classmethod
def setUpClass(cls):
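        # Enable the IOC test mode so dependencies can be injected per test.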
SetTestMode()
def test_input_module_type(self):
input_data = {'id': 1, 'module_type': 'I'}
controller = get_classic_controller_dummy([
InputConfiguration.deserialize(input_data)
])
data = controller.get_input_module_type(1)
self.assertEqual(data, 'I')
def test_load_input(self):
input_data = {'id': 1, 'module_type': 'I', 'name': 'foo', 'action': 255,
'basic_actions': '', 'invert': 255, 'can': ' '}
controller = get_classic_controller_dummy([
InputConfiguration.deserialize(input_data)
])
data = controller.load_input(1)
self.assertEqual(data.id, 1)
def test_load_input_with_invalid_type(self):
input_data = {'id': 1, 'module_type': 'O', 'name': 'foo', 'action': 255,
'basic_actions': '', 'invert': 255, 'can': ' '}
controller = get_classic_controller_dummy([
InputConfiguration.deserialize(input_data)
])
self.assertRaises(TypeError, controller.load_input, 1)
def test_load_inputs(self):
input_data1 = {'id': 1, 'module_type': 'I', 'name': 'foo', 'action': 255,
'basic_actions': '', 'invert': 255, 'can': ' '}
input_data2 = {'id': 2, 'module_type': 'I', 'name': 'foo', 'action': 255,
'basic_actions': '', 'invert': 255, 'can': ' '}
controller = get_classic_controller_dummy([
InputConfiguration.deserialize(input_data1),
InputConfiguration.deserialize(input_data2)
])
inputs = controller.load_inputs()
self.assertEqual([x.id for x in inputs], [1, 2])
def test_load_inputs_skips_invalid_type(self):
input_data1 = {'id': 1, 'module_type': 'I', 'name': 'foo', 'action': 255,
'basic_actions': '', 'invert': 255, 'can': ' '}
input_data2 = {'id': 2, 'module_type': 'O', 'name': 'foo', 'action': 255,
'basic_actions': '', 'invert': 255, 'can': ' '}
controller = get_classic_controller_dummy([
InputConfiguration.deserialize(input_data1),
InputConfiguration.deserialize(input_data2)
])
inputs = controller.load_inputs()
self.assertEqual([x.id for x in inputs], [1])
def test_input_event_consumer(self):
with mock.patch.object(gateway.hal.master_controller_classic, 'BackgroundConsumer',
return_value=None) as consumer:
controller = get_classic_controller_dummy()
controller._register_version_depending_background_consumers()
expected_call = mock.call(master.classic.master_api.input_list((3, 143, 102)), 0, mock.ANY)
self.assertIn(expected_call, consumer.call_args_list)
def test_subscribe_input_events(self):
consumer_list = []
def new_consumer(*args):
consumer = BackgroundConsumer(*args)
consumer_list.append(consumer)
return consumer
subscriber = mock.Mock()
with mock.patch.object(gateway.hal.master_controller_classic, 'BackgroundConsumer',
side_effect=new_consumer) as new_consumer:
controller = get_classic_controller_dummy()
pubsub = get_pubsub()
controller._register_version_depending_background_consumers()
controller._input_config = {1: InputDTO(id=1)} # TODO: cleanup
pubsub.subscribe_master_events(PubSub.MasterTopics.INPUT, subscriber.callback)
new_consumer.assert_called()
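            # Deliver a raw input event; index -2 is assumed to pick the
            # input-event consumer among the consumers registered above.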
consumer_list[-2].deliver({'input': 1})
pubsub._publish_all_events()
            try:
                consumer_list[-2]._consume()
            except Exception:
                pass  # Just ensure it has consumed at least once
expected_event = MasterEvent.deserialize({'type': 'INPUT_CHANGE',
'data': {'state': InputStatusDTO(id=1, status=True)}})
subscriber.callback.assert_called_with(expected_event)
def test_load_input_status(self):
controller = get_classic_controller_dummy()
def _do_command(cmd, fields=None, **kwargs):
if cmd == master_api.number_of_io_modules():
return {'in': 1}
elif cmd == master_api.read_input_module(controller._master_version):
return {'input_status': 0b10110111}
controller._master_communicator.do_command.side_effect = _do_command
with mock.patch.object(MasterClassicController, 'get_input_module_type') as get_input_module_type:
get_input_module_type.side_effect = lambda x: 'I'
input_statuses = controller.load_input_status()
self.assertListEqual([InputStatusDTO(0, status=True),
InputStatusDTO(1, status=True),
InputStatusDTO(2, status=True),
InputStatusDTO(3, status=False),
InputStatusDTO(4, status=True),
InputStatusDTO(5, status=True),
InputStatusDTO(6, status=False),
InputStatusDTO(7, status=True)],
input_statuses)
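        # The assertion above follows from input i mapping to bit i of
        # input_status: 0b10110111 read LSB-first gives inputs 0..7 ->
        # [1, 1, 1, 0, 1, 1, 0, 1].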
def test_master_output_event(self):
events = []
def _on_event(master_event):
events.append(master_event)
classic = get_classic_controller_dummy()
pubsub = get_pubsub()
pubsub.subscribe_master_events(PubSub.MasterTopics.OUTPUT, _on_event)
classic._output_config = {0: OutputDTO(id=0),
1: OutputDTO(id=1),
2: OutputDTO(id=2, room=3)}
pubsub._publish_all_events()
events = []
classic._on_master_output_event({'outputs': [(0, 0), (2, 5)]})
pubsub._publish_all_events()
self.assertEqual(events, [MasterEvent('OUTPUT_STATUS', {'state': OutputStatusDTO(id=0, status=True, dimmer=0)}),
MasterEvent('OUTPUT_STATUS', {'state': OutputStatusDTO(id=1, status=False)}),
MasterEvent('OUTPUT_STATUS', {'state': OutputStatusDTO(id=2, status=True, dimmer=5)})])
def test_validation_bits_passthrough(self):
        # Important note: bits are ordered per byte, so the sequence is like:
# [[7, 6, 5, 4, 3, 2, 1, 0], [15, 14, 13, 12, 11, 10, 9, 8], [23, 22, ...], ...]
bit_data = [0b00000010, 0b00000000, 0b00000000, 0b00000000,
0b00000000, 0b00000000, 0b00000000, 0b00000000,
0b00000000, 0b00000000, 0b00000000, 0b00000000,
0b00000000, 0b00000000, 0b00000000, 0b00000000,
0b00000000, 0b00000000, 0b00000000, 0b00000000,
0b00000000, 0b00000000, 0b00000000, 0b00000000,
0b00000000, 0b00000000, 0b00000000, 0b00000000,
0b00000000, 0b00000000, 0b00000000, 0b01000000]
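        # For example, bit 1 sits in byte 0 (0b00000010 -> index 1) and bit 254
        # in byte 31 (0b01000000 -> index 31 * 8 + 6 = 254), matching the
        # expected_bits assertions below.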
def _do_command(cmd, fields):
start = fields['number'] // 8
return {'data': bit_data[start:start + 11]}
classic = get_classic_controller_dummy()
classic._master_communicator.do_command = _do_command
classic._master_version = (0, 0, 0)
pubsub = get_pubsub()
bits = classic.load_validation_bits()
self.assertIsNone(bits)
classic._master_version = (3, 143, 102)
bits = classic.load_validation_bits()
expected_bits = {i: False for i in range(256)}
expected_bits[1] = True
expected_bits[254] = True
self.assertEqual(expected_bits, bits)
events = []
def _on_event(master_event):
if master_event.type == MasterEvent.Types.OUTPUT_STATUS:
events.append(master_event.data)
pubsub.subscribe_master_events(PubSub.MasterTopics.OUTPUT, _on_event)
classic._validation_bits = ValidationBitStatus(on_validation_bit_change=classic._validation_bit_changed)
classic._output_config = {0: OutputDTO(0, lock_bit_id=5)}
pubsub._publish_all_events()
classic._refresh_validation_bits()
classic._on_master_validation_bit_change(5, True)
classic._on_master_validation_bit_change(6, True)
classic._on_master_validation_bit_change(5, False)
pubsub._publish_all_events()
self.assertEqual(events, [{'state': OutputStatusDTO(id=0, locked=False)},
{'state': OutputStatusDTO(id=0, locked=True)},
{'state': OutputStatusDTO(id=0, locked=False)}])
def test_module_discover(self):
subscriber = mock.Mock()
subscriber.callback.return_value = None
with mock.patch.object(MasterClassicController, '_synchronize') as synchronize:
controller = get_classic_controller_dummy([])
pubsub = get_pubsub()
invalidate = controller._eeprom_controller.invalidate_cache.call_args_list
try:
controller.start()
controller.module_discover_start(30)
time.sleep(0.2)
assert len(synchronize.call_args_list) == 1
assert len(invalidate) == 0
pubsub.subscribe_master_events(PubSub.MasterTopics.MODULE, subscriber.callback)
controller.module_discover_stop()
pubsub._publish_all_events()
time.sleep(0.2)
assert len(invalidate) == 1
assert len(subscriber.callback.call_args_list) == 1
event = subscriber.callback.call_args_list[0][0][0]
assert event.type == MasterEvent.Types.MODULE_DISCOVERY
finally:
controller.stop()
def test_module_discover_timeout(self):
controller = get_classic_controller_dummy()
with mock.patch.object(controller, 'module_discover_stop') as stop:
controller.module_discover_start(0)
time.sleep(0.2)
stop.assert_called_with()
def test_master_maintenance_event(self):
controller = get_classic_controller_dummy()
pubsub = get_pubsub()
with mock.patch.object(controller._eeprom_controller, 'invalidate_cache') as invalidate:
master_event = MasterEvent(MasterEvent.Types.MAINTENANCE_EXIT, {})
pubsub.publish_master_event(PubSub.MasterTopics.MAINTENANCE, master_event)
pubsub._publish_all_events()
invalidate.assert_called()
def test_master_eeprom_event(self):
controller = get_classic_controller_dummy()
controller._shutters_last_updated = 1603178386.0
pubsub = get_pubsub()
master_event = MasterEvent(MasterEvent.Types.EEPROM_CHANGE, {})
pubsub.publish_master_event(PubSub.MasterTopics.EEPROM, master_event)
pubsub._publish_all_events()
assert controller._shutters_last_updated == 0.0
def test_all_lights_off(self):
controller = get_classic_controller_dummy()
controller.set_all_lights('OFF')
controller._master_communicator.do_command.assert_called_with(mock.ANY,
fields={'action_type': 163, 'action_number': 0},
timeout=2)
controller.set_all_lights('ON')
controller._master_communicator.do_command.assert_called_with(mock.ANY,
fields={'action_type': 172, 'action_number': 255},
timeout=2)
controller.set_all_lights('TOGGLE')
controller._master_communicator.do_command.assert_called_with(mock.ANY,
fields={'action_type': 173, 'action_number': 255},
timeout=2)
def test_set_input(self):
controller = get_classic_controller_dummy()
controller.set_input(100, True)
controller._master_communicator.do_command.assert_called_with(mock.ANY,
fields={'action_type': 68, 'action_number': 100},
timeout=2)
controller.set_input(100, False)
controller._master_communicator.do_command.assert_called_with(mock.ANY,
fields={'action_type': 69, 'action_number': 100},
timeout=2)
with self.assertRaises(ValueError):
controller.set_input(255, True)
@Scope
def get_classic_controller_dummy(inputs=None):
communicator_mock = mock.Mock(spec=MasterCommunicator)
eeprom_mock = mock.Mock(EepromController)
eeprom_mock.invalidate_cache.return_value = None
eeprom_mock.read.return_value = inputs[0] if inputs else []
eeprom_mock.read_all.return_value = inputs
SetUpTestInjections(configuration_controller=mock.Mock(),
master_communicator=communicator_mock,
eeprom_controller=eeprom_mock,
pubsub=PubSub())
controller = MasterClassicController()
controller._master_version = (3, 143, 102)
return controller
@Inject
def get_pubsub(pubsub=INJECTED):
return pubsub
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#########################################################################
# File Name: numpyIntro.py
# Author: lpqiu
# mail: qlp_1018@126.com
# Created Time: 2014-09-06 (Saturday) 16:33:05
#########################################################################
import numpy as np
def triangleWave(x, c, c0, hc=1.0):
x = x - int(x)
ret = 0
if x >= c:
ret = 0
elif x < c0:
ret = (hc/c0)*x
else:
        ret = (hc/(c0 - c))*(x - c)
return ret
def triangleFunc(c, c0, hc=1.0):
    # The original signature took an unused leading `x` argument, which
    # silently shifted the positional arguments at the call site below
    # (triangleFunc(0.6, 0.4, 1.0) bound c=0.4, c0=1.0 instead of c=0.6,
    # c0=0.4). Reuse triangleWave instead of duplicating its body.
    return np.frompyfunc(lambda t: triangleWave(t, c, c0, hc), 1, 1)
if __name__=="__main__":
x = np.linspace(0, 2, 1000)
y = np.array([triangleWave(t, 0.6, 0.4, 1.0) for t in x])
    triangleFun = np.frompyfunc(lambda t: triangleWave(t, 0.6, 0.4, 1.0), 1, 1)
y2 = triangleFun(x)
y3 = triangleFunc(0.6, 0.4, 1.0)(x)
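    # Sanity check (added; assumes the argument-forwarding fixes above): all
    # three constructions should now produce identical samples. frompyfunc
    # returns object arrays, hence the astype(float) conversion.
    assert np.allclose(y, y2.astype(float))
    assert np.allclose(y, y3.astype(float))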
#!/usr/bin/env python
################################################
# Prey Configurator for Linux
# By Tomas Pollak
# (c) 2010 - Fork Ltd. (usefork.com)
################################################
# if having trouble with the GTK theme as root, do this:
# sudo ln -s ~/.themes/ /root/.themes
################################################
# base includes
################################################
import pygtk
pygtk.require("2.0")
import gtk
import os
# from xml.dom.minidom import parseString
import re
import urllib
app_name = 'prey-config'
lang_path = 'lang'
script_path = os.sys.path[0]
################################################
# gettext localization
################################################
import locale
import gettext
# locale.setlocale(locale.LC_ALL, '')
# locale.bindtextdomain(app_name, lang_path)
gettext.bindtextdomain(app_name, lang_path)
gettext.textdomain(app_name)
_ = gettext.gettext
################################################
# vars and such
################################################
PREY_PATH = '/usr/share/prey'
CONFIG_FILE = PREY_PATH + '/config'
CONTROL_PANEL_URL = 'http://control.preyproject.com'
CONTROL_PANEL_URL_SSL = 'https://control.preyproject.com'
GUEST_ACCOUNT_NAME = 'guest_account'
VERSION = os.popen("cat " + PREY_PATH + "/version 2> /dev/null").read().strip().replace('version=', '').replace("'",'')
PAGES = ['report_options', 'control_panel_options', 'new_user', 'existing_user', 'existing_device', 'standalone_options']
class PreyConfigurator(object):
################################################
# helper functions
################################################
def get(self, name):
return self.root.get_object(name)
def text(self, name):
return self.get(name).get_text()
def checkbox(self, name):
if self.get(name).get_active() == True:
return 'y'
else:
return 'n'
################################################
# validations
################################################
def validate_email(self, string):
if len(string) > 7:
if re.match("^.+\\@(\\[?)[a-zA-Z0-9\\-\\.]+\\.([a-zA-Z]{2,3}|[0-9]{1,3})(\\]?)$", string) != None:
return True
return False
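    # Illustrative behaviour: 'user@example.com' passes, 'foo@bar' fails the
    # pattern, and the len() > 7 gate also rejects short-but-valid addresses
    # such as 'a@b.com'.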
def validate_fields(self):
if self.text('user_name') == '':
self.show_alert(_("Empty name!"), _("Please type in your name."))
return False
if self.validate_email(self.text('email')) == False:
self.show_alert(_("Invalid email"), _("Please make sure the email address you typed is valid."))
return False
if len(self.text('password')) < 6:
self.show_alert(_("Bad password"), _("Password should contain at least 6 chars. Please try again."))
return False
elif self.text('password') != self.text('password_confirm'):
self.show_alert(_("Passwords don't match"), _("Please make sure both passwords match!"))
return False
return True
################################################
# dialogs
################################################
def show_alert(self, title, message, quit = False):
dialog = gtk.MessageDialog(
parent = None,
flags = gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
type = gtk.MESSAGE_INFO,
buttons = gtk.BUTTONS_OK,
message_format = message)
dialog.set_title(title)
if quit == True:
dialog.connect('response', lambda dialog, response: gtk.main_quit())
else:
dialog.connect('response', lambda dialog, response: dialog.destroy())
self.center_dialog(dialog)
dialog.show()
def show_question(self, title, message):
dialog = gtk.MessageDialog(
parent = None,
flags = gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
type = gtk.MESSAGE_QUESTION,
buttons = gtk.BUTTONS_YES_NO,
message_format = message)
dialog.set_title(title)
self.center_dialog(dialog)
response = dialog.run()
dialog.destroy()
return response
def show_about(self):
dialog = self.get('about_prey_config')
self.center_dialog(dialog)
dialog.show()
def close_about(self, dialog, response):
dialog.hide()
def center_dialog(self, dialog):
if 'window' in self.__dict__:
dialog.set_transient_for(self.window)
dialog.set_position(gtk.WIN_POS_CENTER_ON_PARENT)
################################################
# window and widget management
################################################
def get_page_name(self):
return PAGES[self.pages.get_current_page()]
def toggle_pg3_next_apply(self, button):
button_next = self.get('button_next')
button_apply = self.get('button_apply')
if self.get('use_existing_device').get_active() == False:
button_next.hide()
button_apply.show()
button_apply.grab_default()
else:
button_apply.hide()
button_next.show()
button_next.grab_default()
def next_page(self, button):
page_name = self.get_page_name()
increment = 1
if page_name == 'control_panel_options' and self.get('new_user_option').get_active() == False:
increment = 2
if page_name == 'report_options':
if self.get('reporting_mode_cp').get_active() == True:
if self.current_api_key != '':
response = self.show_question(_("Hold your horses!"), _("Your device seems to be already synchronized with the Control Panel! Do you want to re-setup your account? (Not recommended)"))
if response == gtk.RESPONSE_NO:
return
else:
increment = 5
        if page_name == 'existing_user': # then we are going to select an existing device
if not self.get_existing_user(True):
# login didn't work, so don't go to next page
return
self.pages.set_current_page(self.pages.get_current_page() + increment)
self.toggle_buttons(button, None, 1)
def prev_page(self, button):
page_name = self.get_page_name()
decrement = 1
if page_name == 'existing_user':
decrement = 2
elif page_name == 'standalone_options':
decrement = 5
if self.pages.get_current_page() != 0:
self.pages.set_current_page(self.pages.get_current_page() - decrement)
self.toggle_buttons(button, None, 1)
def toggle_buttons(self, button, tab, tab_number):
button_prev = self.get('button_prev')
button_next = self.get('button_next')
button_apply = self.get('button_apply')
if tab_number == 0: #main settings tab
button_prev.hide()
button_next.hide()
button_apply.show()
self.hide_ssl()
else:
page_name = self.get_page_name()
if page_name == 'report_options':
button_prev.hide()
else:
button_prev.show()
if page_name == 'report_options' or page_name == 'control_panel_options' or (page_name == 'existing_user' and self.get('use_existing_device').get_active() == True):
button_apply.hide()
button_next.show()
button_next.grab_default()
else:
button_next.hide()
button_apply.show()
button_apply.grab_default()
if self.get_page_name() == 'new_user' or self.get_page_name() == 'existing_user':
self.show_ssl()
else:
self.hide_ssl()
def hide_ssl(self):
self.get('icon_ssl').hide()
self.get('lbl_ssl').hide()
def show_ssl(self):
self.get('icon_ssl').show()
self.get('lbl_ssl').show()
def set_default_action(self,button,ctrl):
button_cancel = self.get('button_cancel')
cancel_has_default = button_cancel.flags() & gtk.HAS_DEFAULT
button_prev = self.get('button_prev')
prev_has_default = button_prev.flags() & gtk.HAS_DEFAULT
button_next = self.get('button_next')
button_apply = self.get('button_apply')
if not cancel_has_default and not prev_has_default:
if button_next.flags() & gtk.VISIBLE:
button_next.grab_default()
else:
button_apply.grab_default()
def ensure_visible(self,widget,event): #ensure the widget focused is visible in the scroll window
self.get('delay').set_name('delay')
self.get('extended_headers').set_name('extended_headers')
widget_name = widget.get_name()
scrollwindow = self.get('main_settings_scrollwindow')
internal_height = self.get('main_settings').get_size()[1]
port_height = scrollwindow.allocation.height
port_vadjust = scrollwindow.get_vadjustment()
port_posn = port_vadjust.value
widget_posn = widget.allocation.y
widget_height = widget.allocation.height
if (widget_posn - port_posn) >= 0 and (widget_posn + widget_height - port_posn) <= port_height:
#widget is fully visible (even if its description or icon is not), so do nothing
return False
# for now we know there are only two possible hidden widgets so we scroll all the way up or all the way down
# if we add options to this page we will have to scroll differently
if widget_name == 'delay':
#scroll to top
port_vadjust.set_value(0)
elif widget_name == 'extended_headers':
#scroll to bottom
port_vadjust.set_value(internal_height - port_height)
return True
def key_pressed(self, widget, event):
# show about dialog on F1 keypress
if (event.keyval == gtk.keysyms.F1) \
and (event.state & gtk.gdk.CONTROL_MASK) == 0 \
and (event.state & gtk.gdk.SHIFT_MASK) == 0:
self.show_about()
return True
return False
################################################
# setting getting
################################################
def prey_exists(self):
if not os.path.exists(PREY_PATH + '/core'):
self.show_alert(_("Prey not installed"), _("Couldn't find a Prey installation on this system. Sorry."), True)
else:
return True
def is_config_writable(self):
command = 'if [ ! -w "'+PREY_PATH+'/config" ]; then echo 1; fi'
no_access = os.popen(command).read().strip()
if no_access == '1':
self.show_alert(_("Unauthorized"), _("You don't have access to manage Prey's configuration. Sorry."), True)
else:
return True
def get_setting(self, var):
command = 'grep \''+var+'=\' '+CONFIG_FILE+' | sed "s/'+var+'=\'\(.*\)\'/\\1/"'
return os.popen(command).read().strip()
def get_current_settings(self):
self.current_delay = os.popen("crontab -l | grep prey | cut -c 3-4").read()
if not self.current_delay: self.current_delay = 20
self.current_auto_connect = self.get_setting('auto_connect')
self.current_extended_headers = self.get_setting('extended_headers')
self.current_guest_account = self.guest_account_exists()
self.current_lang = self.get_setting('lang')
self.current_check_url = self.get_setting('check_url')
self.current_post_method = self.get_setting('post_method')
self.current_api_key = self.get_setting('api_key')
self.current_device_key = self.get_setting('device_key')
self.current_mail_to = self.get_setting('mail_to')
self.current_smtp_server = self.get_setting('smtp_server')
self.current_smtp_username = self.get_setting('smtp_username')
def guest_account_exists(self):
result = os.popen('id ' + GUEST_ACCOUNT_NAME + ' 2> /dev/null').read()
        # 'id' prints a line starting with 'uid=' when the account exists; the
        # original `if result.find("uid"):` was truthy for -1 (not found) and
        # falsy for a match at position 0, so it worked only by coincidence.
        return result.find("uid") != -1
def toggle_guest_account(self, enabled):
if enabled:
# create user and leave password blank
os.system("useradd -m " + GUEST_ACCOUNT_NAME + "; passwd -d " + GUEST_ACCOUNT_NAME)
# Authorize login with no passwords in gdm
os.system("sed -i 's/PasswordRequired=false/#PasswordRequired=false/' /etc/gdm/gdm.conf")
# Authorize login with no passwords in pam
os.system("sed -i 's/nullok_secure/nullok/' /etc/pam.d/common-auth")
else:
os.system("userdel -r " + GUEST_ACCOUNT_NAME)
os.system("sed -i 's/#PasswordRequired=false/PasswordRequired=false/' /etc/gdm/gdm.conf")
os.system("sed -i 's/nullok/nullok_secure/' /etc/pam.d/common-auth")
def display_real_settings(self):
self.get('delay').set_value(int(self.current_delay))
self.get('guest_account').set_active(self.current_guest_account)
if self.current_auto_connect == 'y':
self.get('auto_connect').set_active(True)
if self.current_extended_headers == 'y':
self.get('extended_headers').set_active(True)
self.get('check_url').set_text(self.current_check_url)
self.get('mail_to').set_text(self.current_mail_to)
self.get('smtp_server').set_text(self.current_smtp_server)
self.get('smtp_username').set_text(self.current_smtp_username)
if self.current_post_method == 'email':
self.get('reporting_mode_standalone').set_active(True)
def check_if_configured(self):
if self.current_post_method == 'http' and self.current_api_key == '':
self.show_alert(_('Welcome!'), _("It seems this is the first time you run this setup. Please set up your reporting method now, otherwise Prey won't work!"))
################################################
# setting settings
################################################
def save(self, param, value):
if param == 'check_url': value = value.replace('/', '\/')
command = 'sed -i -e "s/'+param+'=\'.*\'/'+param+'=\''+value+'\'/" '+ CONFIG_FILE
os.system(command)
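        # For instance (illustrative values), save('api_key', 'abc123') runs:
        #   sed -i -e "s/api_key='.*'/api_key='abc123'/" /usr/share/prey/config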
def apply_settings(self, button):
self.get('button_apply').set_label(_("Saving..."))
if self.get("main_tabs").get_current_page() == 0: # main settings page
self.apply_main_settings()
else:
page_name = self.get_page_name()
if page_name == 'new_user':
if self.validate_fields():
self.create_user()
elif page_name == "existing_user": # this is an apply event, so we are creating a new device (no "advanced" device selection)
self.get_existing_user(False)
elif page_name == "existing_device":
self.apply_device_settings()
elif page_name == "standalone_options":
self.apply_standalone_settings()
self.get('button_apply').set_label('gtk-apply')
def apply_main_settings(self):
# save('lang', text('lang'))
self.save('auto_connect', self.checkbox('auto_connect'))
self.save('extended_headers', self.checkbox('extended_headers'))
if((self.checkbox('guest_account') == 'y') != self.current_guest_account):
self.toggle_guest_account(self.checkbox('guest_account') == 'y')
# check and change the crontab interval
new_delay = self.get('delay').get_value_as_int()
if new_delay != int(self.current_delay):
# print 'Updating delay in crontab...'
os.system('(crontab -l | grep -v prey; echo "*/'+str(new_delay)+' * * * * /usr/share/prey/prey.sh > /var/log/prey.log") | crontab -')
        # The original compared the bound method `self.check_if_configured`
        # to False (always False); re-check the "not yet configured"
        # condition directly instead.
        if self.current_post_method == 'http' and self.current_api_key == '':
self.show_alert(_("All good."), _("Configuration saved. Remember you still need to set up your posting method, otherwise Prey won't work!"))
else:
self.show_alert(_("All good."), _("Configuration saved!"), True)
def apply_control_panel_settings(self):
if self.current_post_method != 'http':
self.save('post_method', 'http')
if self.current_check_url != CONTROL_PANEL_URL:
self.save('check_url', CONTROL_PANEL_URL)
# we could eventually use the email as a checking method to remove prey
# i.e. "under which email was this account set up?"
# self.save('mail_to', self.email)
self.save('api_key', self.api_key)
if self.device_key != "":
self.save('device_key', self.device_key)
def apply_standalone_settings(self):
if self.current_post_method != 'email':
self.save('post_method', 'email')
self.save('check_url', self.text('check_url'))
self.save('mail_to', self.text('mail_to'))
self.save('smtp_server', self.text('smtp_server'))
self.save('smtp_username', self.text('smtp_username'))
smtp_password = self.text('smtp_password')
if smtp_password != '':
encoded_pass = os.popen('echo -n "'+ smtp_password +'" | openssl enc -base64').read().strip()
self.save('smtp_password', encoded_pass)
self.exit_configurator()
def exit_configurator(self):
self.run_prey()
self.show_alert(_("Success"), _("Configuration saved! Your device is now setup and being tracked by Prey. Happy hunting!"), True)
def run_prey(self):
os.system(PREY_PATH + '/prey.sh > /var/log/prey.log &')
################################################
# control panel api
################################################
def report_connection_issue(self):
self.show_alert(_("Problem connecting"), _("We seem to be having a problem connecting to your Control Panel. This is likely a temporary issue. Please try again in a few moments."))
def user_has_available_slots(self, string):
matches = re.search(r"<available_slots>(\w*)</available_slots>", string)
if matches and int(matches.groups()[0]) > 0:
return True
else:
return False
def get_api_key(self, string):
matches = re.search(r"<key>(\w*)</key>", string)
if matches:
self.api_key = matches.groups()[0]
def get_device_keys(self, string, has_available_slots):
hostname = os.popen("hostname").read().strip()
devices = self.get('device')
index = -1
chosen = index
liststore = gtk.ListStore(str,str)
devices.clear()
matches = re.findall(r"<device>\s*<key>(\w*)</key>.*?<title>([\s\w]*)</title>\s*</device>", string, re.DOTALL)
for match in matches:
index += 1
key = match[0]
title = match[1]
liststore.append([title,key])
if key == self.current_device_key: #set the choice because we have a matching device key
chosen = index
            elif title.lower() == hostname.lower() and chosen < 0: #set the choice because we likely have a matching title (but device key takes precedence)
chosen = index
if index < 0:
#self.get('create_new_device').set_active(True)
self.show_alert(_("No devices exist"), _("There are no devices currently defined in your Control Panel.\n\nPlease select the option to create a new device."))
return False
devices.set_model(liststore)
cell = gtk.CellRendererText()
devices.pack_start(cell, True)
devices.add_attribute(cell, 'text', 0)
devices.set_active(chosen)
return True
def create_user(self):
self.email = self.text('email')
params = urllib.urlencode({'user[name]': self.text('user_name'), 'user[email]': self.email, 'user[password]': self.text('password'), 'user[password_confirmation]' : self.text('password_confirm')})
# params = 'user[name]='+self.text('user_name')+'&user[email]='+self.email+'&user[password]='+self.text('password')+'&user[password_confirmation]='+self.text('password_confirm')
result = os.popen('curl -i -s -k --connect-timeout 5 '+ CONTROL_PANEL_URL_SSL + '/users.xml -d \"'+params+'\"').read().strip()
if result.find("<key>") != -1:
self.get_api_key(result)
self.device_key = ""
elif result.find("Email has already been taken") != -1:
self.show_alert(_("Email has already been taken"), _("That email address already exists! If you signed up previously, please go back and select the Existing User option."))
return
else:
self.show_alert(_("Couldn't create user!"), _("There was a problem creating your account. Please make sure the email address you entered is valid, as well as your password."))
return
self.apply_control_panel_settings()
self.run_prey()
self.show_alert(_("Account created!"), _("Your account has been succesfully created and configured in Prey's Control Panel.\n\nPlease check your inbox now, you should have received a verification email."), True)
def get_existing_user(self, show_devices):
self.email = self.text('existing_email')
password = self.text('existing_password')
result = os.popen('curl -i -s -k --connect-timeout 5 '+ CONTROL_PANEL_URL_SSL + '/profile.xml -u '+self.email+":'"+password+"'").read().strip()
if result.find('401 Unauthorized') != -1:
self.show_alert(_("User does not exist"), _("Couldn't log you in. Remember you need to activate your account opening the link we emailed you.\n\nIf you forgot your password please visit preyproject.com."))
return
if result.find("<user>") != -1:
self.get_api_key(result)
else:
self.report_connection_issue()
return False
has_available_slots = self.user_has_available_slots(result)
if not has_available_slots and not show_devices:
self.show_alert(_("Not allowed"), _("It seems you've reached your limit for devices!\n\nIf you had previously added this PC, you should select the \"Device already exists\" option to select the device from a list of devices you have already defined.\n\nIf this is a new device, you can also upgrade to a Pro Account to increase your slot count and get access to additional features. For more information, please check\nhttp://preyproject.com/plans."))
return False
if show_devices:
result = os.popen('curl -i -s -k --connect-timeout 5 '+ CONTROL_PANEL_URL_SSL + '/devices.xml -u '+self.email+":'"+password+"'").read().strip()
if result.find("</devices>") != -1:
return self.get_device_keys(result,has_available_slots)
else:
self.report_connection_issue()
return False
else:
self.device_key = ""
self.apply_control_panel_settings()
self.exit_configurator()
def apply_device_settings(self):
devices = self.get('device')
model = devices.get_model()
self.device_key = model.get_value(devices.get_active_iter(),1)
self.apply_control_panel_settings()
self.exit_configurator()
def __init__(self):
if not self.prey_exists() or not self.is_config_writable():
gtk.main()
exit(1)
self.get_current_settings()
builder = gtk.Builder()
builder.set_translation_domain(app_name)
builder.add_from_file(script_path + "/prey-config.glade")
builder.connect_signals({
"on_window_destroy" : gtk.main_quit,
"prev_page" : self.prev_page,
"next_page" : self.next_page,
"toggle_buttons" : self.toggle_buttons,
"apply_settings" : self.apply_settings,
"toggle_pg3_next_apply" : self.toggle_pg3_next_apply,
"set_default_action" : self.set_default_action,
"ensure_visible" : self.ensure_visible,
"key_pressed" : self.key_pressed,
"close_about" : self.close_about
})
self.window = builder.get_object("window")
self.window.set_title(self.window.get_title() + " (v" + VERSION + ")")
# self.window.get_settings().set_string_property('gtk-font-name', 'sans normal 11','');
self.pages = builder.get_object("reporting_mode_tabs")
self.root = builder
self.get('delay').grab_focus()
about = self.get('about_prey_config')
about.set_version(VERSION)
self.display_real_settings()
self.check_if_configured()
if __name__ == "__main__":
app = PreyConfigurator()
gtk.main()
import unittest
from rest_test import compare
class DictTestCase(unittest.TestCase):
def test_basic(self):
data = dict(
a=1,
b='2'
)
expected_data = dict(
b='2',
a=1
)
assert compare(data, expected_data)
def test_basic_false(self):
data = dict(
a=1,
b='2'
)
expected_data = dict(
b=2,
a=1
)
self.assertFalse(compare(data, expected_data))
def test_deep(self):
data = dict(
a=1,
b=dict(
b=dict(
a='test'
),
a=2,
c=''
)
)
expected_data = dict(
a=1,
b=dict(
a=2,
b=dict(
a='test'
),
c=''
)
)
assert compare(data, expected_data)
def test_deep_false(self):
data = dict(
a=1,
b=dict(
b=dict(
a='test'
),
a=2,
c=''
)
)
expected_data = dict(
a=1,
b=dict(
a=2,
b=dict(
b=1
),
c=''
)
)
self.assertFalse(compare(data, expected_data))
class ItemEllipsisTestCase(unittest.TestCase):
def test_basic(self):
data = dict(
a=1,
b='2'
)
expected_data = dict(
b='2',
a=...
)
assert compare(data, expected_data)
def test_basic_false(self):
data = dict(
a=1,
b='2'
)
expected_data = dict(
b=2,
a=...
)
self.assertFalse(compare(data, expected_data))
def test_deep(self):
data = dict(
a=1,
b=dict(
b=dict(
a='test'
),
a=2,
c=''
)
)
expected_data = dict(
a=1,
b=dict(
a=2,
b=...,
c=''
)
)
assert compare(data, expected_data)
def test_deep_false(self):
data = dict(
a=1,
b=dict(
b=dict(
a='test'
),
a=2,
c=''
)
)
expected_data = dict(
a=1,
b=dict(
a=3,
b=...,
c=''
)
)
self.assertFalse(compare(data, expected_data))
def test_missing_basic_false(self):
data = dict(
a=1,
b='2'
)
expected_data = dict(
a=...
)
self.assertFalse(compare(data, expected_data))
def test_moreover_basic_false(self):
data = dict(
a=1,
b='2'
)
expected_data = dict(
b=2,
a=...,
c='test'
)
self.assertFalse(compare(data, expected_data))
def test_missing_deep_false(self):
data = dict(
a=1,
b=dict(
b=dict(
a='test'
),
a=2,
c=''
)
)
expected_data = dict(
a=1,
b=dict(
a=2,
b=...,
)
)
self.assertFalse(compare(data, expected_data))
def test_moreover_deep_false(self):
data = dict(
a=1,
b=dict(
b=dict(
a='test'
),
a=2,
c=''
)
)
expected_data = dict(
a=1,
b=dict(
a=3,
b=...,
c='',
d='test'
)
)
self.assertFalse(compare(data, expected_data))
class DictEllipsisTestCase(unittest.TestCase):
def test_empty(self):
data = dict(
)
expected_data = {
...: ...
}
assert compare(data, expected_data)
def test_basic(self):
data = dict(
a=1,
b='2'
)
expected_data = {
...: ...
}
assert compare(data, expected_data)
def test_basic_more(self):
data = {
'a': 1,
'b': '2',
'c': 3
}
expected_data = {
...: ...,
'b': '2'
}
assert compare(data, expected_data)
def test_basic_false(self):
data = dict(
a=1,
b='2'
)
expected_data = {
'b': 2,
...: ...
}
self.assertFalse(compare(data, expected_data))
def test_deep(self):
data = dict(
a=1,
b=dict(
b=dict(
a='test'
),
a=2,
c=''
)
)
expected_data = dict(
a=1,
b={
'a': 2,
...: ...,
'c': ''
}
)
assert compare(data, expected_data)
def test_deep_false(self):
data = dict(
a=1,
b=dict(
b=dict(
a='test'
),
a=2,
c=''
)
)
expected_data = dict(
a=1,
b={
'a': 3,
...: ...,
'c': ''
}
)
self.assertFalse(compare(data, expected_data))
def test_moreover_basic_false(self):
data = dict(
a=1,
b='2'
)
expected_data = {
'b': 2,
...: ...,
'c': 'test'
}
self.assertFalse(compare(data, expected_data))
    def test_missing_deep(self):
        # The {...: ...} wildcard tolerates missing keys, so this comparison
        # matches; the original name ended in `_false` despite asserting True.
data = dict(
a=1,
b=dict(
b=dict(
a='test'
),
a=2,
c=''
)
)
expected_data = dict(
a=1,
b={
'a': 2,
...: ...
}
)
assert compare(data, expected_data)
def test_moreover_deep_false(self):
data = dict(
a=1,
b=dict(
b=dict(
a='test'
),
a=2,
c=''
)
)
expected_data = dict(
a=1,
b={
'a': 3,
...: ...,
'c': '',
'd': 'test'
}
)
self.assertFalse(compare(data, expected_data))
def test_bad_usage(self):
data = dict(
a=1,
b=dict(
b=dict(
a='test'
),
a=2,
c=''
)
)
expected_data = {
'a': 1,
...: dict(
b=dict(
a='test'
),
a=2,
c=''
)
}
with self.assertRaises(TypeError):
compare(data, expected_data)
class ListTestCase(unittest.TestCase):
def test_basic(self):
data = [
1,
'2'
]
expected_data = [
1,
'2'
]
assert compare(data, expected_data)
def test_basic_false(self):
data = [
1,
2
]
expected_data = [
2,
1
]
self.assertFalse(compare(data, expected_data))
def test_combination(self):
data = [
dict(
a=1,
b=dict(
b=dict(
a='test'
),
a=2,
c=''
)
),
dict(
a=2,
b=dict(
b=dict(
a='test'
),
a=2,
c=''
)
)
]
expected_data = [
dict(
a=1,
b=dict(
b=dict(
a='test'
),
a=2,
c=''
)
),
dict(
a=2,
b=dict(
b=dict(
a='test'
),
a=2,
c=''
)
)
]
assert compare(data, expected_data)
class ListEllipsisTestCase(unittest.TestCase):
def test_empty(self):
data = [
'1',
{},
3
]
expected_data = [
...
]
assert compare(data, expected_data)
def test_start(self):
data = [
'1',
{},
3
]
expected_data = [
...,
3
]
assert compare(data, expected_data)
def test_multiple(self):
data = [
'1',
2,
3,
'4',
5
]
expected_data = [
...,
2,
...
]
assert compare(data, expected_data)
def test_end(self):
data = [
1,
2,
3,
4,
5
]
expected_data = [
1,
...
]
assert compare(data, expected_data)
def test_multiple_in(self):
data = [
1,
2,
3,
4,
5,
6,
7
]
expected_data = [
...,
2,
...,
5,
...
]
assert compare(data, expected_data)
def test_start_false(self):
data = [
1,
2,
3
]
expected_data = [
...,
4
]
self.assertFalse(compare(data, expected_data))
def test_multiple_false(self):
data = [
1,
2,
3,
4,
5
]
expected_data = [
...,
6,
...
]
self.assertFalse(compare(data, expected_data))
def test_end_false(self):
data = [
1,
2,
3,
4,
5
]
expected_data = [
2,
...
]
self.assertFalse(compare(data, expected_data))
def test_multiple_in_optional(self):
data = [
1,
2,
3,
4,
5,
6,
7
]
expected_data = [
...,
2,
...,
3,
...
]
assert compare(data, expected_data)
def test_multiple_in_optional_between(self):
data = [
2,
3,
]
expected_data = [
...,
2,
...,
3,
...
]
assert compare(data, expected_data)
def test_bad_usage(self):
data = [
1,
2,
3,
4,
5,
6,
7
]
expected_data = [
...,
...,
7
]
with self.assertRaises(TypeError):
compare(data, expected_data)
def test_one(self):
data = [1]
expected_data = [..., 1, ...]
assert compare(data, expected_data)
class CombinationEllipsisTestCase(unittest.TestCase):
def test_combination(self):
data = [
{
'foo': 1,
'bar': 2,
'zoo': 3,
}
]
expected_data = [
...,
{
...: ...,
'bar': 2
},
...
]
assert compare(data, expected_data)
def test_combination_empty(self):
data = [
{
}
]
expected_data = [
...,
{
...: ...,
},
...
]
assert compare(data, expected_data)
class TypeTestCase(unittest.TestCase):
def test_list(self):
data = [
'1',
{},
3
]
expected_data = list
assert compare(data, expected_data)
def test_dict(self):
data = {
'1': 2,
2: 3,
3: 2
}
expected_data = dict
assert compare(data, expected_data)
def test_list_with_dict(self):
data = [
'1',
{'test': 'test_value'},
3
]
expected_data = [
'1',
dict,
3
]
assert compare(data, expected_data)
def test_dict_with_list(self):
data = {
'1': 2,
'test_key': [1, 2, 'u'],
3: 2
}
expected_data = {
'1': 2,
'test_key': list,
3: 2
}
assert compare(data, expected_data)
def test_different_types_in_list(self):
data = [
'1',
{},
3
]
expected_data = [
str,
dict,
int
]
assert compare(data, expected_data)
def test_different_types_in_dict(self):
data = {
'1': 2,
2: 'test',
3: [1, 2, 3]
}
expected_data = {
'1': int,
2: str,
3: list
}
assert compare(data, expected_data)
def test_different_types_in_dict_in_deep(self):
data = [
'1',
{
'1': 2,
2: 'test',
3: [1, 2, 3]
},
3
]
expected_data = [
'1',
{
'1': int,
2: str,
3: list
},
3
]
assert compare(data, expected_data)
class CombinationTypeEllipsisTestCase(unittest.TestCase):
def test_combination(self):
data = [
{
'foo': 1,
'bar': 2,
'zoo': 3,
},
{
'test_foo': '1',
'test_bar': 2,
'test_zoo': [1, 2, 3],
},
]
expected_data = [
...,
{
...: ...,
'bar': int
},
...,
{
'test_foo': str,
'test_bar': 2,
'test_zoo': list,
}
]
assert compare(data, expected_data)
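# For reference, a minimal sketch of a compare() implementation consistent
# with the behaviour these tests exercise (an illustrative assumption; the
# real rest_test.compare may differ in its details):
def _compare_sketch(data, expected):
    # Ellipsis matches anything; a bare type matches by isinstance.
    if expected is ...:
        return True
    if isinstance(expected, type):
        return isinstance(data, expected)
    if isinstance(expected, dict):
        if not isinstance(data, dict):
            return False
        wildcard = ... in expected
        if wildcard and expected[...] is not ...:
            # {...: <non-ellipsis>} is the "bad usage" the tests reject.
            raise TypeError('... key must map to ...')
        keys = [k for k in expected if k is not ...]
        if wildcard:
            # Explicit keys must all be present; extra keys are tolerated.
            if not set(keys) <= set(data):
                return False
        elif set(keys) != set(data):
            return False
        return all(_compare_sketch(data[k], expected[k]) for k in keys)
    if isinstance(expected, list):
        if not isinstance(data, list):
            return False
        return _match_list_sketch(data, expected)
    return data == expected
def _match_list_sketch(data, expected):
    # ... in a list matches zero or more consecutive items.
    if not expected:
        return not data
    head, rest = expected[0], expected[1:]
    if head is ...:
        if rest and rest[0] is ...:
            raise TypeError('consecutive ... entries in a list')
        return any(_match_list_sketch(data[i:], rest)
                   for i in range(len(data) + 1))
    if not data:
        return False
    return (_compare_sketch(data[0], head)
            and _match_list_sketch(data[1:], rest))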
if __name__ == '__main__':
unittest.main()
import logging
from django.http import HttpResponseRedirect, HttpResponseNotAllowed
from django.shortcuts import get_object_or_404, render
from django.db.models import Count, Q
from django.urls import reverse
from django.contrib.auth.decorators import permission_required
from django.contrib import messages
from django.core.exceptions import ObjectDoesNotExist
from django.utils import timezone
from django.views.decorators.http import require_safe, require_http_methods
from django.conf import settings
from toolkit.members.forms import NewMemberForm, MemberForm
from toolkit.members.models import Member, Volunteer
from toolkit.util import compare_constant_time
from toolkit.toolkit_auth.decorators import ip_or_permission_required
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
@ip_or_permission_required(ip_addresses=settings.CUBE_IP_ADDRESSES,
permission='toolkit.write')
@require_http_methods(["GET", "POST"])
def add_member(request):
# If this view is called with GET then display the form to enter a new
# member. If called with POST then take parameters out of the body of
# the request and create a new member
if request.method == 'POST':
# Create new member object
instance = Member()
# Create form object to process the submitted data (data is pulled
# out of the request.POST automatically)
form = NewMemberForm(request.POST, instance=instance)
# Validate form fields
if form.is_valid():
# Check for existing email address:
if (instance.email
and Member.objects.filter(email=instance.email).exists()):
logger.info(
'Member with email %s already in database', instance.email)
messages.add_message(
request,
messages.WARNING,
"%s already in members' database" % instance.email)
return HttpResponseRedirect(
reverse("search-members") + "?email=%s&q=" % instance.email)
# Form is valid, save data:
logger.info("Adding member '%s <%s>'",
instance.name, instance.email)
member = form.save(commit=False)
member.gdpr_opt_in = timezone.now()
member.save()
# Member added ok, new blank form:
form = NewMemberForm()
messages.add_message(request, messages.SUCCESS,
"Added member: %s" % instance.number)
return HttpResponseRedirect(reverse("add-member"))
elif request.method == 'GET':
# GET request; create form object with default values
form = NewMemberForm()
context = {
'form': form,
}
return render(request, 'form_new_member.html', context)
@permission_required('toolkit.read')
@require_safe
def search(request):
search_terms = request.GET.get('q', None)
email_search = request.GET.get('email', None)
results = None
results = Member.objects
if email_search:
results = results.filter(email=email_search)
if search_terms:
results = results.filter(
Q(name__icontains=search_terms) |
Q(email__icontains=search_terms) |
Q(number=search_terms)).order_by('name')
if search_terms or email_search:
context = {
'search_terms': search_terms or email_search,
'members': results,
'membership_expiry_enabled': settings.MEMBERSHIP_EXPIRY_ENABLED,
}
return render(request, 'search_members_results.html', context)
context = {}
return render(request, 'search_members.html', context)
@permission_required('toolkit.read')
@require_safe
def view(request, member_id):
# Is this view actually used?
member = get_object_or_404(Member, id=member_id)
return render(request, 'view_member.html', {
'member': member,
'membership_expiry_enabled': settings.MEMBERSHIP_EXPIRY_ENABLED,
})
@require_http_methods(["GET", "POST"])
def delete_member(request, member_id):
if not _check_access_permitted_for_member_key(
'toolkit.write', request, member_id):
# Manually wrap this function in the standard 'permission_required'
# decorator and call it to get the redirect to the login page:
return permission_required('toolkit.write')(delete_member)(
request, member_id)
# See comments in edit_member
# TODO if a punter has already deleted themselves and clicks
# on their mailout link again, they will get the login page, which
# will probably confuse them.
member = get_object_or_404(Member, id=member_id)
# Did we get access to this page using a valid email key?
access_using_key = _member_key_matches_request(request, member)
if request.method == "GET" and not access_using_key:
# Only allow GET requests to delete things if they were accompanied
# by a valid access key for the given member
return HttpResponseNotAllowed(["POST"])
user_has_permission = request.user.has_perm('toolkit.write')
vol = None
try:
vol = member.volunteer
except Volunteer.DoesNotExist:
pass
# Check the person being deleted isn't an active volunteer.
if vol and vol.active:
# Volunteers who tried to delete their own record using an email link
# get a special message:
if access_using_key:
logger.info("Futile attempt by active volunteer %s "
"<%s> to delete themselves"
% (member.name, member.email))
# TODO send mail to admins
return render(request, 'email_admin.html')
else:
messages.add_message(
request, messages.ERROR,
"Can't delete active volunteer %s "
"(%s). Retire them first."
% (member.name, member.number))
logger.info(
"Attempt to delete active volunteer %s %s <%s>"
% (member.number, member.name, member.email))
return HttpResponseRedirect(reverse("search-members"))
# Logged in, and not following an email link, so just delete:
elif user_has_permission and not access_using_key:
messages.add_message(
request, messages.SUCCESS,
"Deleted member: %s (%s)"
% (member.number, member.name))
logger.info(
"Member %s %s <%s> deleted by admin"
% (member.number, member.name, member.email))
member.delete() # This will delete associated volunteer record, if any
return HttpResponseRedirect(reverse("search-members"))
# Not logged in (or logged in, but using a valid email link) must be an
# email link, so confirm:
else:
confirmed = request.GET.get('confirmed', 'no')
if confirmed == 'yes':
logger.info("Member %s %s <%s> self-deleted",
member.number, member.name, member.email)
member.delete()
return HttpResponseRedirect(reverse("goodbye"))
else:
return render(request, 'confirm-deletion.html')
def _member_key_matches_request(request, member):
"""
Utility method; returns True if the current request has a value 'k'
which is the same as the mailout_key for the given member_object.
"""
try:
member_key = ''
if request.method == 'GET':
member_key = request.GET.get('k', None)
elif request.method == 'POST':
member_key = request.POST.get('k', None)
assert not isinstance(member_key, bytes)
if isinstance(member_key, str):
# Use compare_constant_time instead of == to avoid timing
# attacks (no, really - read up on it)
return compare_constant_time(
member.mailout_key.encode("ascii"),
member_key.encode("ascii"))
        # No key supplied (or not a string): explicitly report no match
        # (the original fell through and implicitly returned None).
        return False
    # Keys should really both be ASCII, so this is very unlikely to
    # raise an error unless someone intentionally feeds in junk.
except UnicodeEncodeError:
# If key value is garbage:
return False
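# For reference, a minimal constant-time comparison sketch (illustrative; the
# real toolkit.util.compare_constant_time may differ -- hmac.compare_digest is
# the stdlib equivalent):
#
#   import hmac
#   def compare_constant_time(a, b):
#       # Examines every byte regardless of where the first mismatch occurs,
#       # so the runtime leaks nothing about how much of the key was correct.
#       return hmac.compare_digest(a, b)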
def _check_access_permitted_for_member_key(permission, request, member_id):
"""
Utility method; returns True if either user is logged on and has the
given permission, or if the current request has a value 'k' which is the
same as the mailout_key for the given member_id."""
# Check if user is logged in and has permission to edit:
access_permitted = request.user.has_perm(permission)
# If not, check if a 'k' parameter was supplied in the request, and if it
# matches the member's mailout_key then go ahead
if not access_permitted:
try:
member = Member.objects.get(id=member_id)
access_permitted = _member_key_matches_request(
request, member)
except ObjectDoesNotExist:
# If member doesn't exist, or key value is garbage:
access_permitted = False
return access_permitted
# This view (and unsubscribe_member below) can be accessed both by logged in
# users and if the magic key associated with the member record is passed in the
# request
@require_http_methods(["GET", "POST"])
def edit_member(request, member_id):
if not _check_access_permitted_for_member_key(
'toolkit.write', request, member_id):
# Manually wrap this function in the standard 'permission_required'
# decorator and call it to get the redirect to the login page:
return permission_required('toolkit.write')(edit_member)(
request, member_id)
# (To elaborate:
# permission_required('toolkit.write')
# is the decorator used elsewhere. Writing:
# permission_required('toolkit.write')(edit_member)
# returns the function with the decorator applied, then
#
# permission_required('toolkit.write')(edit_member)(request, member_id)
#
    # calls the wrapped function, passing in the arguments originally
    # supplied. Capisce?)
member = get_object_or_404(Member, id=member_id)
user_has_permission = request.user.has_perm('toolkit.write')
context = {}
if request.method == 'POST':
form = MemberForm(request.POST, instance=member,
hide_internal_fields=not user_has_permission)
if form.is_valid():
logger.info(u"Saving changes to member '{0}' (id: {1})".format(
member.name, member.pk))
form.save()
messages.add_message(request, messages.SUCCESS,
u"Member {0} updated".format(member.number))
if request.user.has_perm('toolkit.write'):
return HttpResponseRedirect(reverse("search-members"))
else:
form = MemberForm(instance=member,
hide_internal_fields=not user_has_permission)
context = {
'member': member,
'form': form,
'membership_expiry_enabled': settings.MEMBERSHIP_EXPIRY_ENABLED,
'membership_length_days': settings.MEMBERSHIP_LENGTH_DAYS,
}
return render(request, 'form_member.html', context)
# This view (and edit_member above) can be accessed both by logged in users and
# if the magic key associated with the member record is passed in in the
# request
@require_http_methods(["GET", "POST"])
def unsubscribe_member(request, member_id):
if not _check_access_permitted_for_member_key('toolkit.write', request,
member_id):
# Manually wrap this function in the standard 'permission_required'
# decorator and call it to get the redirect to the login page:
return permission_required('toolkit.write')(unsubscribe_member)(
request, member_id)
member = get_object_or_404(Member, id=member_id)
if request.method == 'POST':
# Default to unsubscribe
action = request.POST.get('action', 'unsubscribe')
confirm = request.POST.get('confirm', False)
if confirm == "yes" and action in ('unsubscribe', 'subscribe'):
member.mailout = (action == 'subscribe')
member.save()
logger.info(u"{0} member '{1}' (id: {2}) from mailing list"
.format(action, member.name, member.pk))
messages.add_message(request, messages.SUCCESS,
u"Member {0} {1}d".format(
member.number, action))
action = 'unsubscribe' if member.mailout else 'subscribe'
return render(request, 'form_member_edit_subs.html',
{'member': member, 'action': action})
# This view can be accessed both by logged in users and if the magic key
# associated with the member record is passed in in the request. The
# difference with the above view is that this one does not ask for user
# confirmation. The idea is that this view is called from a script to
# programmatically unsubscribe members if their emails bounce (meeting certain
# bouncing criteria - e.g. not vacation responses).
@require_http_methods(["GET"])
def unsubscribe_member_right_now(request, member_id):
if not _check_access_permitted_for_member_key('toolkit.write', request,
member_id):
return permission_required('toolkit.write')(unsubscribe_member)(
request, member_id)
member = get_object_or_404(Member, id=member_id)
action = 'unsubscribe'
member.mailout = False
member.save()
logger.info(u"{0} member '{1}' (id: {2}) from mailing list"
.format(action, member.name, member.pk))
messages.add_message(request, messages.SUCCESS,
u"Member {0} {1}d".format(member.number, action))
return render(request, 'form_member_edit_subs.html',
{'member': member, 'action': action})
@require_http_methods(["GET", "POST"])
def opt_in(request, member_id):
if not _check_access_permitted_for_member_key('toolkit.write', request,
member_id):
return permission_required('toolkit.write')(unsubscribe_member)(
request, member_id)
member = get_object_or_404(Member, id=member_id)
if request.method == 'POST':
# Default to opt-in
action = request.POST.get('action', 'opt-in')
confirm = request.POST.get('confirm', False)
if confirm == "yes":
if action == 'opt-in':
member.gdpr_opt_in = timezone.now()
messages.add_message(
request,
messages.SUCCESS,
"Thank you %s for opting in to continue to "
"receive our emails" % member.name)
else: # opt-out
member.gdpr_opt_in = None
messages.add_message(
request,
messages.SUCCESS,
"We are sorry to see you have opted out. If you do not "
"opt-in by 25 May 2018 we will delete your membership "
"from our records.")
member.save()
logger.info(u"Member '{0}' (id: {1}) <{2}>: {3} on {4}"
.format(member.name,
member.pk,
member.email,
action,
member.gdpr_opt_in)
)
action = 'opt-out' if member.gdpr_opt_in else 'opt-in'
return render(request, 'form_member_edit_opt_in.html',
{'member': member, 'action': action})
@require_safe
def member_statistics(request):
# View for the 'statistics' page of the 'membership database'
# Get 10 most popular email domains:
email_stats = Member.objects.get_stat_popular_email_domains()
# Get 10 most popular postcode prefixes:
postcode_stats = Member.objects.get_stat_popular_postcode_prefixes()
# Some of the simpler stats are done using the django ORM
context = {
# Results of complex queries:
'email_stats': email_stats,
'postcode_stats': postcode_stats,
# Total number of members:
'm_count': Member.objects.count(),
# Members with an email address that isn't null/blank:
'm_email_count': Member.objects
.filter(email__isnull=False)
.exclude(email='')
.count(),
# Members with an email address that isn't null/blank, where mailout
# hasn't failed & they haven't unsubscribed:
'm_email_viable': Member.objects.mailout_recipients().count(),
# Members with an email address that isn't null/blank, where mailout
# hasn't failed & they have unsubscribed:
'm_email_unsub': Member.objects
.filter(email__isnull=False)
.exclude(email='')
.exclude(mailout_failed=True)
.exclude(mailout=True)
.count(),
# Members with a postcode that isn't null / blank
'm_postcode': Member.objects
.filter(postcode__isnull=False)
.exclude(postcode='')
.count(),
# Members who aren't actually members, who don't get the mailout
'm_cruft': Member.objects
.filter(email__isnull=False)
.exclude(email='')
.exclude(mailout_failed=True)
.exclude(mailout=True)
.exclude(is_member=True)
.count(),
# Members with email without GDPR opt-in
'm_no_gdpr': Member.objects
.mailout_recipients()
.filter(gdpr_opt_in__isnull=True)
.count(),
}
if settings.MEMBERSHIP_EXPIRY_ENABLED:
extra_context = {
'm_unexpired_count': Member.objects.unexpired().count(),
'm_expired_count': Member.objects.expired().count(),
}
context.update(extra_context)
return render(request, 'stats.html', context)
@permission_required('toolkit.read')
@require_safe
def member_duplicates(request):
order = request.GET.get('order', 'email')
sort_type = 'email'
dupes = (Member.objects.values('email')
.exclude(email='')
.annotate(Count('id'))
.filter(id__count__gt=1))
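    # Roughly equivalent SQL (table/column names illustrative):
    #   SELECT email, COUNT(id) FROM member WHERE email != ''
    #   GROUP BY email HAVING COUNT(id) > 1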
members = Member.objects.filter(
email__in=[item['email'] for item in dupes])
if 'number' in order:
members = members.order_by('number')
sort_type = 'number'
if 'name' in order:
members = members.order_by('name')
sort_type = 'name'
if 'email' in order:
members = members.order_by('email')
sort_type = 'email'
if 'created-most-recent-first' in order:
members = members.order_by('-created_at')
sort_type = 'creation date, most recent first'
if 'created-oldest-first' in order:
members = members.order_by('created_at')
sort_type = 'creation date, oldest first'
if 'updated-most-recent-first' in order:
members = members.order_by('-updated_at')
sort_type = 'last update, most recent first'
if 'updated-oldest-first' in order:
members = members.order_by('updated_at')
sort_type = 'last update, oldest first'
context = {
'sort_type': sort_type,
'members': members,
'dupe_count': len(dupes),
'member_count': len(members),
}
return render(request, 'dupes.html', context)
@require_safe
def member_homepages(request):
members = (Member.objects.filter(website__isnull=False)
.exclude(website='')
.order_by('number')
.values('name', 'website'))
return render(request, 'homepages.html', {
'members': members}
)
def goodbye(request):
return render(request, 'goodbye.html')
#! /usr/bin/python
# -*- coding: utf-8 -*-
#
# Pybot server
#
# Copyright (c) 2012-2014 Alan Aguiar alanjas@hotmail.com
# Copyright (c) 2012-2014 Butiá Team butia@fing.edu.uy
# Butia is a free and open robotic platform
# www.fing.edu.uy/inco/proyectos/butia
# Facultad de Ingeniería - Universidad de la República - Uruguay
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import sys
import imp
import select
import socket
import usb4butia
import com_chotox
PYBOT_PORT = 2009
BUFSIZ = 1024
MAX_CLIENTS = 4
class Server():
def __init__(self, debug=False, chotox=False):
self.debug = debug
self.run = True
self.comms = imp.load_source('server_functions', 'server_functions.py')
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.socket.bind(("", PYBOT_PORT))
self.socket.listen(MAX_CLIENTS)
self.clients = {}
self.chotox_mode = chotox
if self.chotox_mode:
self.robot = com_chotox.Chotox(debug=self.debug)
else:
self.robot = usb4butia.USB4Butia(debug=self.debug)
def init_server(self):
inputs = [self.socket]
while self.run:
try:
inputready,outputready,exceptready = select.select(inputs, [], [])
except Exception, err:
print 'Error in select', err
break
for s in inputready:
if s == self.socket:
client, addr = self.socket.accept()
print 'New client: ', str(addr)
inputs.append(client)
self.clients[client] = addr
else:
try:
data = s.recv(BUFSIZ)
if data:
result = ''
r = data.replace('\r', '')
r = r.replace('\n', '')
r = r.split(' ')
if len(r) > 0:
com = r[0]
if hasattr(self.comms, com):
                            f = getattr(self.comms, com)  # resolve the function to call
result = f(self, r[1:])
result = str(result)
s.send(result + '\n')
else:
s.close()
inputs.remove(s)
self.clients.pop(s)
except Exception, err:
print 'Error in recv', err
inputs.remove(s)
self.clients.pop(s)
print 'Closing server'
self.socket.close()
self.robot.close()
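# Minimal client sketch (illustrative; assumes the newline-terminated,
# space-separated protocol handled in init_server above, and that a
# 'getVersion' command exists in server_functions.py):
#
#   import socket
#   c = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#   c.connect(('localhost', PYBOT_PORT))
#   c.send('getVersion\n')
#   print c.recv(BUFSIZ)
#   c.close()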
def show_help():
print "Open PyBot server in PORT 2009"
print ""
print "Usage:"
print " pybot_server.py [OPTIONS]"
print ""
print "Opciones:"
print " -h, --help muestra esta ayuda"
print " chotox simulador de robot"
print " DEBUG habilita los mensajes de depuración"
print ""
if __name__ == "__main__":
argv = sys.argv[:]
if ("-h" in argv) or ("--help" in argv):
show_help()
else:
chotox = 'chotox' in argv
debug = 'DEBUG' in argv
s = Server(debug, chotox)
s.init_server()
###
### This script retrieves the coverage profiles of RNA-seq and Ribo-seq for all ribosomal protein genes. It stores it as text files.
###
import sys,numpy,HTSeq
import multiprocessing,multiprocessing.pool
def analysis(genomicFeature):
'''
This function computes the histograms of reads across transcript lengths.
'''
print('\t computing coverage for {}...'.format(genomicFeature))
# f.1 define window of coverage depending if it's an operon or a gene
print('\t\t computing window...')
if genomicFeature in riboOperons.keys(): # work with operons
print(genomicFeature)
# obtain the relevant features
contigs=[]; starts=[]; ends=[]; strands=[]
localGenes=riboOperons[genomicFeature]
for feature in annotationObject:
if feature.type == 'gene':
strippedID=feature.attr['ID']
if strippedID in localGenes:
contig=feature.iv.chrom
start=feature.iv.start+1
end=feature.iv.end
strand=feature.iv.strand
contigs.append(contig); starts.append(start); ends.append(end); strands.append(strand)
# check consistency of strands
if len(list(set(strands))) > 1:
print('Detected gene in operon with different orientation. Exiting...')
sys.exit()
# define positions for coverage computing
contig=contigs[0]
start=min(starts)
end=max(ends)
strand=strands[0]
windowStart=start-margin
windowEnd=end+margin+1
windowP=HTSeq.GenomicInterval(contig,windowStart,windowEnd,"+")
windowM=HTSeq.GenomicInterval(contig,windowStart,windowEnd,"-")
else: # work with genes
for feature in annotationObject:
if feature.type == 'gene':
strippedID=feature.attr['ID']
if strippedID == genomicFeature:
break
# define positions for coverage computing
contig=feature.iv.chrom
start=feature.iv.start+1
end=feature.iv.end
strand=feature.iv.strand
windowStart=start-margin
windowEnd=end+margin+1
windowP=HTSeq.GenomicInterval(contig,windowStart,windowEnd,"+")
windowM=HTSeq.GenomicInterval(contig,windowStart,windowEnd,"-")
# f.2. compute coverage based on window
print('\t\t computing coverage...')
    for timepoint in timepoints:
        for replicate in replicates:
            for experiment in experiments:
                # f.2.1. define the bam file
                bamFile=bamFilesDir+'{}.{}.{}/Aligned.sortedByCoord.out.bam'.format(experiment,replicate,timepoint)
                # f.2.2. read the BAM file; coverage is re-initialized here so that
                # each output profile reflects a single sample instead of accumulating
                # reads from all previously processed BAM files
                coverage=HTSeq.GenomicArray("auto",stranded=True,typecode="i")
                sortedBAMfile=HTSeq.BAM_Reader(bamFile)
for alignment in sortedBAMfile:
if alignment.aligned:
coverage[ alignment.iv ] += 1
                # f.2.3. compute coverage profiles
profileP=list(coverage[windowP])
profileM=list(coverage[windowM])
                # f.2.4. define genomic positions with respect to strands
loc=numpy.arange(windowStart,windowEnd)
if strand == '+':
pos=loc
elif strand == '-':
pos=loc[::-1]
else:
print('error at strand selection')
sys.exit()
                # f.2.5. write the output file
fileName='{}{}.{}.{}.{}.txt'.format(coverageDir,timepoint,replicate,genomicFeature,experiment)
f=open(fileName,'w')
f.write('# name {}\n'.format(genomicFeature))
f.write('# timepoint {}\n'.format(timepoint))
f.write('# replicate {}\n'.format(replicate))
f.write('# strand {}\n'.format(strand))
f.write('# experiment {}\n'.format(experiment))
f.write('# sumP,sumM {},{}\n'.format(sum(profileP),sum(profileM)))
f.write('# location \t counts on strand plus \t counts on strand minus\n')
for i in range(len(pos)):
f.write('{}\t{}\t{}\n'.format(pos[i],profileP[i],profileM[i]))
f.close()
return None
def dataReader():
'''
This function reads the ribosomal protein operons and genes.
'''
# f.1. ribo-pt gene operons
operonPredictions={}
fileName=operonPredictionsDir+'riboPtOperons.txt'
with open(fileName,'r') as f:
next(f)
for line in f:
vector=line.split('\t')
name=vector[0]
genes=[]
for i in range(len(vector)-1):
gene=vector[i+1].replace('\n','')
genes.append(gene)
operonPredictions[name]=genes
# f.2. non-operon ribo-pt genes
NORPGs=[]
fileName=operonPredictionsDir+'NORPGs.txt'
with open(fileName,'r') as f:
next(f)
for line in f:
vector=line.split('\t')
name=vector[0].replace('\n','')
NORPGs.append(name)
# f.3. print information about retrieval
a=[]
for operon in operonPredictions:
for name in operonPredictions[operon]:
if name not in a:
a.append(name)
print('\t Recovered {} genes in {} operons.'.format(len(a),len(operonPredictions)))
print('\t Recovered {} genes not in operons.'.format(len(NORPGs)))
for name in NORPGs:
if name not in a:
a.append(name)
print('\t Total genes recovered: {}'.format(len(a)))
return operonPredictions,NORPGs
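# Input formats, as inferred from the parsing above: both files are
# tab-separated with a single header line. riboPtOperons.txt lists an operon
# name followed by its member gene IDs per row; NORPGs.txt lists one gene ID per row.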
###
### MAIN
###
# 0. user defined variables
bamFilesDir='/Volumes/omics4tb/alomana/projects/TLR/data/BAM/'
annotationFile='/Volumes/omics4tb/alomana/projects/TLR/data/genome/alo.build.NC002607.NC001869.NC002608.gff3'
coverageDir='/Volumes/omics4tb/alomana/projects/TLR/data/coverage/'
operonPredictionsDir='/Volumes/omics4tb/alomana/projects/TLR/data/microbesOnline/'
timepoints=['tp.1','tp.2','tp.3','tp.4']
replicates=['rep.1','rep.2','rep.3']
experiments=['rbf','trna']
margin=100 # excess of base pairs
# 1. read data
print('Reading data...')
riboOperons,NORPGs=dataReader()
# 2. iterate analysis over ribosomal proteins
print('Performing analysis...')
# 2.1. read annotation file
annotationObject=HTSeq.GFF_Reader(annotationFile)
# 2.2. selecting appropriate genomic locations
genomicFeatures=list(riboOperons.keys())+NORPGs
genomicFeatures.sort()
#genomicFeatures=['gene-VNG_RS06605'] # debugging override: uncomment to restrict the run to a single feature
# 2.3.a. iterate over genomicFeatures in a parallel manner
numberOfThreads=len(genomicFeatures)
print('Initialized parallel analysis using {} threads...'.format(numberOfThreads))
hydra=multiprocessing.pool.Pool(numberOfThreads)
tempo=hydra.map(analysis,genomicFeatures)
print('... completed.')
# 2.3.b. iterate over genomicFeatures single-thread
#for genomicFeature in genomicFeatures:
# analysis(genomicFeature)
|
# Copyright 2012 OpenStack Foundation.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""
Command-line interface to the Tacker APIs
"""
from __future__ import print_function
import argparse
import getpass
import inspect
import itertools
import logging
import os
import sys
from keystoneclient.auth.identity import v2 as v2_auth
from keystoneclient.auth.identity import v3 as v3_auth
from keystoneclient import discover
from keystoneclient.openstack.common.apiclient import exceptions as ks_exc
from keystoneclient import session
from oslo_utils import encodeutils
import six.moves.urllib.parse as urlparse
from cliff import app
from cliff import commandmanager
from tackerclient.common import clientmanager
from tackerclient.common import command as openstack_command
from tackerclient.common import exceptions as exc
from tackerclient.common import extension as client_extension
from tackerclient.common import utils
from tackerclient.i18n import _
from tackerclient.tacker.v1_0 import extension
from tackerclient.tacker.v1_0.vm import device
from tackerclient.tacker.v1_0.vm import device_template
from tackerclient.tacker.v1_0.vm import vnf
from tackerclient.tacker.v1_0.vm import vnfd
from tackerclient.version import __version__
VERSION = '1.0'
TACKER_API_VERSION = '1.0'
def run_command(cmd, cmd_parser, sub_argv):
_argv = sub_argv
index = -1
values_specs = []
if '--' in sub_argv:
index = sub_argv.index('--')
_argv = sub_argv[:index]
values_specs = sub_argv[index:]
known_args, _values_specs = cmd_parser.parse_known_args(_argv)
    cmd.values_specs = _values_specs if index == -1 else values_specs
return cmd.run(known_args)
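# e.g. for sub_argv == ['vnf-create', '--', '--name', 'x'], argparse only sees
# ['vnf-create']; everything from the first '--' onwards is kept aside in values_specs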
def env(*_vars, **kwargs):
"""Search for the first defined of possibly many env vars.
Returns the first environment variable defined in vars, or
returns the default defined in kwargs.
"""
for v in _vars:
value = os.environ.get(v, None)
if value:
return value
return kwargs.get('default', '')
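# e.g. env('OS_USERNAME', 'OS_USER_ID', default='demo') returns the value of
# the first variable that is set and non-empty, falling back to 'demo'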
def check_non_negative_int(value):
try:
value = int(value)
except ValueError:
raise argparse.ArgumentTypeError(_("invalid int value: %r") % value)
if value < 0:
raise argparse.ArgumentTypeError(_("input value %d is negative") %
value)
return value
class BashCompletionCommand(openstack_command.OpenStackCommand):
"""Prints all of the commands and options for bash-completion."""
resource = "bash_completion"
COMMAND_V1 = {
'bash-completion': BashCompletionCommand,
'ext-list': extension.ListExt,
'ext-show': extension.ShowExt,
'device-template-create': device_template.CreateDeviceTemplate,
'device-template-list': device_template.ListDeviceTemplate,
'device-template-show': device_template.ShowDeviceTemplate,
'device-template-update': device_template.UpdateDeviceTemplate,
'device-template-delete': device_template.DeleteDeviceTemplate,
'device-create': device.CreateDevice,
'device-list': device.ListDevice,
'device-show': device.ShowDevice,
'device-update': device.UpdateDevice,
'device-delete': device.DeleteDevice,
'interface-attach': device.AttachInterface,
'interface-detach': device.DetachInterface,
# MANO lingo
'vnfd-create': vnfd.CreateVNFD,
'vnfd-delete': vnfd.DeleteVNFD,
'vnfd-list': vnfd.ListVNFD,
'vnfd-show': vnfd.ShowVNFD,
'vnf-create': vnf.CreateVNF,
'vnf-update': vnf.UpdateVNF,
'vnf-delete': vnf.DeleteVNF,
'vnf-list': vnf.ListVNF,
'vnf-show': vnf.ShowVNF,
# 'vnf-config-create'
# 'vnf-config-push'
}
COMMANDS = {'1.0': COMMAND_V1}
class HelpAction(argparse.Action):
"""Provide a custom action so the -h and --help options
to the main app will print a list of the commands.
The commands are determined by checking the CommandManager
instance, passed in as the "default" value for the action.
"""
def __call__(self, parser, namespace, values, option_string=None):
outputs = []
max_len = 0
app = self.default
parser.print_help(app.stdout)
app.stdout.write(_('\nCommands for API v%s:\n') % app.api_version)
command_manager = app.command_manager
for name, ep in sorted(command_manager):
factory = ep.load()
cmd = factory(self, None)
one_liner = cmd.get_description().split('\n')[0]
outputs.append((name, one_liner))
max_len = max(len(name), max_len)
for (name, one_liner) in outputs:
app.stdout.write(' %s %s\n' % (name.ljust(max_len), one_liner))
sys.exit(0)
class TackerShell(app.App):
# verbose logging levels
WARNING_LEVEL = 0
INFO_LEVEL = 1
DEBUG_LEVEL = 2
CONSOLE_MESSAGE_FORMAT = '%(message)s'
DEBUG_MESSAGE_FORMAT = '%(levelname)s: %(name)s %(message)s'
log = logging.getLogger(__name__)
def __init__(self, apiversion):
super(TackerShell, self).__init__(
description=__doc__.strip(),
version=VERSION,
command_manager=commandmanager.CommandManager('tacker.cli'), )
self.commands = COMMANDS
for k, v in self.commands[apiversion].items():
self.command_manager.add_command(k, v)
self._register_extensions(VERSION)
        # Pop the 'complete' command so that the output of 'tacker help' is correct.
self.command_manager.commands.pop('complete')
# This is instantiated in initialize_app() only when using
# password flow auth
self.auth_client = None
self.api_version = apiversion
def build_option_parser(self, description, version):
"""Return an argparse option parser for this application.
Subclasses may override this method to extend
the parser with more global options.
:param description: full description of the application
:paramtype description: str
:param version: version number for the application
:paramtype version: str
"""
parser = argparse.ArgumentParser(
description=description,
add_help=False, )
parser.add_argument(
'--version',
action='version',
version=__version__, )
parser.add_argument(
'-v', '--verbose', '--debug',
action='count',
dest='verbose_level',
default=self.DEFAULT_VERBOSE_LEVEL,
help=_('Increase verbosity of output and show tracebacks on'
' errors. You can repeat this option.'))
parser.add_argument(
'-q', '--quiet',
action='store_const',
dest='verbose_level',
const=0,
help=_('Suppress output except warnings and errors.'))
parser.add_argument(
'-h', '--help',
action=HelpAction,
nargs=0,
default=self, # tricky
help=_("Show this help message and exit."))
parser.add_argument(
'-r', '--retries',
metavar="NUM",
type=check_non_negative_int,
default=0,
help=_("How many times the request to the Tacker server should "
"be retried if it fails."))
# FIXME(bklei): this method should come from python-keystoneclient
self._append_global_identity_args(parser)
return parser
def _append_global_identity_args(self, parser):
# FIXME(bklei): these are global identity (Keystone) arguments which
# should be consistent and shared by all service clients. Therefore,
# they should be provided by python-keystoneclient. We will need to
# refactor this code once this functionality is available in
# python-keystoneclient.
#
# Note: At that time we'll need to decide if we can just abandon
# the deprecated args (--service-type and --endpoint-type).
parser.add_argument(
'--os-service-type', metavar='<os-service-type>',
default=env('OS_SERVICEVM_SERVICE_TYPE', default='servicevm'),
help=_('Defaults to env[OS_SERVICEVM_SERVICE_TYPE] or servicevm.'))
parser.add_argument(
'--os-endpoint-type', metavar='<os-endpoint-type>',
default=env('OS_ENDPOINT_TYPE', default='publicURL'),
help=_('Defaults to env[OS_ENDPOINT_TYPE] or publicURL.'))
# FIXME(bklei): --service-type is deprecated but kept in for
# backward compatibility.
parser.add_argument(
'--service-type', metavar='<service-type>',
default=env('OS_SERVICEVM_SERVICE_TYPE', default='servicevm'),
help=_('DEPRECATED! Use --os-service-type.'))
# FIXME(bklei): --endpoint-type is deprecated but kept in for
# backward compatibility.
parser.add_argument(
'--endpoint-type', metavar='<endpoint-type>',
default=env('OS_ENDPOINT_TYPE', default='publicURL'),
help=_('DEPRECATED! Use --os-endpoint-type.'))
parser.add_argument(
'--os-auth-strategy', metavar='<auth-strategy>',
default=env('OS_AUTH_STRATEGY', default='keystone'),
help=_('DEPRECATED! Only keystone is supported.'))
parser.add_argument(
'--os_auth_strategy',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-auth-url', metavar='<auth-url>',
default=env('OS_AUTH_URL'),
help=_('Authentication URL, defaults to env[OS_AUTH_URL].'))
parser.add_argument(
'--os_auth_url',
help=argparse.SUPPRESS)
project_name_group = parser.add_mutually_exclusive_group()
project_name_group.add_argument(
'--os-tenant-name', metavar='<auth-tenant-name>',
default=env('OS_TENANT_NAME'),
help=_('Authentication tenant name, defaults to '
'env[OS_TENANT_NAME].'))
project_name_group.add_argument(
'--os-project-name',
metavar='<auth-project-name>',
default=utils.env('OS_PROJECT_NAME'),
help='Another way to specify tenant name. '
'This option is mutually exclusive with '
' --os-tenant-name. '
'Defaults to env[OS_PROJECT_NAME].')
parser.add_argument(
'--os_tenant_name',
help=argparse.SUPPRESS)
project_id_group = parser.add_mutually_exclusive_group()
project_id_group.add_argument(
'--os-tenant-id', metavar='<auth-tenant-id>',
default=env('OS_TENANT_ID'),
help=_('Authentication tenant ID, defaults to '
'env[OS_TENANT_ID].'))
project_id_group.add_argument(
'--os-project-id',
metavar='<auth-project-id>',
default=utils.env('OS_PROJECT_ID'),
help='Another way to specify tenant ID. '
'This option is mutually exclusive with '
' --os-tenant-id. '
'Defaults to env[OS_PROJECT_ID].')
parser.add_argument(
'--os-username', metavar='<auth-username>',
default=utils.env('OS_USERNAME'),
help=_('Authentication username, defaults to env[OS_USERNAME].'))
parser.add_argument(
'--os_username',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-user-id', metavar='<auth-user-id>',
default=env('OS_USER_ID'),
help=_('Authentication user ID (Env: OS_USER_ID)'))
parser.add_argument(
'--os_user_id',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-user-domain-id',
metavar='<auth-user-domain-id>',
default=utils.env('OS_USER_DOMAIN_ID'),
help='OpenStack user domain ID. '
'Defaults to env[OS_USER_DOMAIN_ID].')
parser.add_argument(
'--os_user_domain_id',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-user-domain-name',
metavar='<auth-user-domain-name>',
default=utils.env('OS_USER_DOMAIN_NAME'),
help='OpenStack user domain name. '
'Defaults to env[OS_USER_DOMAIN_NAME].')
parser.add_argument(
'--os_user_domain_name',
help=argparse.SUPPRESS)
parser.add_argument(
'--os_project_id',
help=argparse.SUPPRESS)
parser.add_argument(
'--os_project_name',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-project-domain-id',
metavar='<auth-project-domain-id>',
default=utils.env('OS_PROJECT_DOMAIN_ID'),
help='Defaults to env[OS_PROJECT_DOMAIN_ID].')
parser.add_argument(
'--os-project-domain-name',
metavar='<auth-project-domain-name>',
default=utils.env('OS_PROJECT_DOMAIN_NAME'),
help='Defaults to env[OS_PROJECT_DOMAIN_NAME].')
parser.add_argument(
'--os-cert',
metavar='<certificate>',
default=utils.env('OS_CERT'),
help=_("Path of certificate file to use in SSL "
"connection. This file can optionally be "
"prepended with the private key. Defaults "
"to env[OS_CERT]."))
parser.add_argument(
'--os-cacert',
metavar='<ca-certificate>',
default=env('OS_CACERT', default=None),
help=_("Specify a CA bundle file to use in "
"verifying a TLS (https) server certificate. "
"Defaults to env[OS_CACERT]."))
parser.add_argument(
'--os-key',
metavar='<key>',
default=utils.env('OS_KEY'),
help=_("Path of client key to use in SSL "
"connection. This option is not necessary "
"if your key is prepended to your certificate "
"file. Defaults to env[OS_KEY]."))
parser.add_argument(
'--os-password', metavar='<auth-password>',
default=utils.env('OS_PASSWORD'),
help=_('Authentication password, defaults to env[OS_PASSWORD].'))
parser.add_argument(
'--os_password',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-region-name', metavar='<auth-region-name>',
default=env('OS_REGION_NAME'),
help=_('Authentication region name, defaults to '
'env[OS_REGION_NAME].'))
parser.add_argument(
'--os_region_name',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-token', metavar='<token>',
default=env('OS_TOKEN'),
help=_('Authentication token, defaults to env[OS_TOKEN].'))
parser.add_argument(
'--os_token',
help=argparse.SUPPRESS)
parser.add_argument(
'--http-timeout', metavar='<seconds>',
default=env('OS_NETWORK_TIMEOUT', default=None), type=float,
help=_('Timeout in seconds to wait for an HTTP response. Defaults '
'to env[OS_NETWORK_TIMEOUT] or None if not specified.'))
parser.add_argument(
'--os-url', metavar='<url>',
default=env('OS_URL'),
help=_('Defaults to env[OS_URL].'))
parser.add_argument(
'--os_url',
help=argparse.SUPPRESS)
parser.add_argument(
'--insecure',
action='store_true',
default=env('TACKERCLIENT_INSECURE', default=False),
help=_("Explicitly allow tackerclient to perform \"insecure\" "
"SSL (https) requests. The server's certificate will "
"not be verified against any certificate authorities. "
"This option should be used with caution."))
def _bash_completion(self):
"""Prints all of the commands and options for bash-completion."""
commands = set()
options = set()
for option, _action in self.parser._option_string_actions.items():
options.add(option)
for command_name, command in self.command_manager:
commands.add(command_name)
cmd_factory = command.load()
cmd = cmd_factory(self, None)
cmd_parser = cmd.get_parser('')
for option, _action in cmd_parser._option_string_actions.items():
options.add(option)
print(' '.join(commands | options))
def _register_extensions(self, version):
for name, module in itertools.chain(
client_extension._discover_via_entry_points()):
self._extend_shell_commands(module, version)
def _extend_shell_commands(self, module, version):
classes = inspect.getmembers(module, inspect.isclass)
for cls_name, cls in classes:
if (issubclass(cls, client_extension.TackerClientExtension) and
hasattr(cls, 'shell_command')):
cmd = cls.shell_command
if hasattr(cls, 'versions'):
if version not in cls.versions:
continue
try:
self.command_manager.add_command(cmd, cls)
self.commands[version][cmd] = cls
except TypeError:
pass
def run(self, argv):
"""Equivalent to the main program for the application.
:param argv: input arguments and options
:paramtype argv: list of str
"""
try:
index = 0
command_pos = -1
help_pos = -1
help_command_pos = -1
for arg in argv:
if arg == 'bash-completion' and help_command_pos == -1:
self._bash_completion()
return 0
if arg in self.commands[self.api_version]:
if command_pos == -1:
command_pos = index
elif arg in ('-h', '--help'):
if help_pos == -1:
help_pos = index
elif arg == 'help':
if help_command_pos == -1:
help_command_pos = index
index = index + 1
if command_pos > -1 and help_pos > command_pos:
argv = ['help', argv[command_pos]]
if help_command_pos > -1 and command_pos == -1:
argv[help_command_pos] = '--help'
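            # e.g. ['vnf-list', '--help'] is rewritten to ['help', 'vnf-list'],
            # while a bare ['help'] with no known command becomes ['--help']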
self.options, remainder = self.parser.parse_known_args(argv)
self.configure_logging()
self.interactive_mode = not remainder
self.initialize_app(remainder)
except Exception as err:
if self.options.verbose_level >= self.DEBUG_LEVEL:
self.log.exception(err)
raise
else:
self.log.error(err)
return 1
if self.interactive_mode:
_argv = [sys.argv[0]]
sys.argv = _argv
return self.interact()
return self.run_subcommand(remainder)
def run_subcommand(self, argv):
subcommand = self.command_manager.find_command(argv)
cmd_factory, cmd_name, sub_argv = subcommand
cmd = cmd_factory(self, self.options)
try:
self.prepare_to_run_command(cmd)
full_name = (cmd_name
if self.interactive_mode
else ' '.join([self.NAME, cmd_name])
)
cmd_parser = cmd.get_parser(full_name)
return run_command(cmd, cmd_parser, sub_argv)
except Exception as e:
if self.options.verbose_level >= self.DEBUG_LEVEL:
self.log.exception("%s", e)
raise
self.log.error("%s", e)
return 1
def authenticate_user(self):
"""Make sure the user has provided all of the authentication
info we need.
"""
if self.options.os_auth_strategy == 'keystone':
if self.options.os_token or self.options.os_url:
# Token flow auth takes priority
if not self.options.os_token:
raise exc.CommandError(
_("You must provide a token via"
" either --os-token or env[OS_TOKEN]"
" when providing a service URL"))
if not self.options.os_url:
raise exc.CommandError(
_("You must provide a service URL via"
" either --os-url or env[OS_URL]"
" when providing a token"))
else:
# Validate password flow auth
project_info = (self.options.os_tenant_name or
self.options.os_tenant_id or
(self.options.os_project_name and
(self.options.os_project_domain_name or
self.options.os_project_domain_id)) or
self.options.os_project_id)
if (not self.options.os_username
and not self.options.os_user_id):
raise exc.CommandError(
_("You must provide a username or user ID via"
" --os-username, env[OS_USERNAME] or"
" --os-user-id, env[OS_USER_ID]"))
if not self.options.os_password:
                # No password. If we've got a tty, try prompting for it
if hasattr(sys.stdin, 'isatty') and sys.stdin.isatty():
                    # Check for Ctrl-D
try:
self.options.os_password = getpass.getpass(
'OS Password: ')
except EOFError:
pass
# No password because we didn't have a tty or the
                # user hit Ctrl-D when prompted.
if not self.options.os_password:
raise exc.CommandError(
_("You must provide a password via"
" either --os-password or env[OS_PASSWORD]"))
if (not project_info):
                    # 'tenant' is deprecated in Keystone v3. Use the latest
# terminology instead.
raise exc.CommandError(
_("You must provide a project_id or project_name ("
"with project_domain_name or project_domain_id) "
"via "
" --os-project-id (env[OS_PROJECT_ID])"
" --os-project-name (env[OS_PROJECT_NAME]),"
" --os-project-domain-id "
"(env[OS_PROJECT_DOMAIN_ID])"
" --os-project-domain-name "
"(env[OS_PROJECT_DOMAIN_NAME])"))
if not self.options.os_auth_url:
raise exc.CommandError(
_("You must provide an auth url via"
" either --os-auth-url or via env[OS_AUTH_URL]"))
auth_session = self._get_keystone_session()
auth = auth_session.auth
else: # not keystone
if not self.options.os_url:
raise exc.CommandError(
_("You must provide a service URL via"
" either --os-url or env[OS_URL]"))
auth_session = None
auth = None
self.client_manager = clientmanager.ClientManager(
token=self.options.os_token,
url=self.options.os_url,
auth_url=self.options.os_auth_url,
tenant_name=self.options.os_tenant_name,
tenant_id=self.options.os_tenant_id,
username=self.options.os_username,
user_id=self.options.os_user_id,
password=self.options.os_password,
region_name=self.options.os_region_name,
api_version=self.api_version,
auth_strategy=self.options.os_auth_strategy,
# FIXME (bklei) honor deprecated service_type and
# endpoint type until they are removed
service_type=self.options.os_service_type or
self.options.service_type,
            endpoint_type=self.options.os_endpoint_type or
            self.options.endpoint_type,
insecure=self.options.insecure,
ca_cert=self.options.os_cacert,
timeout=self.options.http_timeout,
retries=self.options.retries,
raise_errors=False,
session=auth_session,
auth=auth,
log_credentials=True)
return
def initialize_app(self, argv):
"""Global app init bits:
* set up API versions
* validate authentication info
"""
super(TackerShell, self).initialize_app(argv)
self.api_version = {'servicevm': self.api_version}
# If the user is not asking for help, make sure they
# have given us auth.
cmd_name = None
if argv:
cmd_info = self.command_manager.find_command(argv)
cmd_factory, cmd_name, sub_argv = cmd_info
if self.interactive_mode or cmd_name != 'help':
self.authenticate_user()
def configure_logging(self):
"""Create logging handlers for any log output."""
root_logger = logging.getLogger('')
# Set up logging to a file
root_logger.setLevel(logging.DEBUG)
# Send higher-level messages to the console via stderr
console = logging.StreamHandler(self.stderr)
console_level = {self.WARNING_LEVEL: logging.WARNING,
self.INFO_LEVEL: logging.INFO,
self.DEBUG_LEVEL: logging.DEBUG,
}.get(self.options.verbose_level, logging.DEBUG)
        # The default log level is INFO; in that situation, set the console
        # log level to WARNING to avoid displaying useless messages. This is
        # equivalent to using "--quiet".
if console_level == logging.INFO:
console.setLevel(logging.WARNING)
else:
console.setLevel(console_level)
if logging.DEBUG == console_level:
formatter = logging.Formatter(self.DEBUG_MESSAGE_FORMAT)
else:
formatter = logging.Formatter(self.CONSOLE_MESSAGE_FORMAT)
logging.getLogger('iso8601.iso8601').setLevel(logging.WARNING)
logging.getLogger('urllib3.connectionpool').setLevel(logging.WARNING)
console.setFormatter(formatter)
root_logger.addHandler(console)
return
def get_v2_auth(self, v2_auth_url):
return v2_auth.Password(
v2_auth_url,
username=self.options.os_username,
password=self.options.os_password,
tenant_id=self.options.os_tenant_id,
tenant_name=self.options.os_tenant_name)
def get_v3_auth(self, v3_auth_url):
project_id = self.options.os_project_id or self.options.os_tenant_id
project_name = (self.options.os_project_name or
self.options.os_tenant_name)
return v3_auth.Password(
v3_auth_url,
username=self.options.os_username,
password=self.options.os_password,
user_id=self.options.os_user_id,
user_domain_name=self.options.os_user_domain_name,
user_domain_id=self.options.os_user_domain_id,
project_id=project_id,
project_name=project_name,
project_domain_name=self.options.os_project_domain_name,
project_domain_id=self.options.os_project_domain_id
)
def _discover_auth_versions(self, session, auth_url):
        # discover the API versions the server supports, based on the
        # given URL
try:
ks_discover = discover.Discover(session=session, auth_url=auth_url)
return (ks_discover.url_for('2.0'), ks_discover.url_for('3.0'))
except ks_exc.ClientException:
            # The identity service may not support API version discovery.
            # Let's try to figure out the API version from the original URL.
url_parts = urlparse.urlparse(auth_url)
(scheme, netloc, path, params, query, fragment) = url_parts
path = path.lower()
if path.startswith('/v3'):
return (None, auth_url)
elif path.startswith('/v2'):
return (auth_url, None)
else:
# not enough information to determine the auth version
msg = _('Unable to determine the Keystone version '
'to authenticate with using the given '
'auth_url. Identity service may not support API '
'version discovery. Please provide a versioned '
'auth_url instead.')
raise exc.CommandError(msg)
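    # e.g. when discovery is unsupported, an auth_url ending in '/v3' yields
    # (None, auth_url) and one ending in '/v2.0' yields (auth_url, None)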
def _get_keystone_session(self):
# first create a Keystone session
cacert = self.options.os_cacert or None
cert = self.options.os_cert or None
key = self.options.os_key or None
insecure = self.options.insecure or False
ks_session = session.Session.construct(dict(cacert=cacert,
cert=cert,
key=key,
insecure=insecure))
# discover the supported keystone versions using the given url
(v2_auth_url, v3_auth_url) = self._discover_auth_versions(
session=ks_session,
auth_url=self.options.os_auth_url)
# Determine which authentication plugin to use. First inspect the
# auth_url to see the supported version. If both v3 and v2 are
# supported, then use the highest version if possible.
user_domain_name = self.options.os_user_domain_name or None
user_domain_id = self.options.os_user_domain_id or None
project_domain_name = self.options.os_project_domain_name or None
project_domain_id = self.options.os_project_domain_id or None
domain_info = (user_domain_name or user_domain_id or
project_domain_name or project_domain_id)
if (v2_auth_url and not domain_info) or not v3_auth_url:
ks_session.auth = self.get_v2_auth(v2_auth_url)
else:
ks_session.auth = self.get_v3_auth(v3_auth_url)
return ks_session
def main(argv=sys.argv[1:]):
try:
return TackerShell(TACKER_API_VERSION).run(
list(map(encodeutils.safe_decode, argv)))
except KeyboardInterrupt:
print("... terminating tacker client", file=sys.stderr)
return 130
except exc.TackerClientException:
return 1
except Exception as e:
print(e)
return 1
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from django.db import migrations
def clean_list(val):
if val is not None and (isinstance(val, str) or isinstance(val, unicode)):
return json.loads(val)
return val
def clean_int(val):
if val is not None and (isinstance(val, str) or isinstance(val, unicode)):
return int(val)
return val
def clean_number(val):
if val is not None and (isinstance(val, str) or isinstance(val, unicode)):
try: # it's an int
return int(val)
except ValueError:
pass
try: # it's a float
return float(val)
except ValueError:
pass
# cannot convert to number, returns string or None
return val
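# e.g. clean_number('3') -> 3, clean_number('2.5') -> 2.5, and a value that is
# not numeric, such as 'n/a', is returned unchanged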
def clean_values(apps, schema_editor):
Observation = apps.get_model("contributions", "Observation")
NumericField = apps.get_model("categories", "NumericField")
LookupField = apps.get_model("categories", "LookupField")
MultipleLookupField = apps.get_model("categories", "MultipleLookupField")
for field in NumericField.objects.all():
for observation in Observation.objects.filter(category=field.category):
if observation.properties:
value = observation.properties.get(field.key)
if value:
observation.properties[field.key] = clean_number(value)
observation.save()
for field in LookupField.objects.all():
for observation in Observation.objects.filter(category=field.category):
if observation.properties:
value = observation.properties.get(field.key)
if value:
observation.properties[field.key] = clean_int(value)
observation.save()
for field in MultipleLookupField.objects.all():
for observation in Observation.objects.filter(category=field.category):
if observation.properties:
value = observation.properties.get(field.key)
if value:
observation.properties[field.key] = clean_list(value)
observation.save()
class Migration(migrations.Migration):
dependencies = [
('contributions', '0009_auto_20150420_1549'),
]
operations = [
migrations.RunPython(clean_values),
]
|
# TestSwiftMetatype.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
"""
Test the formatting of Swift metatypes
"""
import lldb
from lldbsuite.test.lldbtest import *
from lldbsuite.test.decorators import *
import lldbsuite.test.lldbutil as lldbutil
import os
import unittest2
class TestSwiftMetatype(TestBase):
mydir = TestBase.compute_mydir(__file__)
def setUp(self):
TestBase.setUp(self)
@swiftTest
def test_metatype(self):
"""Test the formatting of Swift metatypes"""
self.build()
target, process, thread, bkpt = lldbutil.run_to_source_breakpoint(
self, 'Set breakpoint here', lldb.SBFileSpec('main.swift'))
frame = thread.frames[0]
self.assertTrue(frame, "Frame 0 is valid.")
var_s = frame.FindVariable("s")
var_c = frame.FindVariable("c")
var_f = frame.FindVariable("f")
var_t = frame.FindVariable("t")
var_p = frame.FindVariable("p")
lldbutil.check_variable(self, var_s, False, "String")
lldbutil.check_variable(self, var_c, False, "a.D")
lldbutil.check_variable(self, var_f, False, "(Int) -> Int")
lldbutil.check_variable(self, var_t, False, "(Int, Int, String)")
lldbutil.check_variable(self, var_p, False, "a.P")
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lldb.SBDebugger.Terminate)
unittest2.main()
|
#!/usr/bin/env python3
import sys, os, datetime, calendar, compile_gpx, pytz
import numpy as np
from scipy.io import wavfile
from housepy import log, config, util, drawing, science, crashdb
MEDIA_LENGTH = 2090.43 # per-video hack to get everything to line up correctly
log.info("Starting...")
if len(sys.argv) < 2 or not os.path.isdir(sys.argv[1]):
print("[data_folder]")
exit()
directory = sys.argv[1]
gpx_filename = None
wav_filename = None
for filename in os.listdir(directory):
if filename[-4:] == ".gpx":
gpx_filename = os.path.join(directory, filename)
if filename[-4:] == ".wav":
wav_filename = os.path.join(directory, filename)
log.info("GPX %s" % gpx_filename)
log.info("WAV %s" % wav_filename)
audio_start_dt = datetime.datetime.strptime(wav_filename.split('.')[0].split('/')[-1].replace('_smp', ''), "%Y%m%d %H%M%S")
audio_start_dt = util.to_utc(audio_start_dt)
# get video times
video_start_t, video_end_t = compile_gpx.get_video_times(gpx_filename)
log.info("AUDIO START %s" % audio_start_dt)
audio_start_t = float(calendar.timegm(audio_start_dt.timetuple()))
sample_rate, data = wavfile.read(wav_filename)
log.info("AUDIO SAMPLE RATE %s" % sample_rate)
log.info("AUDIO LENGTH (samples) %s" % len(data))
seconds = float(len(data)) / sample_rate
log.info("AUDIO DURATION %s" % util.format_time(seconds))
skip = video_start_t - audio_start_t
log.info("AUDIO SKIP %s%s" % ('-' if skip < 0 else '', util.format_time(abs(skip))))
# downsample to 60hz
target_sample_rate = 60.0
signal = science.downsample(data, int(sample_rate / target_sample_rate))
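# e.g. a 44.1kHz recording gives a decimation factor of int(44100 / 60.0) = 735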
log.info("NEW LENGTH (samples) %s" % len(signal))
average = np.average(signal)
reduced = signal - average
reduced = [x if x >= 0 else 0 for x in reduced]
reduced = science.smooth(reduced, window_len=50)
reduced = science.normalize(reduced)
signal = science.normalize(signal)
log.info("DETECTING PEAKS")
# the lookahead is key: it must be large enough not to split one peak in two,
# but small enough to catch every breath -- at 60Hz, lookahead=60 is roughly one breath per second
max_peaks, min_peaks = science.detect_peaks(reduced, lookahead=60)
breaths = []
for peak in max_peaks:
sample, y = peak
t = sample / target_sample_rate
t -= skip
if t < 0:
continue
if t > MEDIA_LENGTH:
continue
breaths.append(t)
log.info("SAVING")
db = crashdb.load("data.json")
db['breaths'] = breaths
db.close()
num_samples = len(signal)
ctx = drawing.Context(10000, 500, relative=True, flip=True, hsv=True)
ctx.line([(float(i) / num_samples, signal[i]) for i in range(num_samples)], stroke=(0., 0., 0.85), thickness=2)
ctx.line([(float(i) / num_samples, reduced[i]) for i in range(num_samples)], stroke=(0.55, 1., 1.), thickness=2)
for peak in max_peaks:
sample, y = peak
ctx.arc(float(sample) / num_samples, y, 5. / ctx.width, 5. / ctx.height, thickness=0., fill=(0., 1., 1.))
ctx.show()
ctx.image.save("breaths.png", 'PNG')
|
# -*- coding: utf8 -*-
from __future__ import division, print_function, unicode_literals
from io import BytesIO
from gd3 import Gd3, Gd3Error
import unittest
GD3_ENCODING = 'utf-16'
class TestGd3(unittest.TestCase):
def __write_int32(self, buffer, value):
data = bytearray()
data.append(value & 0xff)
data.append((value >> 8) & 0xff)
data.append((value >> 16) & 0xff)
data.append((value >> 24) & 0xff)
return buffer.write(data)
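    # (__write_int32 is equivalent to buffer.write(struct.pack('<I', value));
    # GD3 stores its version and size fields as little-endian 32-bit integers)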
def __write_string(self, buffer, value):
data = bytearray((value+"\0").encode(GD3_ENCODING))
return buffer.write(data)
def create_data(self):
tag = BytesIO()
self.__write_string(tag, "Track Name")
self.__write_string(tag, "トラック名")
self.__write_string(tag, "Game Name")
self.__write_string(tag, "ゲーム名")
self.__write_string(tag, "System Name")
self.__write_string(tag, "システム名")
self.__write_string(tag, "Original Track Author")
self.__write_string(tag, "作曲者")
self.__write_string(tag, "2017/02/01")
self.__write_string(tag, "John Doe")
self.__write_string(tag, "This is a test data!")
tag_data = tag.getvalue()
data = BytesIO()
data.write("\0\0\0\0".encode('latin-1')) # padding (dummy)
data.write("Gd3 ".encode('latin-1'))
self.__write_int32(data, 0x1234) # version
self.__write_int32(data, len(tag_data)) # size
data.write(tag_data)
data.seek(4) # skip padding data
return data
def test_init_ok(self):
data = self.create_data()
testee = Gd3(data)
self.assertEqual(testee.track_name_en, "Track Name")
self.assertEqual(testee.track_name_ja, "トラック名")
self.assertEqual(testee.game_name_en, "Game Name")
self.assertEqual(testee.game_name_ja, "ゲーム名")
self.assertEqual(testee.system_name_en, "System Name")
self.assertEqual(testee.system_name_ja, "システム名")
self.assertEqual(testee.original_track_author_en, "Original Track Author")
self.assertEqual(testee.original_track_author_ja, "作曲者")
self.assertEqual(testee.released_at, "2017/02/01")
self.assertEqual(testee.converted_by, "John Doe")
self.assertEqual(testee.notes, "This is a test data!")
def test_init_error(self):
data = BytesIO()
data.write("\0\0\0\0".encode('latin-1'))
data.seek(0)
with self.assertRaises(Gd3Error) as cm:
testee = Gd3(data)
e = cm.exception
self.assertEqual(e.message, "Invalid GD3 identifier.")
if __name__ == '__main__':
unittest.main()
|
"""
Given a string S and a string T, find the minimum window in S which will contain all the characters in T in complexity
O(n).
For example,
S = "ADOBECODEBANC"
T = "ABC"
Minimum window is "BANC".
Note:
If there is no such window in S that covers all characters in T, return the empty string "".
If there are multiple such windows, you are guaranteed that there will always be only one unique minimum window in S.
"""
import sys
__author__ = 'Danyang'
class Solution(object):
def minWindow(self, S, T):
"""
Algorithm:
two pointers
Aggressively enclose the chars until find all T, and then shrink the window as far as possible
:param S: str
:param T: str
:return: str
"""
min_win = [0, sys.maxint] # [start, end)
w_cnt = [0 for _ in range(256)] # window
t_cnt = [0 for _ in range(256)] # 256 ascii, static
for char in T:
t_cnt[ord(char)] += 1
appeared_cnt = 0
lo = 0
for hi in xrange(1, len(S)+1):
# expand
val = S[hi-1]
if t_cnt[ord(val)] > 0:
w_cnt[ord(val)] += 1
if t_cnt[ord(val)] > 0 and w_cnt[ord(val)] <= t_cnt[ord(val)]:
appeared_cnt += 1 # cache, determine when to decrease appeared_cnt
# shrink
if appeared_cnt == len(T): # until find all
while w_cnt[ord(S[lo])] > t_cnt[ord(S[lo])] or t_cnt[ord(S[lo])] == 0:
if w_cnt[ord(S[lo])] > 0: w_cnt[ord(S[lo])] -= 1
lo += 1
if min_win[1]-min_win[0] > hi-lo:
min_win[0], min_win[1] = lo, hi
if min_win[1] == sys.maxint:
return ""
else:
return S[min_win[0]:min_win[1]]
if __name__ == "__main__":
assert Solution().minWindow("ADOBECODEBANC", "ABC") == "BANC"
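    # extra illustrative checks, consistent with the algorithm's semantics
    assert Solution().minWindow("a", "a") == "a"
    assert Solution().minWindow("a", "aa") == ""  # no window in S covers T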
|
#!/usr/bin/env python3
# vim:fileencoding=utf-8
# License: GPL v3 Copyright: 2018, Kovid Goyal <kovid at kovidgoyal.net>
import sys
from contextlib import contextmanager
from functools import wraps
from typing import (
IO, Any, Callable, Dict, Generator, Optional, Tuple, TypeVar, Union
)
from kitty.rgb import Color, color_as_sharp, to_color
from kitty.typing import GraphicsCommandType, HandlerType, ScreenSize
from .operations_stub import CMD
GraphicsCommandType, ScreenSize # needed for stub generation
S7C1T = '\033 F'
SAVE_CURSOR = '\0337'
RESTORE_CURSOR = '\0338'
SAVE_PRIVATE_MODE_VALUES = '\033[?s'
RESTORE_PRIVATE_MODE_VALUES = '\033[?r'
SAVE_COLORS = '\033[#P'
RESTORE_COLORS = '\033[#Q'
MODES = dict(
LNM=(20, ''),
IRM=(4, ''),
DECKM=(1, '?'),
DECSCNM=(5, '?'),
DECOM=(6, '?'),
DECAWM=(7, '?'),
DECARM=(8, '?'),
DECTCEM=(25, '?'),
MOUSE_BUTTON_TRACKING=(1000, '?'),
MOUSE_MOTION_TRACKING=(1002, '?'),
MOUSE_MOVE_TRACKING=(1003, '?'),
FOCUS_TRACKING=(1004, '?'),
MOUSE_UTF8_MODE=(1005, '?'),
MOUSE_SGR_MODE=(1006, '?'),
MOUSE_URXVT_MODE=(1015, '?'),
ALTERNATE_SCREEN=(1049, '?'),
BRACKETED_PASTE=(2004, '?'),
)
F = TypeVar('F')
all_cmds: Dict[str, Callable] = {}
def cmd(f: F) -> F:
all_cmds[f.__name__] = f # type: ignore
return f
@cmd
def set_mode(which: str, private: bool = True) -> str:
num, private_ = MODES[which]
return '\033[{}{}h'.format(private_, num)
@cmd
def reset_mode(which: str) -> str:
num, private = MODES[which]
return '\033[{}{}l'.format(private, num)
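# e.g. set_mode('ALTERNATE_SCREEN') yields '\033[?1049h' and
# reset_mode('ALTERNATE_SCREEN') yields '\033[?1049l'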
@cmd
def clear_screen() -> str:
return '\033[H\033[2J'
@cmd
def clear_to_end_of_screen() -> str:
return '\033[J'
@cmd
def clear_to_eol() -> str:
return '\033[K'
@cmd
def reset_terminal() -> str:
return '\033]\033\\\033c'
@cmd
def bell() -> str:
return '\a'
@cmd
def beep() -> str:
return '\a'
@cmd
def set_window_title(value: str) -> str:
return '\033]2;' + value.replace('\033', '').replace('\x9c', '') + '\033\\'
@cmd
def set_line_wrapping(yes_or_no: bool) -> str:
return set_mode('DECAWM') if yes_or_no else reset_mode('DECAWM')
@cmd
def set_cursor_visible(yes_or_no: bool) -> str:
return set_mode('DECTCEM') if yes_or_no else reset_mode('DECTCEM')
@cmd
def set_cursor_position(x: int, y: int) -> str: # (0, 0) is top left
return '\033[{};{}H'.format(y + 1, x + 1)
@cmd
def move_cursor_by(amt: int, direction: str) -> str:
suffix = {'up': 'A', 'down': 'B', 'right': 'C', 'left': 'D'}[direction]
return f'\033[{amt}{suffix}'
@cmd
def set_cursor_shape(shape: str = 'block', blink: bool = True) -> str:
val = {'block': 1, 'underline': 3, 'bar': 5}.get(shape, 1)
if not blink:
val += 1
return '\033[{} q'.format(val)
@cmd
def set_scrolling_region(screen_size: Optional['ScreenSize'] = None, top: Optional[int] = None, bottom: Optional[int] = None) -> str:
if screen_size is None:
return '\033[r'
if top is None:
top = 0
if bottom is None:
bottom = screen_size.rows - 1
if bottom < 0:
bottom = screen_size.rows - 1 + bottom
else:
bottom += 1
return '\033[{};{}r'.format(top + 1, bottom + 1)
@cmd
def scroll_screen(amt: int = 1) -> str:
return '\033[' + str(abs(amt)) + ('T' if amt < 0 else 'S')
STANDARD_COLORS = {name: i for i, name in enumerate(
'black red green yellow blue magenta cyan gray'.split())}
STANDARD_COLORS['white'] = STANDARD_COLORS['gray']
UNDERLINE_STYLES = {name: i + 1 for i, name in enumerate(
'straight double curly'.split())}
ColorSpec = Union[int, str, Tuple[int, int, int]]
def color_code(color: ColorSpec, intense: bool = False, base: int = 30) -> str:
if isinstance(color, str):
e = str((base + 60 if intense else base) + STANDARD_COLORS[color])
elif isinstance(color, int):
e = '{}:5:{}'.format(base + 8, max(0, min(color, 255)))
else:
e = '{}:2:{}:{}:{}'.format(base + 8, *color)
return e
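# e.g. color_code('red') == '31', color_code(196) == '38:5:196' and
# color_code((255, 0, 0)) == '38:2:255:0:0'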
@cmd
def sgr(*parts: str) -> str:
return '\033[{}m'.format(';'.join(parts))
@cmd
def colored(
text: str,
color: ColorSpec,
intense: bool = False,
reset_to: Optional[ColorSpec] = None,
reset_to_intense: bool = False
) -> str:
e = color_code(color, intense)
return '\033[{}m{}\033[{}m'.format(e, text, 39 if reset_to is None else color_code(reset_to, reset_to_intense))
@cmd
def faint(text: str) -> str:
return colored(text, 'black', True)
@cmd
def styled(
text: str,
fg: Optional[ColorSpec] = None,
bg: Optional[ColorSpec] = None,
fg_intense: bool = False,
bg_intense: bool = False,
italic: Optional[bool] = None,
bold: Optional[bool] = None,
underline: Optional[str] = None,
underline_color: Optional[ColorSpec] = None,
reverse: Optional[bool] = None
) -> str:
start, end = [], []
if fg is not None:
start.append(color_code(fg, fg_intense))
end.append('39')
if bg is not None:
start.append(color_code(bg, bg_intense, 40))
end.append('49')
if underline_color is not None:
if isinstance(underline_color, str):
underline_color = STANDARD_COLORS[underline_color]
start.append(color_code(underline_color, base=50))
end.append('59')
if underline is not None:
start.append('4:{}'.format(UNDERLINE_STYLES[underline]))
end.append('4:0')
if italic is not None:
s, e = (start, end) if italic else (end, start)
s.append('3')
e.append('23')
if bold is not None:
s, e = (start, end) if bold else (end, start)
s.append('1')
e.append('22')
if reverse is not None:
s, e = (start, end) if reverse else (end, start)
s.append('7')
e.append('27')
if not start:
return text
return '\033[{}m{}\033[{}m'.format(';'.join(start), text, ';'.join(end))
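# e.g. styled('hi', fg='red', bold=True) == '\033[31;1mhi\033[39;22m'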
def serialize_gr_command(cmd: Dict[str, Union[int, str]], payload: Optional[bytes] = None) -> bytes:
from .images import GraphicsCommand
gc = GraphicsCommand()
for k, v in cmd.items():
setattr(gc, k, v)
return gc.serialize(payload or b'')
@cmd
def gr_command(cmd: Union[Dict, 'GraphicsCommandType'], payload: Optional[bytes] = None) -> str:
if isinstance(cmd, dict):
raw = serialize_gr_command(cmd, payload)
else:
raw = cmd.serialize(payload or b'')
return raw.decode('ascii')
@cmd
def clear_images_on_screen(delete_data: bool = False) -> str:
from .images import GraphicsCommand
gc = GraphicsCommand()
gc.a = 'd'
gc.d = 'A' if delete_data else 'a'
return gc.serialize().decode('ascii')
def init_state(alternate_screen: bool = True) -> str:
ans = (
S7C1T + SAVE_CURSOR + SAVE_PRIVATE_MODE_VALUES + reset_mode('LNM') +
reset_mode('IRM') + reset_mode('DECKM') + reset_mode('DECSCNM') +
set_mode('DECARM') + set_mode('DECAWM') +
set_mode('DECTCEM') + reset_mode('MOUSE_BUTTON_TRACKING') +
reset_mode('MOUSE_MOTION_TRACKING') + reset_mode('MOUSE_MOVE_TRACKING') +
reset_mode('FOCUS_TRACKING') + reset_mode('MOUSE_UTF8_MODE') +
        reset_mode('MOUSE_SGR_MODE') + reset_mode('MOUSE_URXVT_MODE') +
set_mode('BRACKETED_PASTE') + SAVE_COLORS +
'\033[*x' # reset DECSACE to default region select
)
if alternate_screen:
ans += set_mode('ALTERNATE_SCREEN') + reset_mode('DECOM')
ans += clear_screen()
ans += '\033[>31u' # extended keyboard mode
return ans
def reset_state(normal_screen: bool = True) -> str:
ans = ''
ans += '\033[<u' # restore keyboard mode
if normal_screen:
ans += reset_mode('ALTERNATE_SCREEN')
ans += RESTORE_PRIVATE_MODE_VALUES
ans += RESTORE_CURSOR
ans += RESTORE_COLORS
return ans
@contextmanager
def cursor(write: Callable[[str], None]) -> Generator[None, None, None]:
write(SAVE_CURSOR)
yield
write(RESTORE_CURSOR)
@contextmanager
def alternate_screen(f: Optional[IO[str]] = None) -> Generator[None, None, None]:
f = f or sys.stdout
print(set_mode('ALTERNATE_SCREEN'), end='', file=f)
yield
print(reset_mode('ALTERNATE_SCREEN'), end='', file=f)
@contextmanager
def raw_mode(fd: Optional[int] = None) -> Generator[None, None, None]:
import tty
import termios
if fd is None:
fd = sys.stdin.fileno()
old = termios.tcgetattr(fd)
try:
tty.setraw(fd)
yield
finally:
termios.tcsetattr(fd, termios.TCSADRAIN, old)
@cmd
def set_default_colors(
fg: Optional[Union[Color, str]] = None,
bg: Optional[Union[Color, str]] = None,
cursor: Optional[Union[Color, str]] = None,
select_bg: Optional[Union[Color, str]] = None,
select_fg: Optional[Union[Color, str]] = None
) -> str:
ans = ''
def item(which: Optional[Union[Color, str]], num: int) -> None:
nonlocal ans
if which is None:
ans += '\x1b]1{}\x1b\\'.format(num)
else:
if isinstance(which, Color):
q = color_as_sharp(which)
else:
x = to_color(which)
assert x is not None
q = color_as_sharp(x)
ans += '\x1b]{};{}\x1b\\'.format(num, q)
item(fg, 10)
item(bg, 11)
item(cursor, 12)
item(select_bg, 17)
item(select_fg, 19)
return ans
@cmd
def write_to_clipboard(data: Union[str, bytes], use_primary: bool = False) -> str:
if isinstance(data, str):
data = data.encode('utf-8')
from base64 import standard_b64encode
fmt = 'p' if use_primary else 'c'
def esc(chunk: str) -> str:
return '\x1b]52;{};{}\x07'.format(fmt, chunk)
ans = esc('!') # clear clipboard buffer
for chunk in (data[i:i+512] for i in range(0, len(data), 512)):
s = standard_b64encode(chunk).decode('ascii')
ans += esc(s)
return ans
@cmd
def request_from_clipboard(use_primary: bool = False) -> str:
return '\x1b]52;{};?\x07'.format('p' if use_primary else 'c')
# Boilerplate to make operations available via Handler.cmd {{{
def writer(handler: HandlerType, func: Callable) -> Callable:
@wraps(func)
def f(*a: Any, **kw: Any) -> None:
handler.write(func(*a, **kw))
return f
def commander(handler: HandlerType) -> CMD:
ans = CMD()
for name, func in all_cmds.items():
setattr(ans, name, writer(handler, func))
return ans
def func_sig(func: Callable) -> Generator[str, None, None]:
import inspect
import re
s = inspect.signature(func)
for val in s.parameters.values():
yield re.sub(r'ForwardRef\([\'"](\w+?)[\'"]\)', r'\1', str(val).replace('NoneType', 'None'))
def as_type_stub() -> str:
ans = [
'from typing import * # noqa',
'from kitty.typing import GraphicsCommandType, ScreenSize',
'from kitty.rgb import Color',
'import kitty.rgb',
]
methods = []
for name, func in all_cmds.items():
args = ', '.join(func_sig(func))
if args:
args = ', ' + args
methods.append(' def {}(self{}) -> str: pass'.format(name, args))
ans += ['', '', 'class CMD:'] + methods
return '\n'.join(ans) + '\n\n\n'
# }}}
|
from django.core.urlresolvers import reverse
from django.forms.models import modelformset_factory, modelform_factory
from django.http import HttpResponse
from django.shortcuts import redirect
from django.template import loader
from django_ajax.decorators import ajax
from corporate.auxiliary_scripts import update_resume_zips
from corporate.forms import AddContactForm, ContactFormSet
from corporate.models import CorporateTextField, CorporateResourceGuide
from corporate.models import CompanyContact, Company, JobField, CorporateEmail
from mig_main.utility import get_message_dict, Permissions
FORM_ERROR = 'Your submission contained errors; please correct and resubmit.'
def get_permissions(user):
permission_dict = {
'can_edit_corporate': Permissions.can_edit_corporate_page(user),
'can_add_contact': Permissions.can_add_corporate_contact(user),
'can_edit_contacts': Permissions.can_edit_corporate_page(user),
'can_add_company': Permissions.can_add_company(user),
}
return permission_dict
def get_common_context(request):
context_dict = get_message_dict(request)
contact_text = CorporateTextField.objects.filter(section='CT')
context_dict.update({
'request': request,
'contact_text': contact_text,
'main_nav': 'corporate',
})
return context_dict
def index(request):
request.session['current_page'] = request.path
template = loader.get_template('corporate/corporate.html')
involvement_text = CorporateTextField.objects.filter(section='OP')
context_dict = {
'involvement_text': involvement_text,
'subnav': 'index',
}
context_dict.update(get_common_context(request))
context_dict.update(get_permissions(request.user))
return HttpResponse(template.render(context_dict, request))
def resumes(request):
request.session['current_page'] = request.path
template = loader.get_template('corporate/resume_book.html')
context_dict = {
'by_major_zip': 'TBP_resumes_by_major.zip',
'by_year_zip': 'TBP_resumes_by_year.zip',
'subnav': 'resumes',
}
context_dict.update(get_common_context(request))
context_dict.update(get_permissions(request.user))
return HttpResponse(template.render(context_dict, request))
def update_corporate_page(request):
if not Permissions.can_edit_corporate_page(request.user):
request.session['error_message'] = 'You are not authorized to edit the corporate page'
return redirect('corporate:index')
prefix = 'corporate_page'
CorporateTextForm = modelformset_factory(CorporateTextField,
extra=1, exclude=[])
    formset = CorporateTextForm(request.POST or None, prefix=prefix)
if request.method == 'POST':
if formset.is_valid():
instances = formset.save()
request.session['success_message'] = 'Corporate page successfully updated.'
return redirect('corporate:index')
else:
request.session['error_message'] = FORM_ERROR
context_dict = {
'formset': formset,
'subnav': 'index',
'prefix': prefix,
'has_files': False,
'submit_name': 'Update Corporate Page',
'back_button': {'link': reverse('corporate:index'),
'text': 'To Corporate Page'},
'form_title': 'Edit Corporate Page Text',
'help_text': ('The text shown on the corporate main page. This text '
'uses markdown syntax.'),
'can_add_row': False,
'base': 'corporate/base_corporate.html',
}
context_dict.update(get_common_context(request))
context_dict.update(get_permissions(request.user))
template = loader.get_template('generic_formset.html')
return HttpResponse(template.render(context_dict, request))
def update_resource_guide(request):
if not Permissions.can_edit_corporate_page(request.user):
request.session['error_message'] = 'You are not authorized to edit the corporate page'
return redirect('corporate:index')
ResourceGuideForm = modelform_factory(CorporateResourceGuide, exclude=('active',))
if request.method == 'POST':
form = ResourceGuideForm(request.POST, request.FILES)
if form.is_valid():
instance = form.save(commit=False)
previously_active_guides = CorporateResourceGuide.objects.filter(active=True)
for guide in previously_active_guides:
guide.active = False
guide.save()
instance.active = True
instance.save()
update_resume_zips()
request.session['success_message'] = 'Corporate resource guide successfully updated.'
return redirect('corporate:index')
else:
request.session['error_message'] = FORM_ERROR
else:
form = ResourceGuideForm()
context_dict = {
'form': form,
'subnav': 'index',
'has_files': True,
'submit_name': 'Update Corporate Resource Guide',
'back_button': {'link': reverse('corporate:index'),
'text': 'To Corporate Page'},
'form_title': 'Edit Corporate Resource Guide',
        'help_text': ('This guide is included in the resume zip files. Update '
'it when the information (or the officer) changes.'),
'base': 'corporate/base_corporate.html',
}
context_dict.update(get_common_context(request))
context_dict.update(get_permissions(request.user))
template = loader.get_template('generic_form.html')
return HttpResponse(template.render(context_dict, request))
def add_company_contact(request):
if not Permissions.can_add_corporate_contact(request.user):
request.session['error_message'] = 'You are not authorized to add company contacts'
return redirect('corporate:index')
prefix = 'corporate_page'
can_edit = Permissions.can_edit_corporate_page(request.user)
    form = AddContactForm(request.POST or None, prefix=prefix, can_edit=can_edit)
if request.method == 'POST':
if form.is_valid():
if form.is_overdetermined():
request.session['warning_message'] = 'Name, email, phone, bio, and chapter are ignored when profile provided.'
instance = form.save()
request.session['success_message'] = 'Corporate contact successfully added.'
return redirect('corporate:index')
else:
request.session['error_message'] = FORM_ERROR
help_text = 'Add a contact to the company contacts database.'
if not can_edit:
help_text = help_text + (' Note: you are adding a suggested contact; '
'they will not be emailed unless approved by '
'the Corporate Relations Officer.')
context_dict = {
'form': form,
'subnav': 'index',
'prefix': prefix,
'has_files': False,
'submit_name': 'Add company contact',
'back_button': {'link': reverse('corporate:index'),
'text': 'To Corporate Page'},
'form_title': 'Add company contact',
'help_text': help_text,
'base': 'corporate/base_corporate.html',
}
context_dict.update(get_common_context(request))
context_dict.update(get_permissions(request.user))
template = loader.get_template('generic_form.html')
return HttpResponse(template.render(context_dict, request))
def edit_company_contacts(request):
if not Permissions.can_edit_corporate_page(request.user):
request.session['error_message'] = 'You are not authorized to add company contacts'
return redirect('corporate:index')
prefix = 'corporate_page'
    formset = ContactFormSet(request.POST or None, prefix=prefix, initial=CompanyContact.get_contacts())
if request.method == 'POST':
if formset.is_valid():
overdetermined = formset.save()
if overdetermined:
request.session['warning_message'] = 'Name, email, phone, bio, and chapter are ignored when profile provided.'
request.session['success_message'] = 'Corporate contact successfully added.'
return redirect('corporate:index')
else:
request.session['error_message'] = FORM_ERROR
context_dict = {
'formset': formset,
'subnav': 'index',
'prefix': prefix,
'has_files': False,
'submit_name': 'Update company contacts',
'back_button': {'link': reverse('corporate:index'),
'text': 'To Corporate Page'},
'form_title': 'Edit company contacts',
'help_text': ('Edit the list of company contacts. '
'Contact info is ignored if a profile is provided.'),
        'can_add_row': True,
'base': 'corporate/base_corporate.html',
}
context_dict.update(get_common_context(request))
context_dict.update(get_permissions(request.user))
template = loader.get_template('generic_formset.html')
return HttpResponse(template.render(context_dict, request))
def add_company(request):
if not Permissions.can_add_company(request.user):
request.session['error_message'] = 'You are not authorized to add companies'
return redirect('corporate:index')
prefix = 'corporate_page'
AddCompanyForm = modelform_factory(Company, exclude=[])
    form = AddCompanyForm(request.POST or None, prefix=prefix)
if request.method == 'POST':
if form.is_valid():
instance = form.save()
request.session['success_message'] = 'Company successfully added.'
return redirect('corporate:index')
else:
request.session['error_message'] = FORM_ERROR
context_dict = {
'form': form,
'subnav': 'index',
'prefix': prefix,
'has_files': False,
'submit_name': 'Add company',
'back_button': {'link': reverse('corporate:index'),
'text': 'To Corporate Page'},
'form_title': 'Add company',
'help_text': ('Add company information. If the appropriate industry '
'is not present, you need to add that first'),
'base': 'corporate/base_corporate.html',
}
context_dict.update(get_common_context(request))
context_dict.update(get_permissions(request.user))
template = loader.get_template('generic_form.html')
return HttpResponse(template.render(context_dict, request))
def add_jobfield(request):
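    """Add a new industry (JobField) to the corporate database."""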
if not Permissions.can_add_company(request.user):
request.session['error_message'] = 'You are not authorized to add industries'
return redirect('corporate:index')
prefix = 'corporate_page'
AddIndustryForm = modelform_factory(JobField, exclude=[])
    form = AddIndustryForm(request.POST or None, prefix=prefix)
if request.method == 'POST':
if form.is_valid():
instance = form.save()
request.session['success_message'] = 'Industry successfully added.'
return redirect('corporate:index')
else:
request.session['error_message'] = FORM_ERROR
context_dict = {
'form': form,
'subnav': 'index',
'prefix': prefix,
'has_files': False,
'submit_name': 'Add industry',
'back_button': {'link': reverse('corporate:index'),
'text': 'To Corporate Page'},
'form_title': 'Add industry',
'help_text': ('Add industry information. Select all relevant majors.'),
'base': 'corporate/base_corporate.html',
}
context_dict.update(get_common_context(request))
context_dict.update(get_permissions(request.user))
template = loader.get_template('generic_form.html')
return HttpResponse(template.render(context_dict, request))
def view_company_contacts(request):
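    """Display the table of company contacts."""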
if not Permissions.can_edit_corporate_page(request.user):
request.session['error_message'] = 'You are not authorized to view company contacts'
return redirect('corporate:index')
context_dict = {
'contacts': CompanyContact.get_contacts(),
'subnav': 'index',
'base': 'corporate/base_corporate.html',
}
context_dict.update(get_common_context(request))
context_dict.update(get_permissions(request.user))
template = loader.get_template('corporate/contacts_table.html')
return HttpResponse(template.render(context_dict, request))
def view_and_send_email(request):
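    """Preview each variant of the corporate email before sending."""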
if not Permissions.can_edit_corporate_page(request.user):
request.session['error_message'] = 'You are not authorized to email companies'
return redirect('corporate:index')
existing_email = CorporateEmail.objects.filter(active=True)
if existing_email.exists():
existing_email = existing_email[0]
else:
request.session['error_message'] = 'No email specified'
return redirect('corporate:index')
contacts = CompanyContact.get_contacts(gets_email=True)
context_dict = {
'contacts': contacts,
        'email': existing_email.preview_email(),
        'mig_alum_email': existing_email.preview_email(mig_alum=True),
        'other_alum_email': existing_email.preview_email(other_alum=True),
        'previous_contact_email': existing_email.preview_email(previous_contact=True),
        'personal_contact_email': existing_email.preview_email(personal_contact=True),
'subnav': 'index',
'base': 'corporate/base_corporate.html',
}
context_dict.update(get_common_context(request))
context_dict.update(get_permissions(request.user))
template = loader.get_template('corporate/view_and_send_email.html')
return HttpResponse(template.render(context_dict, request))
@ajax
def send_corporate_email(request):
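    """AJAX endpoint that emails all company contacts marked to receive email."""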
    def ajax_alert(css_class, label, message):
        # All three branches below return the same ajax fragment shape;
        # build it in one place instead of repeating the markup.
        return {'fragments': {'#ajax-message': (
            '<div id="ajax-message" class="alert alert-%s">\n'
            '<button type="button" class="close" data-dismiss="alert">×</button>\n'
            '<strong>%s:</strong>%s</div>' % (css_class, label, message))}}
    if not Permissions.can_edit_corporate_page(request.user):
        request.session['error_message'] = 'You are not authorized to email companies'
        return ajax_alert('danger', 'Error', request.session.pop('error_message'))
    existing_email = CorporateEmail.objects.filter(active=True)
    if existing_email.exists():
        existing_email[0].send_corporate_email()
        request.session['success_message'] = 'Companies successfully emailed'
        return ajax_alert('success', 'Success', request.session.pop('success_message'))
    else:
        request.session['error_message'] = 'Company email text does not exist'
        return ajax_alert('danger', 'Error', request.session.pop('error_message'))
def update_corporate_email(request):
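    """Update the active corporate email template."""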
if not Permissions.can_edit_corporate_page(request.user):
request.session['error_message'] = 'You are not authorized to email companies'
return redirect('corporate:index')
prefix = 'corporate_email'
existing_email = CorporateEmail.objects.filter(active=True)
UpdateEmailForm = modelform_factory(CorporateEmail, exclude=[])
if existing_email.exists():
        form = UpdateEmailForm(request.POST or None, prefix=prefix,
                               instance=existing_email[0])
    else:
        form = UpdateEmailForm(request.POST or None, prefix=prefix)
if request.method == 'POST':
if form.is_valid():
instance = form.save(commit=False)
            # Clearing the pk saves the edit as a new row; the old version
            # is kept (deactivated below) rather than overwritten.
            instance.id = None
            instance.pk = None
            instance.save()
            if existing_email.exists():
                ex = existing_email[0]
                ex.active = False
                ex.save()
request.session['success_message'] = 'Company email successfully updated.'
return redirect('corporate:index')
else:
request.session['error_message'] = FORM_ERROR
context_dict = {
'form': form,
'subnav': 'index',
'prefix': prefix,
'has_files': False,
'submit_name': 'Update corporate email',
'back_button': {'link': reverse('corporate:index'),
'text': 'To Corporate Page'},
'form_title': 'Update corporate email',
        'help_text': ('Update the email sent to companies to encourage their '
                      'participation in TBP corporate events.\n\nUse '
                      '{{company_name}} in the subject line as a placeholder '
                      'and {{extra_text}} in the body as a placeholder for '
                      'the extra text to members or personal contacts.'),
'base': 'corporate/base_corporate.html',
}
context_dict.update(get_common_context(request))
context_dict.update(get_permissions(request.user))
template = loader.get_template('generic_form.html')
return HttpResponse(template.render(context_dict, request))
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=g-long-lambda
"""Tests for tensorflow.ops.control_flow_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
from tensorflow.python.framework import function
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import gen_array_ops
from tensorflow.python.ops import gen_data_flow_ops
from tensorflow.python.ops import logging_ops
def check_op_order(graph):
"""Sanity check on the ordering of op id."""
for op in graph.get_operations():
for v in op.inputs:
assert v.op._id < op._id or op.type == "Merge", (
"The id of %s must be less than the id of %s" % (v.op.name, op.name))
return True
def check_consumers(graph):
"""Sanity check on the consumer list of the tensors."""
consumer_count = {}
for op in graph.get_operations():
for v in op.inputs:
cnt = consumer_count.get(v, 0)
consumer_count[v] = cnt + 1
for k, v in consumer_count.items():
if len(k.consumers()) != v:
return False
return True
def isum(s):
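  """Returns s plus the sum of the integers 0..9, built with tf.while_loop."""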
i = tf.constant(0, name="i")
c = lambda i, s: tf.less(i, 10)
b = lambda i, s: [tf.add(i, 1), tf.add(i, s)]
_, r_s = tf.while_loop(c, b, [i, s])
return r_s
class ControlFlowTest(tf.test.TestCase):
def testRefIdentity(self):
with self.test_session():
v = tf.Variable(7)
v = control_flow_ops._Identity(v)
op = tf.assign(v, 9)
v2 = control_flow_ops.with_dependencies([op], v)
self.assertTrue(check_op_order(v.graph))
self.assertTrue(isinstance(v2, tf.Tensor))
tf.initialize_all_variables().run()
self.assertEqual(9, v2.eval())
def testRefEnter(self):
with self.test_session():
v = tf.Variable(7)
enter_v = control_flow_ops._Enter(v, "foo_1", is_constant=True)
nine = tf.constant(9)
enter_nine = control_flow_ops.enter(nine, "foo_1")
op = tf.assign(enter_v, enter_nine)
v2 = control_flow_ops.with_dependencies([op], enter_v)
v3 = control_flow_ops.exit(v2)
tf.initialize_all_variables().run()
self.assertEqual(9, v3.eval())
def testRefSwitch(self):
with self.test_session():
v = tf.Variable(7)
p = tf.constant(True)
v1 = control_flow_ops._SwitchRefOrTensor(v.ref(), p)
v2 = tf.assign(v1[1], 9)
tf.initialize_all_variables().run()
self.assertEqual(9, v2.eval())
def testEnterMulExit(self):
with self.test_session():
data = tf.constant([1, 2, 3, 4, 5, 6], name="data")
enter_data = control_flow_ops.enter(data, "foo_1", False)
five = tf.constant(5)
enter_five = control_flow_ops.enter(five, "foo_1", False)
mul_op = tf.mul(enter_data, enter_five)
exit_op = control_flow_ops.exit(mul_op)
result = exit_op.eval()
self.assertAllEqual(np.array([x * 5 for x in [1, 2, 3, 4, 5, 6]]), result)
def testSwitchMergeIndexedSlices(self):
with self.test_session():
values = tf.constant([1, 2, 3, 4, 5, 6])
indices = tf.constant([0, 2, 4, 6, 8, 10])
data = tf.IndexedSlices(values, indices)
pred = tf.convert_to_tensor(True)
switch_op = control_flow_ops.switch(data, pred)
merge_op = control_flow_ops.merge(switch_op)[0]
val = merge_op.values.eval()
ind = merge_op.indices.eval()
self.assertAllEqual(np.arange(1, 7), val)
self.assertAllEqual(np.arange(0, 12, 2), ind)
def testSwitchDeadBranch(self):
with self.test_session():
data = tf.constant([1, 2, 3, 4, 5, 6], name="data")
ports = tf.convert_to_tensor(True, name="ports")
switch_op = control_flow_ops.switch(data, ports)
dead_branch = tf.identity(switch_op[0])
with self.assertRaisesWithPredicateMatch(
tf.errors.InvalidArgumentError,
lambda e: "The tensor returned for" in str(e)):
dead_branch.eval()
def testSwitchMergeLess(self):
with self.test_session():
data = tf.constant([1, 2, 3, 4, 5, 6], name="data")
zero = tf.convert_to_tensor(0)
one = tf.convert_to_tensor(1)
less_op = tf.less(zero, one)
switch_op = control_flow_ops.switch(data, less_op)
merge_op = control_flow_ops.merge(switch_op)[0]
result = merge_op.eval()
self.assertAllEqual(np.arange(1, 7), result)
def testSwitchMergeAddIdentity(self):
with self.test_session():
data = tf.constant([1, 2, 3, 4, 5, 6], name="data")
ports = tf.convert_to_tensor(False, name="ports")
switch_op = control_flow_ops.switch(data, ports)
one = tf.constant(1)
add_op = tf.add(switch_op[0], one)
id_op = tf.identity(switch_op[1])
merge_op = control_flow_ops.merge([add_op, id_op])[0]
result = merge_op.eval()
self.assertAllEqual(np.array([x + 1 for x in [1, 2, 3, 4, 5, 6]]), result)
def testSwitchMergeAddMul(self):
with self.test_session():
data = tf.constant([1, 2, 3, 4, 5, 6], name="data")
ports = tf.convert_to_tensor(True, name="ports")
switch_op = control_flow_ops.switch(data, ports)
one = tf.constant(1)
add_op = tf.add(switch_op[0], one)
five = tf.constant(5)
mul_op = tf.mul(switch_op[1], five)
merge_op = control_flow_ops.merge([add_op, mul_op])[0]
result = merge_op.eval()
self.assertAllEqual(np.array([x * 5 for x in [1, 2, 3, 4, 5, 6]]), result)
def testLoop_false(self):
with self.test_session():
false = tf.convert_to_tensor(False)
n = tf.constant(10)
enter_false = control_flow_ops.enter(false, "foo_1", False)
enter_n = control_flow_ops.enter(n, "foo_1", False)
merge_n = control_flow_ops.merge([enter_n, enter_n], name="merge_n")[0]
switch_n = control_flow_ops.switch(merge_n, enter_false)
exit_n = control_flow_ops.exit(switch_n[0])
next_n = control_flow_ops.next_iteration(switch_n[0])
merge_n.op._update_input(1, next_n)
result = exit_n.eval()
self.assertAllEqual(10, result)
def testLoop_1(self):
with self.test_session():
zero = tf.constant(0)
one = tf.constant(1)
n = tf.constant(10)
enter_i = control_flow_ops.enter(zero, "foo", False)
enter_one = control_flow_ops.enter(one, "foo", True)
enter_n = control_flow_ops.enter(n, "foo", True)
with tf.device("/gpu:0"):
merge_i = control_flow_ops.merge([enter_i, enter_i])[0]
less_op = tf.less(merge_i, enter_n)
cond_op = control_flow_ops.loop_cond(less_op)
switch_i = control_flow_ops.switch(merge_i, cond_op)
add_i = tf.add(switch_i[1], enter_one)
next_i = control_flow_ops.next_iteration(add_i)
merge_i.op._update_input(1, next_i)
exit_i = control_flow_ops.exit(switch_i[0])
result = exit_i.eval()
self.assertAllEqual(10, result)
def testLoop_2(self):
with self.test_session():
zero = tf.constant(0)
one = tf.constant(1)
n = tf.constant(10)
enter_i = control_flow_ops.enter(zero, "foo", False)
enter_one = control_flow_ops.enter(one, "foo", True)
enter_n = control_flow_ops.enter(n, "foo", True)
merge_i = control_flow_ops.merge([enter_i, enter_i])[0]
less_op = tf.less(merge_i, enter_n)
cond_op = control_flow_ops.loop_cond(less_op)
switch_i = control_flow_ops.switch(merge_i, cond_op)
add_i = tf.add(switch_i[1], enter_one)
with tf.device("/gpu:0"):
next_i = control_flow_ops.next_iteration(add_i)
merge_i.op._update_input(1, next_i)
exit_i = control_flow_ops.exit(switch_i[0])
result = exit_i.eval()
self.assertAllEqual(10, result)
def testCondBool(self):
values = tf.constant(10)
fn1 = lambda: tf.add(values, 1)
fn2 = lambda: tf.sub(values, 1)
with self.assertRaisesRegexp(TypeError, "must not be a Python bool"):
_ = tf.cond(False, fn1, fn2)
def testCondIndexedSlices(self):
with self.test_session():
values = tf.constant(10)
indices = tf.constant(0)
x = tf.IndexedSlices(values, indices)
pred = tf.less(1, 2)
fn1 = lambda: tf.IndexedSlices(tf.add(x.values, 1), indices)
fn2 = lambda: tf.IndexedSlices(tf.sub(x.values, 1), indices)
r = tf.cond(pred, fn1, fn2)
val = r.values.eval()
ind = r.indices.eval()
self.assertTrue(check_op_order(x.values.graph))
self.assertAllEqual(11, val)
self.assertAllEqual(0, ind)
def testCondIndexedSlicesDifferentTypes(self):
with self.test_session():
values = tf.constant(10)
i_32 = tf.convert_to_tensor(0, name="one", dtype=tf.int32)
i_64 = tf.convert_to_tensor(0, name="one", dtype=tf.int64)
x = tf.IndexedSlices(values, i_32)
pred = tf.less(1, 2)
fn1 = lambda: tf.IndexedSlices(tf.add(x.values, 1), i_32)
fn2 = lambda: tf.IndexedSlices(tf.sub(x.values, 1), i_64)
r = tf.cond(pred, fn1, fn2)
val = r.values.eval()
ind = r.indices.eval()
self.assertTrue(check_op_order(x.values.graph))
self.assertAllEqual(11, val)
self.assertAllEqual(0, ind)
self.assertTrue(ind.dtype == np.int64)
def testCondColocation(self):
with self.test_session(use_gpu=True):
with tf.device("/cpu:0"):
v = tf.Variable(7.0)
x = tf.constant(10.0)
pred = tf.less(1.0, 2.0)
fn1 = lambda: tf.add(v, 1.0)
fn2 = lambda: tf.sub(x, 1.0)
r = tf.cond(pred, fn1, fn2)
for op in x.graph.get_operations():
if op.name == "cond/Add/Switch":
self.assertDeviceEqual(op.device, "/cpu:0")
def _testCond_1(self, use_gpu):
with self.test_session(use_gpu=use_gpu):
x = tf.constant(10)
pred = tf.less(1, 2)
fn1 = lambda: tf.add(x, 1)
fn2 = lambda: tf.sub(x, 1)
r = tf.cond(pred, fn1, fn2)
result = r.eval()
self.assertTrue(check_op_order(x.graph))
self.assertAllEqual(11, result)
def testCond_1(self):
self._testCond_1(use_gpu=False)
self._testCond_1(use_gpu=True)
def testCond_2(self):
with self.test_session():
x = tf.constant(10)
r = tf.cond(tf.less(1, 0), lambda: tf.add(x, 1), lambda: tf.sub(x, 1))
result = r.eval()
self.assertTrue(check_op_order(x.graph))
self.assertAllEqual(9, result)
def testCond_3(self):
with self.test_session():
x = tf.constant(10)
pred = tf.less(1, 2)
fn1 = lambda: tf.add(x, 1)
fn2 = lambda: tf.sub(x, 1)
fn3 = lambda: tf.add(tf.cond(pred, fn1, fn2), 1)
r = tf.cond(pred, fn3, fn2)
result = r.eval()
self.assertTrue(check_op_order(x.graph))
self.assertAllEqual(12, result)
def testCond_4(self):
with self.test_session():
v1 = tf.Variable(7)
v2 = tf.Variable(7)
v3 = tf.Variable(7)
age = tf.constant(3)
max_age = tf.constant(2)
pred = tf.greater(age, max_age)
fn1 = lambda: [tf.assign(v1, 1).op, tf.assign(v2, 2).op]
fn2 = lambda: [tf.assign(v3, 3).op, tf.constant(10).op]
r = tf.cond(pred, fn1, fn2)
tf.initialize_all_variables().run()
self.assertEqual(len(r), 2)
result = r[1].eval()
self.assertTrue(check_op_order(age.graph))
self.assertAllEqual(True, result)
self.assertAllEqual(7, v1.eval())
self.assertAllEqual(2, v2.eval())
self.assertAllEqual(7, v3.eval())
def testCond_5(self):
with self.test_session():
alive = tf.constant(True, name="alive")
count = tf.constant(0, name="count")
def body(i):
return tf.cond(
alive, lambda: [tf.less(i, 3), tf.add(count, 1)],
lambda: [alive, count])
for i in range(10):
alive, count = body(i)
self.assertAllEqual(4, count.eval())
def testCond_6(self):
with self.test_session():
v1 = tf.Variable([7])
age = tf.constant(3)
pred = tf.greater(age, 4)
fn1 = lambda: age
fn2 = lambda: v1
r = tf.cond(pred, fn1, fn2)
tf.initialize_all_variables().run()
result = r.eval()
self.assertAllEqual(np.array([7]), result)
def testCond_7(self):
with self.test_session() as sess:
x = tf.constant(10)
y = tf.constant(200)
pred = tf.less(1, 2)
fn1 = lambda: [tf.add(x, 1), tf.add(x, 2)]
fn2 = lambda: [y, y]
r = tf.cond(pred, fn1, fn2)
self.assertAllEqual([11, 12], sess.run(r))
def testCondGrad_1(self):
with self.test_session():
x = tf.constant(10.0, name="x")
pred = tf.less(1, 2)
fn1 = lambda: tf.identity(x)
fn2 = lambda: tf.identity(x)
r = tf.cond(pred, fn1, fn2)
grad = tf.gradients(r, [x])[0]
result = grad.eval()
self.assertAllEqual(1.0, result)
def testCondGrad_2(self):
with self.test_session():
c = tf.placeholder(tf.int32, shape=[])
x = tf.constant(10.0)
pred = tf.less(c, 2)
fn1 = lambda: tf.mul(x, 42.0)
fn2 = lambda: tf.mul(x, 3.0)
r = tf.cond(pred, fn1, fn2)
grad = tf.gradients(r, [x])[0]
self.assertAllEqual(42.0, grad.eval(feed_dict={c: 1}))
self.assertAllEqual(3.0, grad.eval(feed_dict={c: 3}))
def testNestedCond_Simple(self):
with self.test_session():
x = tf.constant(0., name="X")
y = tf.cond(tf.constant(True),
lambda: x,
lambda: tf.cond(x < 1., lambda: x, lambda: x))
result = tf.gradients(y, x)[0]
self.assertEqual(1.0, result.eval())
z = tf.cond(tf.constant(False),
lambda: x,
lambda: tf.cond(x < 1., lambda: x, lambda: x))
result = tf.gradients(z, x)[0]
self.assertEqual(1.0, result.eval())
def testCondGrad_Gather(self):
with self.test_session() as sess:
v1 = tf.Variable([1.0, 42.0])
c = tf.placeholder(tf.int32, shape=[])
pred = tf.less(c, 2)
fn1 = lambda: tf.identity(v1)
fn2 = lambda: tf.gather(v1, [1, 1])
r = tf.cond(pred, fn1, fn2)
grad = tf.gradients(r, [v1])[0]
tf.initialize_all_variables().run()
# Should just be [1, 1], but possibly a sparse representation
gv, gi = sess.run([grad.values, grad.indices], feed_dict={c: 1})
      dense_gv = [sum(y for (x, y) in zip(gi, gv) if x == i)
                  for i in range(2)]
self.assertAllEqual(dense_gv, [1.0, 1.0])
# Should be [0, 2], as the else forwards v1[1] twice
gv, gi = sess.run([grad.values, grad.indices], feed_dict={c: 3})
      dense_gv = [sum(y for (x, y) in zip(gi, gv) if x == i)
                  for i in range(2)]
self.assertAllEqual(dense_gv, [0.0, 2.0])
# Microbenchmark: 10,000 iterations took 0.21s.
def testWhile_1(self):
with self.test_session():
n = tf.constant(0)
c = lambda x: tf.less(x, 10000)
b = lambda x: tf.add(x, 1)
r = tf.while_loop(c, b, [n], parallel_iterations=20)
self.assertEqual(10000, r.eval())
def testWhileWithRefs_1(self):
with self.test_session() as sess:
x = tf.Variable(0).ref()
i = tf.constant(0)
c = lambda i, x: tf.less(i, 100)
self.assertEqual(x.dtype, tf.int32_ref)
def b(i, x):
self.assertEqual(x.dtype, tf.int32_ref)
return (i+1, gen_array_ops._ref_identity(x))
r = tf.while_loop(c, b, [i, x], parallel_iterations=5)
tf.initialize_all_variables().run()
self.assertEqual(r[0].dtype, tf.int32)
self.assertEqual(r[1].dtype, tf.int32_ref)
value_i, value_x = sess.run(r)
self.assertEqual(100, value_i)
self.assertEqual(0, value_x)
def testWhile_2(self):
with self.test_session():
s = tf.constant(0)
r = isum(s)
self.assertAllEqual(45, r.eval())
# Have more than 10 parallel iterations and hence exercise k-bound
# most of the time.
def testWhile_3(self):
with self.test_session():
def compute(i, m, c, o):
m, c = [tf.add(m, 1), tf.add(c, 1)]
o = tf.add(o, m)
o = tf.add(o, c)
i = tf.add(i, 1)
return [i, m, c, o]
i = tf.convert_to_tensor(0)
m = tf.convert_to_tensor(0)
c = tf.convert_to_tensor(0)
o = tf.convert_to_tensor(0)
d = tf.convert_to_tensor(100)
r = tf.while_loop(
lambda i, m, c, o: tf.less(i, d), compute, [i, m, c, o])
result = r[3].eval()
self.assertTrue(check_op_order(i.graph))
self.assertAllEqual(10100, result)
def testWhile_4(self):
with self.test_session():
def compute(i, m, c, o):
m, c = [tf.gather(x, i), tf.gather(x, i)]
o = tf.add(o, m)
o = tf.add(o, c)
i = tf.add(i, 1)
return [i, m, c, o]
i = tf.convert_to_tensor(0)
m = tf.convert_to_tensor(0)
c = tf.convert_to_tensor(0)
o = tf.convert_to_tensor(0)
x = tf.convert_to_tensor([1, 2, 3, 4, 5, 6])
s = tf.size(x)
r = tf.while_loop(
lambda i, m, c, o: tf.less(i, s), compute, [i, m, c, o])
result = r[3].eval()
self.assertTrue(check_op_order(i.graph))
self.assertAllEqual(42, result)
def testWhile_5(self):
with self.test_session():
def compute(i, c, o):
c = tf.slice(x, tf.expand_dims(i, 0), [1])
o = tf.concat(0, [o, c])
i = tf.add(i, 1)
return [i, c, o]
i = tf.convert_to_tensor(0)
c = tf.convert_to_tensor(0)
o = tf.convert_to_tensor([0])
x = tf.convert_to_tensor([1, 2, 3, 4, 5, 6])
s = tf.size(x)
r = tf.while_loop(
lambda i, c, o: tf.less(i, s), compute, [i, c, o])
result = r[2].eval()
self.assertTrue(check_op_order(i.graph))
self.assertAllEqual(np.array([0, 1, 2, 3, 4, 5, 6]), result)
def _testWhile_Gpu_1(self, use_gpu):
with self.test_session(use_gpu=use_gpu):
n = tf.constant(1.0)
c = lambda x: tf.less(x, 10.0)
b = lambda x: tf.add(x, 1.0)
r = tf.while_loop(c, b, [n])
self.assertAllClose(10.0, r.eval())
def testWhile_Gpu_1(self):
self._testWhile_Gpu_1(use_gpu=False)
self._testWhile_Gpu_1(use_gpu=True)
def _testWhile_Gpu_2(self, use_gpu):
with self.test_session(use_gpu=use_gpu):
n = tf.constant(1.0)
c = lambda x: tf.less(x, 10.0)
def b(x):
with tf.device("/cpu:0"):
return tf.add(x, 1.0)
r = tf.while_loop(c, b, [n])
self.assertAllClose(10.0, r.eval())
  def testWhile_Gpu_2(self):
    self._testWhile_Gpu_2(use_gpu=False)
    self._testWhile_Gpu_2(use_gpu=True)
def testWhileShape(self):
with self.test_session():
i = tf.constant(0)
m = tf.ones([2, 2])
c = lambda i, j: tf.less(i, 2)
def _b(i, j):
new_i = tf.add(i, 1)
new_j = tf.tile(j, [2, 2])
return [new_i, new_j]
r = tf.while_loop(c, _b, [i, m])
r = r[1] * tf.ones([8, 8])
self.assertAllEqual(np.ones((8, 8)), r.eval())
def testWhileShapeInference(self):
with self.test_session():
i = tf.constant(0)
m = tf.ones([2, 2])
c = lambda i, j: tf.less(i, 2)
def _b(i, j):
new_i = tf.add(i, 1)
new_j = tf.concat(0, [j, j])
return [new_i, new_j]
r = tf.while_loop(c, _b, [i, m])
self.assertTrue(r[1].get_shape()[0].value is None)
self.assertEqual(r[1].get_shape()[1], tf.Dimension(2))
def _testNestedWhile_1(self, use_gpu):
with self.test_session(use_gpu=use_gpu):
n = tf.constant(0)
def cpu_sum(s):
c = lambda i, s: tf.less(i, 10)
def b(i, s):
i1 = tf.add(i, 1)
with tf.device("/cpu:0"):
s1 = tf.add(i, s)
return i1, s1
_, r_s = tf.while_loop(c, b, [n, s])
return r_s
c = lambda x: tf.less(x, 200)
b = lambda x: tf.add(x, cpu_sum(n))
r = tf.while_loop(c, b, [n])
self.assertEqual(225, r.eval())
def testNestedWhile_1(self):
self._testNestedWhile_1(use_gpu=False)
self._testNestedWhile_1(use_gpu=True)
def testWhileWithControl_1(self):
with self.test_session():
n = tf.constant(0)
r = tf.constant(0)
condition = lambda n_, r_: tf.less(n_, 10)
def body(n_, r_):
n_ = tf.add(n_, 1)
with r_.graph.control_dependencies([r_]):
r_ = tf.constant(12)
return [n_, r_]
res = tf.while_loop(condition, body, [n, r],
parallel_iterations=1)
self.assertAllEqual(12, res[1].eval())
def testWhileWithControl_2(self):
with self.test_session():
r = tf.constant(0)
condition = lambda r_: tf.less(r_, 10)
def body(r_):
with r_.graph.control_dependencies([r_]):
r_ = tf.constant(12)
return [r_]
res = tf.while_loop(condition, body, [r], parallel_iterations=1)
self.assertAllEqual(12, res.eval())
def testWhileWithControl_3(self):
with self.test_session() as sess:
b = tf.placeholder(tf.bool)
c = tf.constant(0)
with tf.control_dependencies([b]):
c = tf.while_loop(lambda x: x < 10, lambda x: x + 1, [c])
self.assertEqual(10, sess.run(c, {b: True}))
def testCondWhile_1(self):
with self.test_session():
n = tf.convert_to_tensor(0, name="n")
c = lambda x: tf.less(x, 10)
b = lambda x: tf.add(x, 1)
r = tf.cond(tf.less(0, 1),
lambda: tf.while_loop(c, b, [n]),
lambda: n)
self.assertAllEqual(10, r.eval())
def testCondWhile_2(self):
with self.test_session():
n = tf.convert_to_tensor(0)
c = lambda x: tf.less(x, 10)
b = lambda x: tf.add(x, 1)
r = tf.cond(tf.less(1, 0), lambda: tf.add(n, 1),
lambda: tf.while_loop(c, b, [n]))
self.assertAllEqual(10, r.eval())
def testWhileCond_1(self):
with self.test_session():
i = tf.convert_to_tensor(0, name="i")
n = tf.convert_to_tensor(10, name="n")
one = tf.convert_to_tensor(1, name="one")
c = lambda x: tf.less(x, n)
# pylint: disable=undefined-variable
# for OSS build
b = lambda x: tf.cond(
tf.constant(True), lambda: tf.add(x, one), lambda: tf.sub(x, one))
# pylint: enable=undefined-variable
r = tf.while_loop(c, b, [i])
self.assertAllEqual(10, r.eval())
def testWhileCond_2(self):
with self.test_session():
n = tf.convert_to_tensor(0, name="n")
c = lambda x: tf.less(x, 10)
b = lambda x: tf.cond(tf.constant(True), lambda: tf.add(x, 1), lambda: n)
r = tf.while_loop(c, b, [n])
self.assertAllEqual(10, r.eval())
def testWhileCond_3(self):
with self.test_session():
n = tf.convert_to_tensor(0)
c = lambda x: tf.less(x, 10)
# pylint: disable=undefined-variable
# for OSS build
b = lambda x: tf.cond(tf.less(0, 1), lambda: tf.add(x, 1),
lambda: tf.sub(x, 1))
# pylint: enable=undefined-variable
r = tf.while_loop(c, b, [n])
self.assertAllEqual(10, r.eval())
# NOTE: It is ok to have parallel_iterations > 1
def testWhileUpdateVariable_1(self):
with self.test_session():
select = tf.Variable([3.0, 4.0, 5.0])
n = tf.constant(0)
def loop_iterator(j):
return tf.less(j, 3)
def loop_body(j):
ns = tf.scatter_update(select, j, 10.0)
nj = tf.add(j, 1)
op = control_flow_ops.group(ns)
nj = control_flow_ops.with_dependencies([op], nj)
return [nj]
r = tf.while_loop(loop_iterator, loop_body, [n],
parallel_iterations=1)
self.assertTrue(check_op_order(n.graph))
tf.initialize_all_variables().run()
self.assertEqual(3, r.eval())
result = select.eval()
self.assertAllClose(np.array([10.0, 10.0, 10.0]), result)
def testWhileUpdateVariable_2(self):
with self.test_session():
select1 = tf.Variable([3.0, 4.0, 5.0])
select2 = tf.Variable([3.0, 4.0, 5.0])
n = tf.constant(0)
def loop_iterator(j):
return tf.less(j, 3)
def loop_body(j):
ns1 = tf.scatter_update(select1, j, 10.0)
ns2 = tf.scatter_update(select2, j, 10.0)
nj = tf.add(j, 1)
op = control_flow_ops.group(ns1, ns2)
nj = control_flow_ops.with_dependencies([op], nj)
return [nj]
r = tf.while_loop(loop_iterator, loop_body, [n],
parallel_iterations=1)
self.assertTrue(check_op_order(n.graph))
tf.initialize_all_variables().run()
self.assertEqual(3, r.eval())
result1 = select1.eval()
self.assertAllClose(np.array([10.0, 10.0, 10.0]), result1)
result2 = select2.eval()
self.assertAllClose(np.array([10.0, 10.0, 10.0]), result2)
def testWhileUpdateVariable_3(self):
with self.test_session():
select = tf.Variable([3.0, 4.0, 5.0])
n = tf.constant(0)
def loop_iterator(j, _):
return tf.less(j, 3)
def loop_body(j, _):
ns = tf.scatter_update(select, j, 10.0)
nj = tf.add(j, 1)
return [nj, ns]
r = tf.while_loop(loop_iterator, loop_body,
[n, tf.identity(select)],
parallel_iterations=1)
tf.initialize_all_variables().run()
result = r[1].eval()
self.assertTrue(check_op_order(n.graph))
self.assertAllClose(np.array([10.0, 10.0, 10.0]), result)
# b/24814703
def testWhileUpdateVariable_4(self):
with self.test_session():
var_a = tf.Variable(0, name="a")
var_b = tf.Variable(0, name="b")
tf.initialize_all_variables().run()
c = tf.constant(0, name="c")
asn1 = tf.assign_add(var_a, 1, name="a_add")
# Loop condition
def pred(i):
return tf.less(i, 10)
# Loop body
def loop_body(i):
asn2 = tf.assign_add(var_b, asn1, name="b_add")
with tf.control_dependencies([asn2]):
ni = tf.add(i, 1, name="i_add")
return ni
lpa = tf.while_loop(pred, loop_body, [c],
parallel_iterations=1)
self.assertEqual(0, var_b.eval())
lpa.eval() # Run the loop
self.assertEqual(10, var_b.eval())
# b/24736492
def testWhileUpdateVariable_5(self):
with self.test_session():
# Create some variables.
var_a = tf.Variable(0, name="a")
var_b = tf.Variable(0, name="b")
tf.initialize_all_variables().run()
# Change condition to check var_b
def pred(_):
return tf.less(var_b, 10)
# Change body to increment var_b
def loop_body(i):
asn1 = tf.assign_add(var_a, tf.constant(1), name="a_add")
asn2 = tf.assign_add(var_b, tf.constant(1), name="b_add")
with tf.control_dependencies([asn1, asn2]):
inc_b = tf.identity(var_b)
return inc_b
lpa = tf.while_loop(pred, loop_body, [var_b], 1, name="loop")
self.assertEqual(0, var_b.eval())
lpa.eval() # Run the loop
self.assertEqual(10, var_a.eval())
self.assertEqual(10, var_b.eval())
# b/24814668
def testWhileUpdateVariable_6(self):
with self.test_session():
# Create some variables.
var_a = tf.Variable(0, name="a")
var_b = tf.Variable(0, name="b")
c = tf.constant(0)
tf.initialize_all_variables().run()
# Loop condition
def pred(i):
return tf.less(i, 10)
# Loop body
def loop_body(i):
asn1 = tf.assign_add(var_a, 1, name="a_add")
with tf.control_dependencies([asn1]):
asn2 = tf.assign_add(var_b, var_a, name="b_add")
with tf.control_dependencies([asn2]):
ni = tf.add(i, 1, name="i_add")
return ni
lpa = tf.while_loop(pred, loop_body, [c], 1, name="loop")
self.assertEqual(0, var_b.eval())
lpa.eval() # Run the loop
self.assertEqual(55, var_b.eval())
self.assertEqual(10, var_a.eval())
def testWhileQueue_1(self):
with self.test_session():
q = tf.FIFOQueue(-1, tf.int32)
i = tf.constant(0)
def c(i):
return tf.less(i, 10)
def b(i):
ni = tf.add(i, 1)
ni = control_flow_ops.with_dependencies([q.enqueue((i,))], ni)
return ni
r = tf.while_loop(c, b, [i], parallel_iterations=1)
self.assertEqual([10], r.eval())
for i in xrange(10):
self.assertEqual([i], q.dequeue().eval())
def testWhileStack_1(self):
with self.test_session():
s = gen_data_flow_ops._stack(tf.int32, stack_name="foo")
i = tf.constant(0)
def c(i):
return tf.less(i, 10)
def b(i):
ni = tf.add(i, 1)
ni = control_flow_ops.with_dependencies(
[gen_data_flow_ops._stack_push(s, i)], ni)
return ni
r = tf.while_loop(c, b, [i], parallel_iterations=1)
x = tf.constant(0)
def c1(i, _):
return tf.greater(i, 0)
def b1(i, x):
ni = tf.sub(i, 1)
nx = x + gen_data_flow_ops._stack_pop(s, tf.int32)
return [ni, nx]
_, rx = tf.while_loop(c1, b1, [r, x], parallel_iterations=1)
self.assertEqual(45, rx.eval())
def testWhileGrad_Square(self):
with self.test_session():
v = tf.constant(2.0, name="v")
c = lambda v: tf.less(v, 100.0)
b = tf.square
r = tf.while_loop(c, b, [v], parallel_iterations=1)
r = control_flow_ops.cond(tf.less(1, 2), lambda: r, lambda: v)
r = tf.gradients(r, v)[0]
self.assertAllClose(1024.0, r.eval())
def testWhileGrad_Shape(self):
with self.test_session():
x = tf.placeholder(tf.float32, shape=[None])
v = tf.constant([2.0], name="v")
n = tf.constant(0, name="n")
c = lambda i, v: tf.less(i, 5)
b = lambda i, v: [i + 1, tf.mul(x, v)]
r = tf.while_loop(c, b, [n, v], parallel_iterations=1)
r = tf.gradients(r[1], x)[0]
self.assertEqual([None], r.get_shape().as_list())
self.assertAllClose([810.0, 2560.0], r.eval(feed_dict={x: [3.0, 4.0]}))
def testWhileGrad_MultipleUses(self):
with self.test_session():
v = tf.constant(2.0, name="v")
c = lambda v: tf.less(v, 100.0)
b = tf.square
r = tf.while_loop(c, b, [v], parallel_iterations=1)
r = tf.mul(r, r)
r = tf.gradients(r, v)[0]
self.assertEqual(524288.0, r.eval())
def testWhileGrad_LoopAdd(self):
with self.test_session():
v = tf.constant(2.0, name="v")
c = lambda v: tf.less(v, 100.0)
b = tf.square
r = tf.while_loop(c, b, [v], parallel_iterations=1)
r = tf.add(r, r)
r = tf.gradients(r, v)[0]
self.assertAllClose(2048.0, r.eval())
def _testWhileGrad_Mul(self, use_gpu, p_iters):
with self.test_session(use_gpu=use_gpu) as sess:
a = tf.constant(3.0, name="a")
v = tf.constant(2.0, name="v")
c = lambda v: tf.less(v, 100.0)
b = lambda v: tf.mul(v, a)
r = tf.while_loop(c, b, [v], parallel_iterations=p_iters)
grad_a, grad_v = tf.gradients(r, [a, v])
grad_a_val, grad_v_val = sess.run([grad_a, grad_v])
self.assertAllClose(216.0, grad_a_val)
self.assertAllClose(81.0, grad_v_val)
def testWhileGrad_Mul(self):
self._testWhileGrad_Mul(use_gpu=False, p_iters=1)
self._testWhileGrad_Mul(use_gpu=False, p_iters=10)
self._testWhileGrad_Mul(use_gpu=True, p_iters=1)
self._testWhileGrad_Mul(use_gpu=True, p_iters=10)
def testWhileGrad_Variable(self):
with self.test_session():
a = tf.Variable(3.0)
v = tf.constant(2.0, name="v")
c = lambda v: tf.less(v, 100.0)
b = lambda v: tf.mul(v, a)
r = tf.while_loop(c, b, [v], parallel_iterations=1)
r = tf.gradients(r, a)
tf.initialize_all_variables().run()
self.assertAllClose(216.0, r[0].eval())
def testWhileGrad_ys_xs(self):
with self.test_session():
x = tf.constant(3.0, name="x")
y = tf.constant(2.0, name="y")
c = lambda x, y: tf.less(x, 100.0)
def b(x, y):
y1 = tf.add(x, y)
x1 = tf.mul(x, y1)
return x1, y1
rx, ry = tf.while_loop(c, b, [x, y], parallel_iterations=1)
r = tf.gradients([rx, ry], x)
self.assertAllClose(304.0, r[0].eval())
r = tf.gradients([rx, ry], y)
self.assertAllClose(124.0, r[0].eval())
r = tf.gradients([rx], x)
self.assertAllClose(295.0, r[0].eval())
r = tf.gradients([rx], y)
self.assertAllClose(120.0, r[0].eval())
def testWhileGrad_Dependency(self):
with self.test_session():
i = tf.constant(0, name="i")
x = tf.constant(2.0, name="x")
c = lambda i, x: tf.less(i, 10)
def b(i, x):
x = tf.mul(x, 2.0)
i = tf.add(i, 1)
return i, x
ri, rx = tf.while_loop(c, b, [i, x], parallel_iterations=1)
r = tf.gradients([ri, rx], x)
self.assertAllClose(1024.0, r[0].eval())
r = tf.gradients([rx], x)
self.assertAllClose(1024.0, r[0].eval())
def testWhileGrad_NoGradient(self):
with self.test_session():
v = tf.constant(2.0, name="v")
c = lambda v: tf.less(v, 100.0)
b = tf.square
r = tf.while_loop(c, b, [v], back_prop=False)
r = tf.add(r, v)
r = tf.gradients(r, v)
self.assertAllClose(1.0, r[0].eval())
def testWhileGrad_NoDependency(self):
with self.test_session() as sess:
variable = tf.Variable(tf.ones([2, 3]))
time = tf.zeros([], dtype=tf.int32)
def cond(time, tensor, _):
return time < 10
def body(time, tensor, _):
return (time+1, tensor, tensor)
loop_vars = [time, variable, variable]
tensors = tf.while_loop(cond=cond, body=body, loop_vars=loop_vars)
cost = tf.reduce_sum(tensors[2])
grad = tf.gradients(cost, [variable])
tf.initialize_all_variables().run()
self.assertAllClose(np.ones([2, 3]), sess.run(grad[0]))
def testWhileGrad_Const(self):
with self.test_session() as sess:
c0 = tf.constant(0.0, name="c0")
c1 = tf.constant(1.0, name="c1")
time = tf.constant(0, name="t")
def cond(time, _):
return time < 1
def body(time, tensor):
return time+1, c1
loop_vars = [time, c0]
tensors = tf.while_loop(cond=cond, body=body, loop_vars=loop_vars)
cost = tf.reduce_sum(tensors[1])
grad = tf.gradients(cost, [c0])
self.assertAllClose(0.0, sess.run(grad[0]))
def testWhileGrad_SerialTwoLoops(self):
with self.test_session():
i = tf.constant(0, name="i")
x = tf.constant(2.0, name="x")
c = lambda i, x: tf.less(i, 5)
def b(i, x):
x = tf.mul(x, 2.0)
i = tf.add(i, 1)
return i, x
_, rx = tf.while_loop(c, b, [i, x], parallel_iterations=1)
_, rx = tf.while_loop(c, b, [i, rx], parallel_iterations=1)
r = tf.gradients([rx], x)
self.assertAllClose(1024.0, r[0].eval())
def testWhileGrad_ParallelTwoLoops(self):
with self.test_session():
i = tf.constant(0, name="i")
x = tf.constant(2.0, name="x")
c = lambda i, x: tf.less(i, 5)
def b(i, x):
x = tf.mul(x, 2.0)
i = tf.add(i, 1)
return i, x
_, r1 = tf.while_loop(c, b, [i, x], parallel_iterations=1)
_, r2 = tf.while_loop(c, b, [i, x], parallel_iterations=1)
rx = tf.add(r1, r2)
r = tf.gradients([rx], x)
self.assertAllClose(64.0, r[0].eval())
def _testNestedWhileGrad_Simple(self, use_gpu):
with self.test_session(use_gpu=use_gpu):
v = tf.constant(1.0)
def inner_loop(s):
c = lambda x: tf.less(x, 4.0)
b = lambda x: tf.mul(x, 2.0)
return tf.while_loop(c, b, [s])
c = lambda x: tf.less(x, 2.0)
b = lambda x: tf.mul(inner_loop(x), 2.0)
r = tf.while_loop(c, b, [v])
r = tf.gradients(r, v)[0]
self.assertAllClose(8.0, r.eval())
def testNestedWhileGrad_Simple(self):
self._testNestedWhileGrad_Simple(use_gpu=False)
self._testNestedWhileGrad_Simple(use_gpu=True)
def testNestedWhileGrad_SerialInner(self):
with self.test_session():
v = tf.constant(1.0)
def inner_loop1(s):
z = tf.constant(0)
c = lambda i, x: tf.less(i, 4)
b = lambda i, x: [tf.add(i, 1), tf.mul(x, 2.0)]
return tf.while_loop(c, b, [z, s])
def inner_loop2(s):
z = tf.constant(0)
c = lambda i, x: tf.less(i, 4)
b = lambda i, x: [tf.add(i, 1), tf.mul(x, 2.0)]
return tf.while_loop(c, b, [z, s])
c = lambda x: tf.less(x, 128.0)
b = lambda x: inner_loop2(inner_loop1(x)[1])[1]
r = tf.while_loop(c, b, [v])
r = tf.gradients(r, v)[0]
self.assertAllClose(256.0, r.eval())
def testNestedWhileGrad_ParallelInner(self):
with self.test_session():
v = tf.constant(1.0)
def inner_loop1(s):
z = tf.constant(0)
c = lambda i, x: tf.less(i, 4)
b = lambda i, x: [tf.add(i, 1), tf.mul(x, 2.0)]
return tf.while_loop(c, b, [z, s])
def inner_loop2(s):
z = tf.constant(0)
c = lambda i, x: tf.less(i, 4)
b = lambda i, x: [tf.add(i, 1), tf.mul(x, 2.0)]
return tf.while_loop(c, b, [z, s])
c = lambda x: tf.less(x, 128.0)
b = lambda x: tf.mul(inner_loop1(x)[1], inner_loop2(x)[1])
r = tf.while_loop(c, b, [v])
r = tf.gradients(r, v)[0]
self.assertAllClose(512.0, r.eval())
def _testWhileCondGrad_Simple(self, use_gpu):
with self.test_session(use_gpu=use_gpu):
v = tf.convert_to_tensor(2.0, name="v")
n = tf.convert_to_tensor(100.0, name="n")
one = tf.convert_to_tensor(1.0, name="one")
c = lambda x: tf.less(x, n)
# pylint: disable=undefined-variable
# for OSS build
b = lambda x: control_flow_ops.cond(tf.constant(True),
lambda: tf.square(x),
lambda: tf.sub(x, one))
# pylint: enable=undefined-variable
r = tf.while_loop(c, b, [v])
r = tf.gradients(r, v)[0]
self.assertAllClose(1024.0, r.eval())
def testWhileCondGrad_Simple(self):
self._testWhileCondGrad_Simple(use_gpu=False)
self._testWhileCondGrad_Simple(use_gpu=True)
def testWhileCondGrad_UnknownShape(self):
with self.test_session() as sess:
v = tf.placeholder(tf.float32)
n = tf.convert_to_tensor(100.0, name="n")
one = tf.convert_to_tensor(1.0, name="one")
c = lambda x: tf.less(x, n)
# pylint: disable=undefined-variable
# for OSS build
b = lambda x: control_flow_ops.cond(tf.constant(True),
lambda: tf.square(x),
lambda: tf.sub(x, one))
# pylint: enable=undefined-variable
r = tf.while_loop(c, b, [v])
r = tf.gradients(r, v)[0]
r = sess.run(r, feed_dict={v: 2.0})
self.assertAllClose(1024.0, r)
def testWhileWithRefsWithGradients_1(self):
with self.test_session() as sess:
x = tf.Variable(0).ref()
i = tf.constant(0)
c = lambda i, x: tf.less(i, 10)
self.assertEqual(x.dtype, tf.int32_ref)
# pylint: disable=protected-access
def body(i, x):
self.assertEqual(x.dtype, tf.int32_ref)
return (i+1, gen_array_ops._ref_identity(x))
# pylint: enable=protected-access
r = tf.while_loop(c, body, [i, x], parallel_iterations=5)
grad_ys = [tf.Variable(73).ref()]
grad = tf.gradients([r[1]], [x], grad_ys=grad_ys)
tf.initialize_all_variables().run()
self.assertEqual(r[0].dtype, tf.int32)
self.assertEqual(r[1].dtype, tf.int32_ref)
value_i, value_x, value_x_grad = sess.run(r + grad)
self.assertEqual(10, value_i)
self.assertEqual(0, value_x)
self.assertEqual(73, value_x_grad)
def testWhileGrad_IndexedSlices(self):
with self.test_session():
values = tf.constant([2.0, 4.0], name="values")
indices = tf.constant([0, 3], name="indices")
shape = tf.constant([10], name="dense_shape")
i = tf.constant(0)
x = tf.IndexedSlices(values, indices, dense_shape=shape)
def c(i, _):
return i < 10
def b(i, x):
return [i + 1, tf.IndexedSlices(x.values * 2.0, x.indices,
x.dense_shape)]
_, r = tf.while_loop(c, b, [i, x])
r = tf.gradients(r.values, values)[0]
self.assertAllClose(np.array([1024.0, 1024.0]), r.eval())
def testWhileGrad_SparseTensor(self):
with self.test_session():
values = tf.constant([2.0, 4.0], name="values")
indices = tf.constant([[0], [3]], dtype=tf.int64, name="indices")
shape = tf.constant([10], dtype=tf.int64, name="dense_shape")
i = tf.constant(0)
x = tf.SparseTensor(indices, values, shape=shape)
def c(i, _):
return i < 10
def b(i, x):
return [i + 1, tf.SparseTensor(x.indices, x.values * 2.0,
x.shape)]
_, r = tf.while_loop(c, b, [i, x])
r = tf.gradients(r.values, values)[0]
self.assertAllClose(np.array([1024.0, 1024.0]), r.eval())
def testCallGradInLoop(self):
with self.test_session() as sess:
i0 = tf.constant(0)
params = tf.constant(5.0)
params_1 = tf.square(params)
def c(i, _):
return i < 10
def b(i, x):
data = tf.constant([1.0, 2.0, 3.0])
data = tf.mul(data, params_1)
x1 = x + tf.gradients(data, params)[0]
return i + 1, x1
output_grad = tf.while_loop(c, b, [i0, tf.constant(0.0)])
self.assertAllClose(600.0, sess.run(output_grad)[1])
def testWhileGradGrad(self):
theta = tf.Variable(initial_value=1.)
def fn(x, prev):
return prev + x * theta
result = tf.scan(fn, [1., 2., 3.])
grad_theta = tf.gradients(result, theta)
with self.assertRaisesRegexp(TypeError, "Second-order gradient"):
tf.gradients(grad_theta, theta)
def testOneValueCond(self):
with self.test_session():
c = tf.placeholder(tf.int32, shape=[])
one = tf.convert_to_tensor(1, name="one")
two = tf.convert_to_tensor(2, name="two")
p = tf.greater_equal(c, 1)
i = tf.cond(p, lambda: one, lambda: two)
self.assertTrue(isinstance(i, tf.Tensor))
# True case: c = 2 is >= 1
self.assertEqual([1], i.eval(feed_dict={c: 2}))
# False case: c = 0 is not >= 1
self.assertEqual([2], i.eval(feed_dict={c: 0}))
def testExampleCond(self):
with self.test_session():
x = tf.convert_to_tensor([-2.0, 2.0], name="x")
d = tf.placeholder(tf.int32, shape=[])
def l2():
return tf.sqrt(tf.reduce_sum(tf.square(x)))
def l1():
return tf.reduce_sum(tf.abs(x))
i = tf.cond(tf.equal(d, 2), l2, l1)
self.assertAllClose(4.0, i.eval(feed_dict={d: 1}))
self.assertAllClose(2.0 * math.sqrt(2), i.eval(feed_dict={d: 2}))
def testCase(self):
with self.test_session():
x = tf.constant(1)
y = tf.constant(2)
z = tf.constant(3)
f1 = lambda: tf.constant(17)
f2 = lambda: tf.constant(23)
f3 = lambda: tf.constant(-1)
r1 = tf.case({x < y: f1, x > z: f2}, default=f3, exclusive=True)
self.assertAllEqual(r1.eval(), 17)
r2 = tf.case([(y > z, f1), (y > x, f2)], default=f3)
self.assertAllEqual(r2.eval(), 23)
# Duplicate events can happen, first one is selected
r3 = tf.case([(x < y, f1), (x < y, f2)], default=f3)
self.assertAllEqual(r3.eval(), 17)
# Duplicate events cause an error if exclusive = True
r4 = tf.case([(x < y, f1), (x < y, f2)], default=f3, exclusive=True)
with self.assertRaisesOpError(
"More than one condition evaluated as True but exclusive=True."):
r4.eval()
# Check that the default is called if none of the others are
r5 = tf.case({x > y: f1}, default=f3)
self.assertAllEqual(r5.eval(), -1)
ran_once = [False, False, False]
def break_run_twice(ix):
def _break():
ran_once[ix] = True
return tf.constant(ix)
return _break
# Should not fail - each conditional gets called exactly once
# except default. Default gets called twice: once to create an
# empty output and once for the actual cond switch.
r6 = tf.case([(x < y, break_run_twice(0)), (x > y, break_run_twice(1))],
default=lambda: tf.constant(2))
self.assertAllEqual(r6.eval(), 0)
def testCaseSideEffects(self):
with self.test_session() as sess:
v0 = tf.Variable(-1)
v1 = tf.Variable(-1)
v2 = tf.Variable(-1)
a = lambda: control_flow_ops.with_dependencies([tf.assign(v0, 0)], 0)
b = lambda: control_flow_ops.with_dependencies([tf.assign(v1, 1)], 1)
c = lambda: control_flow_ops.with_dependencies([tf.assign(v2, 2)], 2)
x = tf.constant(1)
y = tf.constant(2)
r0 = tf.case(((x < y, a), (x > y, b)), default=c, exclusive=True)
r1 = tf.case(((x > y, a), (x < y, b)), default=c, exclusive=True)
r2 = tf.case(((x > y, a), (x > y, b)), default=c, exclusive=True)
tf.initialize_all_variables().run()
self.assertAllEqual(sess.run([v0, v1, v2]), [-1] * 3)
self.assertEqual(2, r2.eval())
self.assertAllEqual(sess.run([v0, v1, v2]), [-1, -1, 2])
tf.initialize_all_variables().run()
self.assertAllEqual(sess.run([v0, v1, v2]), [-1] * 3)
self.assertEqual(1, r1.eval())
self.assertAllEqual(sess.run([v0, v1, v2]), [-1, 1, -1])
tf.initialize_all_variables().run()
self.assertAllEqual(sess.run([v0, v1, v2]), [-1] * 3)
self.assertEqual(0, r0.eval())
self.assertAllEqual(sess.run([v0, v1, v2]), [0, -1, -1])
def testOneOpCond(self):
with self.test_session():
v = tf.Variable(0)
c = tf.convert_to_tensor(0)
one = tf.convert_to_tensor(1)
two = tf.convert_to_tensor(2)
p = tf.greater_equal(c, 1)
def a():
return tf.assign(v, one)
def b():
return tf.assign(v, two)
i = tf.cond(p, a, b)
self.assertTrue(isinstance(i, tf.Tensor))
tf.initialize_all_variables().run()
self.assertEqual(0, v.eval())
# True case: c = 2 is >= 1, v is set to 1.
self.assertEqual(1, i.eval(feed_dict={c.name: 2}))
self.assertEqual(1, v.eval())
# False case: c = 0 is not >= 1, v is set to 2.
self.assertEqual(2, i.eval(feed_dict={c.name: 0}))
self.assertEqual(2, v.eval())
def testWithOpsDependencies(self):
with self.test_session() as sess:
v = tf.Variable(0.0)
c = tf.constant(10)
# Fetching v directly will result in an uninitialized error
with self.assertRaisesOpError("Attempting to use uninitialized value"):
sess.run([c, v])
# Use a control dependency to ensure init_variable is run
# while asking for c
real_v = control_flow_ops.with_dependencies(
name="real_tensor",
output_tensor=v.ref(),
dependencies=[v.initializer])
c_val, real_v_val = sess.run([c, real_v])
# Ensure the result of 'real_c' is the same as 'c'
self.assertAllEqual(10, c_val)
# Ensure that 'v' is initialized
self.assertAllClose(0.0, real_v_val)
def testWithTensorDependencies(self):
with self.test_session():
v = tf.Variable(0.0)
c1 = tf.constant(10)
c2 = tf.constant(20)
# c1_with_init_v depends on the init op for v
c1_with_init_v = control_flow_ops.with_dependencies(
name="c1_with_init_v",
output_tensor=c1,
dependencies=[v.initializer])
# c2_with_c1 depends on the value of c1_with_init_v
c2_with_c1_dep = control_flow_ops.with_dependencies(
name="c2_with_c1_dep",
output_tensor=c2,
dependencies=[c1_with_init_v])
# Fetching v directly will result in an uninitialized error
with self.assertRaisesOpError("Attempting to use uninitialized value"):
v.eval()
# Get the value of 'c2_with_c1_dep', which should cause 'v'
# to be initialized.
self.assertAllEqual(20, c2_with_c1_dep.eval())
# Ensure that 'v' is initialized
self.assertAllClose(0.0, v.eval())
def testWithIndexedSlicesDependencies(self):
with self.test_session():
v = tf.Variable(
np.array([[0.0, 1.0], [10.0, 11.0], [20.0, 21.0]]).astype(np.float32))
v_at_1 = tf.IndexedSlices(v, tf.constant([1]))
gather_v_at_1 = tf.gather(v_at_1.values, v_at_1.indices)
v_at_1_after_init = control_flow_ops.with_dependencies([v.initializer],
v_at_1)
gather_v_at_1_after_init = tf.gather(
v_at_1_after_init.values, v_at_1_after_init.indices)
# Fetching gather_v_at_1 will result in an uninitialized error
with self.assertRaisesOpError("Attempting to use uninitialized value"):
gather_v_at_1.eval()
# Getting gather_v_at_1_after_init will work, and initialize v.
self.assertAllEqual([[10.0, 11.0]], gather_v_at_1_after_init.eval())
# Double check that 'v' is initialized
self.assertAllClose([[0.0, 1.0], [10.0, 11.0], [20.0, 21.0]], v.eval())
def testDependenciesDevice(self):
with tf.Graph().as_default():
# device set on tensor => same device on dep.
with tf.device("/job:ps"):
vd = tf.Variable([0.0])
with_vd_dep = control_flow_ops.with_dependencies([vd.initializer], vd)
self.assertTrue("/job:ps" in with_vd_dep.device)
# No device set on tensor => no device on dep.
vnod = tf.Variable([0.0])
with_vnod_dep = control_flow_ops.with_dependencies([vnod.initializer],
vnod)
self.assertDeviceEqual(None, with_vnod_dep.device)
# device set on tensor, default device on graph => default device on dep.
vdef = tf.Variable([0.0], name="vdef")
with tf.device("/job:worker/gpu:1"):
with_vdef_dep = control_flow_ops.with_dependencies([vdef.initializer],
vdef)
# The device is empty, but the colocation constraint is set.
self.assertDeviceEqual("", with_vdef_dep.device)
self.assertEqual([b"loc:@vdef"],
with_vdef_dep.op.colocation_groups())
def testGroup(self):
with self.test_session() as sess:
v1 = tf.Variable([0.0])
v2 = tf.Variable([1.0])
# Group init1 and init2 and run.
init = control_flow_ops.group(v1.initializer, v2.initializer)
# Fetching v1 directly will result in an uninitialized error
with self.assertRaisesOpError("Attempting to use uninitialized value"):
v1.eval()
# Runs "init" before fetching v1 and v2.
init.run()
v1_val, v2_val = sess.run([v1, v2])
# Ensure that v1 and v2 are initialized
self.assertAllClose([0.0], v1_val)
self.assertAllClose([1.0], v2_val)
def testGroupEmpty(self):
op = tf.group()
self.assertEqual(op.type, "NoOp")
self.assertEqual(op.control_inputs, [])
def testMergeShapes(self):
# All inputs unknown.
p1 = tf.placeholder(tf.float32)
p2 = tf.placeholder(tf.float32)
p3 = tf.placeholder(tf.float32)
m, index = control_flow_ops.merge([p1, p2, p3])
self.assertIs(None, m.get_shape().ndims)
self.assertEqual([], index.get_shape())
# All inputs known with different ranks.
p1 = tf.placeholder(tf.float32, shape=[1, 2])
p2 = tf.placeholder(tf.float32, shape=[1, 2, 3])
m, index = control_flow_ops.merge([p1, p2])
self.assertIs(None, m.get_shape().ndims)
self.assertEqual([], index.get_shape())
# All inputs known with some dimensions different.
p1 = tf.placeholder(tf.float32, shape=[1, 2])
p2 = tf.placeholder(tf.float32, shape=[2, 1])
m, index = control_flow_ops.merge([p1, p2])
self.assertEqual([None, None], m.get_shape().as_list())
self.assertEqual([], index.get_shape())
p1 = tf.placeholder(tf.float32, shape=[1, 2])
p2 = tf.placeholder(tf.float32, shape=[None, 2])
m, index = control_flow_ops.merge([p1, p2])
self.assertEqual([None, 2], m.get_shape().as_list())
self.assertEqual([], index.get_shape())
p1 = tf.placeholder(tf.float32, shape=[1, 2])
p2 = tf.placeholder(tf.float32, shape=[2, 2])
m, index = control_flow_ops.merge([p1, p2])
self.assertEqual([None, 2], m.get_shape().as_list())
self.assertEqual([], index.get_shape())
# All inputs known with same dimensions.
p1 = tf.placeholder(tf.float32, shape=[1, 2])
p2 = tf.placeholder(tf.float32, shape=[1, 2])
m, index = control_flow_ops.merge([p1, p2])
self.assertEqual([1, 2], m.get_shape().as_list())
self.assertEqual([], index.get_shape())
p1 = tf.placeholder(tf.float32, shape=[None, 2])
p2 = tf.placeholder(tf.float32, shape=[None, 2])
m, index = control_flow_ops.merge([p1, p2])
self.assertEqual([None, 2], m.get_shape().as_list())
self.assertEqual([], index.get_shape())
p1 = tf.placeholder(tf.float32, shape=[None, None])
p2 = tf.placeholder(tf.float32, shape=[None, None])
m, index = control_flow_ops.merge([p1, p2])
self.assertEqual([None, None], m.get_shape().as_list())
self.assertEqual([], index.get_shape())
def testRefSelect(self):
index = tf.placeholder(tf.int32)
# All inputs unknown.
p1 = tf.placeholder(tf.float32)
p2 = tf.placeholder(tf.float32)
p3 = tf.placeholder(tf.float32)
v1 = tf.Variable(p1, validate_shape=False)
v2 = tf.Variable(p2, validate_shape=False)
v3 = tf.Variable(p3, validate_shape=False)
s = control_flow_ops.ref_select(index, [v1, v2, v3])
self.assertIs(None, s.get_shape().ndims)
# All inputs known but different.
v1 = tf.Variable([[1, 2]])
v2 = tf.Variable([[2], [1]])
s = control_flow_ops.ref_select(index, [v1, v2])
self.assertIs(None, s.get_shape().ndims)
# All inputs known and same.
v1 = tf.Variable([[1, 2]])
v2 = tf.Variable([[1, 2]])
s = control_flow_ops.ref_select(index, [v1, v2])
self.assertEqual([1, 2], s.get_shape())
# Possibly the same but not guaranteed.
v1 = tf.Variable([[1., 2.]])
p2 = tf.placeholder(tf.float32, shape=[None, 2])
v2 = tf.Variable(p2, validate_shape=False)
s = control_flow_ops.ref_select(index, [v1, v2])
self.assertEqual(None, s.get_shape())
def testRunLoopTensor(self):
with self.test_session() as sess:
tensor_list = []
def condition(t):
return t < tf.constant(5)
def body(_):
tensor_list.append(tf.constant(5))
return tf.constant(10)
result = tf.while_loop(condition, body, [tf.constant(4)])
self.assertEqual(10, sess.run(result))
# Ensure that we cannot run a tensor that escapes the loop body
# accidentally.
with self.assertRaises(ValueError):
sess.run(tensor_list[0])
class TupleTest(tf.test.TestCase):
def testTensors(self):
for v1_first in [True, False]:
with self.test_session():
v1 = tf.Variable([1.0])
add1 = tf.add(
control_flow_ops.with_dependencies([v1.initializer], v1.ref()),
2.0)
v2 = tf.Variable([10.0])
add2 = tf.add(
control_flow_ops.with_dependencies([v2.initializer], v2.ref()),
20.0)
t1, _, t2 = control_flow_ops.tuple([add1, None, add2])
# v1 is not initialized.
with self.assertRaisesOpError("Attempting to use uninitialized value"):
v1.eval()
# v2 is not initialized.
with self.assertRaisesOpError("Attempting to use uninitialized value"):
v2.eval()
if v1_first:
# Getting t1 initializes v2.
self.assertAllClose([3.0], t1.eval())
self.assertAllClose([10.0], v2.eval())
else:
# Getting t2 initializes v1.
self.assertAllClose([30.0], t2.eval())
self.assertAllClose([1.0], v1.eval())
def testIndexedSlices(self):
for v1_first in [True, False]:
with self.test_session():
v1 = tf.Variable(
np.array([[0.0, 1.0], [10.0, 11.0], [20.0, 21.0]]).astype(
np.float32))
v1_at_1 = tf.IndexedSlices(
control_flow_ops.with_dependencies([v1.initializer], v1.ref()),
tf.constant([1]))
v2 = tf.Variable(
np.array([[0.1, 1.1], [10.1, 11.1], [20.1, 21.1]]).astype(
np.float32))
v2_at_1 = tf.IndexedSlices(
control_flow_ops.with_dependencies([v2.initializer], v2.ref()),
tf.constant([1]))
st1, st2 = control_flow_ops.tuple([v1_at_1, v2_at_1])
g1 = tf.gather(st1.values, st1.indices)
g2 = tf.gather(st2.values, st2.indices)
# v1 is not initialized.
with self.assertRaisesOpError("Attempting to use uninitialized value"):
v1.eval()
# v2 is not initialized.
with self.assertRaisesOpError("Attempting to use uninitialized value"):
v2.eval()
if v1_first:
# Getting g1 initializes v2.
self.assertAllClose([[10.0, 11.0]], g1.eval())
self.assertAllClose([[0.1, 1.1], [10.1, 11.1], [20.1, 21.1]],
v2.eval())
else:
# Getting g2 initializes v1.
self.assertAllClose([[10.1, 11.1]], g2.eval())
self.assertAllClose([[0.0, 1.0], [10.0, 11.0], [20.0, 21.0]],
v1.eval())
def testAcceptTensorsAsControlInputs(self):
with self.test_session():
var = tf.Variable(0)
assign = tf.assign(var, 1)
t, = tf.tuple([tf.constant(0)], control_inputs=[assign])
# Should trigger the assign.
t.eval()
      self.assertEqual(1, var.eval())
def testWhilePyFuncBasic(self):
def func(x):
return np.square(x)
with self.test_session():
r = tf.while_loop(
lambda i, v: i < 4,
lambda i, v: [i + 1, tf.py_func(func, [v], [tf.float32])[0]],
[tf.constant(0), tf.constant(2.0, tf.float32)])
self.assertEqual(r[1].eval(), 65536.0)
def testWhileFuncBasic(self):
@function.Defun(tf.float32)
def func(x):
return tf.square(tf.square(x))
with self.test_session():
x = tf.constant(2.0, tf.float32)
r = tf.while_loop(
lambda i, v: i < 2,
lambda i, v: [i + 1, func(v)],
[tf.constant(0), x])
self.assertEqual(r[1].eval(), 65536.0)
r = tf.gradients(r, x)[0]
self.assertEqual(r.eval(), 524288.0)
self.assertEqual(len([op for op in x.graph.get_operations()
if op.type == "Stack"]),
1)
if __name__ == "__main__":
tf.test.main()
|
from django.http import HttpResponse
from django.core import serializers
from django.shortcuts import render
from simple_rest import Resource
from .models import Client
def index(request):
return render(request, 'index.html')
class Clients(Resource):
def get(self, request):
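        """List clients, optionally filtered by name and address substrings."""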
        # Default missing query parameters to '' so they match everything
        # instead of raising "Cannot use None as a query value".
        clients = Client.objects.all() \
            .filter(name__contains=request.GET.get('name', '')) \
            .filter(address__contains=request.GET.get('address', ''))
        return HttpResponse(self.to_json(clients),
                            content_type='application/json', status=200)
def post(self, request):
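        """Create a client from POSTed form fields."""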
        Client.objects.create(
            name=request.POST.get("name"),
            age=request.POST.get("age"),
            address=request.POST.get("address"),
            married=request.POST.get("married") == 'true',
        )
        return HttpResponse(status=201)
def put(self, request, client_id):
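        """Update every field of an existing client."""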
        client = Client.objects.get(pk=client_id)
        client.name = request.PUT.get("name")
        client.age = request.PUT.get("age")
        client.address = request.PUT.get("address")
        client.married = request.PUT.get("married") == 'true'
        client.save()
        return HttpResponse(status=200)
def delete(self, request, client_id):
client = Client.objects.get(pk = client_id)
client.delete()
return HttpResponse(status = 200)
def to_json(self, objects):
return serializers.serialize('json', objects)
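# Hypothetical requests served by the Clients resource above, assuming URL
# routing maps /clients/ and /clients/<client_id>/ to this class:
#   GET    /clients/?name=Jo&address=Main -> 200 with a JSON list of matches
#   POST   /clients/ with name/age/address/married form fields -> 201
#   DELETE /clients/7/ -> 200 after deleting the client with pk=7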
|
# Copyright (C) 2016-2017 YouCompleteMe contributors
#
# This file is part of YouCompleteMe.
#
# YouCompleteMe is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# YouCompleteMe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with YouCompleteMe. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
# Not installing aliases from python-future; it's unreliable and slow.
from builtins import * # noqa
from ycm.tests.test_utils import ( ExtendedMock, MockVimBuffers, MockVimModule,
VimBuffer, VimMatch )
MockVimModule()
import os
import sys
from hamcrest import ( assert_that, contains, empty, is_in, is_not, has_length,
matches_regexp )
from mock import call, MagicMock, patch
from ycm.tests import StopServer, test_utils, YouCompleteMeInstance
from ycm.client.base_request import _LoadExtraConfFile
from ycmd.responses import ServerError
@YouCompleteMeInstance()
def YouCompleteMe_YcmCoreNotImported_test( ycm ):
assert_that( 'ycm_core', is_not( is_in( sys.modules ) ) )
@YouCompleteMeInstance()
@patch( 'ycm.vimsupport.PostVimMessage', new_callable = ExtendedMock )
def RunNotifyUserIfServerCrashed( ycm, test, post_vim_message ):
StopServer( ycm )
ycm._logger = MagicMock( autospec = True )
ycm._server_popen = MagicMock( autospec = True )
ycm._server_popen.poll.return_value = test[ 'return_code' ]
ycm._server_popen.stderr.read.return_value = test[ 'stderr_output' ]
ycm._NotifyUserIfServerCrashed()
assert_that( ycm._logger.method_calls,
has_length( len( test[ 'expected_logs' ] ) ) )
ycm._logger.error.assert_has_calls(
[ call( log ) for log in test[ 'expected_logs' ] ] )
post_vim_message.assert_has_exact_calls( [
call( test[ 'expected_vim_message' ] )
] )
def YouCompleteMe_NotifyUserIfServerCrashed_UnexpectedCore_test():
message = ( "The ycmd server SHUT DOWN (restart with ':YcmRestartServer'). "
"Unexpected error while loading the YCM core library. "
"Use the ':YcmToggleLogs' command to check the logs." )
RunNotifyUserIfServerCrashed( {
'return_code': 3,
'stderr_output' : '',
'expected_logs': [ message ],
'expected_vim_message': message
} )
def YouCompleteMe_NotifyUserIfServerCrashed_MissingCore_test():
message = ( "The ycmd server SHUT DOWN (restart with ':YcmRestartServer'). "
"YCM core library not detected; you need to compile YCM before "
"using it. Follow the instructions in the documentation." )
RunNotifyUserIfServerCrashed( {
'return_code': 4,
'stderr_output': '',
'expected_logs': [ message ],
'expected_vim_message': message
} )
def YouCompleteMe_NotifyUserIfServerCrashed_Python2Core_test():
message = ( "The ycmd server SHUT DOWN (restart with ':YcmRestartServer'). "
"YCM core library compiled for Python 2 but loaded in Python 3. "
"Set the 'g:ycm_server_python_interpreter' option to a Python 2 "
"interpreter path." )
RunNotifyUserIfServerCrashed( {
'return_code': 5,
'stderr_output': '',
'expected_logs': [ message ],
'expected_vim_message': message
} )
def YouCompleteMe_NotifyUserIfServerCrashed_Python3Core_test():
message = ( "The ycmd server SHUT DOWN (restart with ':YcmRestartServer'). "
"YCM core library compiled for Python 3 but loaded in Python 2. "
"Set the 'g:ycm_server_python_interpreter' option to a Python 3 "
"interpreter path." )
RunNotifyUserIfServerCrashed( {
'return_code': 6,
'stderr_output': '',
'expected_logs': [ message ],
'expected_vim_message': message
} )
def YouCompleteMe_NotifyUserIfServerCrashed_OutdatedCore_test():
message = ( "The ycmd server SHUT DOWN (restart with ':YcmRestartServer'). "
"YCM core library too old; PLEASE RECOMPILE by running the "
"install.py script. See the documentation for more details." )
RunNotifyUserIfServerCrashed( {
'return_code': 7,
'stderr_output': '',
'expected_logs': [ message ],
'expected_vim_message': message
} )
def YouCompleteMe_NotifyUserIfServerCrashed_UnexpectedExitCode_test():
message = ( "The ycmd server SHUT DOWN (restart with ':YcmRestartServer'). "
"Unexpected exit code 1. Use the ':YcmToggleLogs' command to "
"check the logs." )
RunNotifyUserIfServerCrashed( {
'return_code': 1,
'stderr_output': 'First line\r\n'
'Second line',
'expected_logs': [ 'First line\n'
'Second line',
message ],
'expected_vim_message': message
} )
@YouCompleteMeInstance( { 'extra_conf_vim_data': [ 'tempname()' ] } )
def YouCompleteMe_DebugInfo_ServerRunning_test( ycm ):
dir_of_script = os.path.dirname( os.path.abspath( __file__ ) )
buf_name = os.path.join( dir_of_script, 'testdata', 'test.cpp' )
extra_conf = os.path.join( dir_of_script, 'testdata', '.ycm_extra_conf.py' )
_LoadExtraConfFile( extra_conf )
current_buffer = VimBuffer( buf_name, filetype='cpp' )
with MockVimBuffers( [ current_buffer ], current_buffer ):
assert_that(
ycm.DebugInfo(),
matches_regexp(
'Client logfile: .+\n'
'Server Python interpreter: .+\n'
'Server Python version: .+\n'
'Server has Clang support compiled in: '
'(?P<CLANG>True)?(?(CLANG)|False)\n'
'Clang version: .+\n'
'Extra configuration file found and loaded\n'
'Extra configuration path: .*testdata[/\\\\]\\.ycm_extra_conf\\.py\n'
'(?(CLANG)C-family completer debug information:\n'
' Compilation database path: None\n'
' Flags: \\[\'_TEMP_FILE_\'.*\\]\n)'
'Server running at: .+\n'
'Server process ID: \d+\n'
'Server logfiles:\n'
' .+\n'
' .+' )
)
@YouCompleteMeInstance()
def YouCompleteMe_DebugInfo_ServerNotRunning_test( ycm ):
StopServer( ycm )
current_buffer = VimBuffer( 'current_buffer' )
with MockVimBuffers( [ current_buffer ], current_buffer ):
assert_that(
ycm.DebugInfo(),
matches_regexp(
'Client logfile: .+\n'
'Server errored, no debug info from server\n'
'Server running at: .+\n'
'Server process ID: \d+\n'
'Server logfiles:\n'
' .+\n'
' .+' )
)
@YouCompleteMeInstance()
def YouCompleteMe_OnVimLeave_RemoveClientLogfileByDefault_test( ycm ):
client_logfile = ycm._client_logfile
assert_that( os.path.isfile( client_logfile ),
'Logfile {0} does not exist.'.format( client_logfile ) )
ycm.OnVimLeave()
assert_that( not os.path.isfile( client_logfile ),
'Logfile {0} was not removed.'.format( client_logfile ) )
@YouCompleteMeInstance( { 'keep_logfiles': 1 } )
def YouCompleteMe_OnVimLeave_KeepClientLogfile_test( ycm ):
client_logfile = ycm._client_logfile
assert_that( os.path.isfile( client_logfile ),
'Logfile {0} does not exist.'.format( client_logfile ) )
ycm.OnVimLeave()
assert_that( os.path.isfile( client_logfile ),
'Logfile {0} was removed.'.format( client_logfile ) )
@YouCompleteMeInstance()
@patch( 'ycm.vimsupport.CloseBuffersForFilename', new_callable = ExtendedMock )
@patch( 'ycm.vimsupport.OpenFilename', new_callable = ExtendedMock )
def YouCompleteMe_ToggleLogs_WithParameters_test( ycm,
open_filename,
close_buffers_for_filename ):
logfile_buffer = VimBuffer( ycm._client_logfile, window = 1 )
with MockVimBuffers( [ logfile_buffer ], logfile_buffer ):
ycm.ToggleLogs( os.path.basename( ycm._client_logfile ),
'nonexisting_logfile',
os.path.basename( ycm._server_stdout ) )
open_filename.assert_has_exact_calls( [
call( ycm._server_stdout, { 'size': 12,
'watch': True,
'fix': True,
'focus': False,
'position': 'end' } )
] )
close_buffers_for_filename.assert_has_exact_calls( [
call( ycm._client_logfile )
] )
@YouCompleteMeInstance()
@patch( 'ycm.vimsupport.PostVimMessage' )
def YouCompleteMe_ToggleLogs_WithoutParameters_test( ycm, post_vim_message ):
# We test on a Python buffer because the Python completer has subserver
# logfiles.
python_buffer = VimBuffer( 'buffer.py', filetype = 'python' )
with MockVimBuffers( [ python_buffer ], python_buffer ):
ycm.ToggleLogs()
assert_that(
# Argument passed to PostVimMessage.
post_vim_message.call_args[ 0 ][ 0 ],
matches_regexp(
'Available logfiles are:\n'
'jedihttp_\d+_stderr_.+.log\n'
'jedihttp_\d+_stdout_.+.log\n'
'ycm_.+.log\n'
'ycmd_\d+_stderr_.+.log\n'
'ycmd_\d+_stdout_.+.log' )
)
@YouCompleteMeInstance()
def YouCompleteMe_GetDefinedSubcommands_ListFromServer_test( ycm ):
current_buffer = VimBuffer( 'buffer' )
with MockVimBuffers( [ current_buffer ], current_buffer ):
with patch( 'ycm.client.base_request.JsonFromFuture',
return_value = [ 'SomeCommand', 'AnotherCommand' ] ):
assert_that(
ycm.GetDefinedSubcommands(),
contains(
'SomeCommand',
'AnotherCommand'
)
)
@YouCompleteMeInstance()
@patch( 'ycm.client.base_request._logger', autospec = True )
@patch( 'ycm.vimsupport.PostVimMessage', new_callable = ExtendedMock )
def YouCompleteMe_GetDefinedSubcommands_ErrorFromServer_test( ycm,
post_vim_message,
logger ):
current_buffer = VimBuffer( 'buffer' )
with MockVimBuffers( [ current_buffer ], current_buffer ):
with patch( 'ycm.client.base_request.JsonFromFuture',
side_effect = ServerError( 'Server error' ) ):
result = ycm.GetDefinedSubcommands()
logger.exception.assert_called_with( 'Error while handling server response' )
post_vim_message.assert_has_exact_calls( [
call( 'Server error', truncate = False )
] )
assert_that( result, empty() )
@YouCompleteMeInstance()
@patch( 'ycm.vimsupport.PostVimMessage', new_callable = ExtendedMock )
def YouCompleteMe_ShowDetailedDiagnostic_MessageFromServer_test(
ycm, post_vim_message ):
current_buffer = VimBuffer( 'buffer' )
with MockVimBuffers( [ current_buffer ], current_buffer ):
with patch( 'ycm.client.base_request.JsonFromFuture',
return_value = { 'message': 'some_detailed_diagnostic' } ):
      ycm.ShowDetailedDiagnostic()
post_vim_message.assert_has_exact_calls( [
call( 'some_detailed_diagnostic', warning = False )
] )
@YouCompleteMeInstance()
@patch( 'ycm.vimsupport.PostVimMessage', new_callable = ExtendedMock )
def YouCompleteMe_ShowDiagnostics_FiletypeNotSupported_test( ycm,
post_vim_message ):
current_buffer = VimBuffer( 'buffer', filetype = 'not_supported' )
with MockVimBuffers( [ current_buffer ], current_buffer ):
ycm.ShowDiagnostics()
post_vim_message.assert_called_once_with(
'Native filetype completion not supported for current file, '
'cannot force recompilation.', warning = False )
@YouCompleteMeInstance()
@patch( 'ycm.youcompleteme.YouCompleteMe.FiletypeCompleterExistsForFiletype',
return_value = True )
@patch( 'ycm.vimsupport.PostVimMessage', new_callable = ExtendedMock )
@patch( 'ycm.vimsupport.SetLocationList', new_callable = ExtendedMock )
def YouCompleteMe_ShowDiagnostics_NoDiagnosticsDetected_test(
ycm, set_location_list, post_vim_message, *args ):
current_buffer = VimBuffer( 'buffer', filetype = 'cpp' )
with MockVimBuffers( [ current_buffer ], current_buffer ):
with patch( 'ycm.client.event_notification.EventNotification.Response',
return_value = {} ):
ycm.ShowDiagnostics()
post_vim_message.assert_has_exact_calls( [
call( 'Forcing compilation, this will block Vim until done.',
warning = False ),
call( 'Diagnostics refreshed', warning = False ),
call( 'No warnings or errors detected.', warning = False )
] )
set_location_list.assert_called_once_with( [] )
@YouCompleteMeInstance( { 'log_level': 'debug',
'keep_logfiles': 1,
'open_loclist_on_ycm_diags': 0 } )
@patch( 'ycm.youcompleteme.YouCompleteMe.FiletypeCompleterExistsForFiletype',
return_value = True )
@patch( 'ycm.vimsupport.PostVimMessage', new_callable = ExtendedMock )
@patch( 'ycm.vimsupport.SetLocationList', new_callable = ExtendedMock )
def YouCompleteMe_ShowDiagnostics_DiagnosticsFound_DoNotOpenLocationList_test(
ycm, set_location_list, post_vim_message, *args ):
diagnostic = {
'kind': 'ERROR',
'text': 'error text',
'location': {
'filepath': 'buffer',
'line_num': 19,
'column_num': 2
}
}
current_buffer = VimBuffer( 'buffer', filetype = 'cpp', number = 3 )
with MockVimBuffers( [ current_buffer ], current_buffer ):
with patch( 'ycm.client.event_notification.EventNotification.Response',
return_value = [ diagnostic ] ):
ycm.ShowDiagnostics()
post_vim_message.assert_has_exact_calls( [
call( 'Forcing compilation, this will block Vim until done.',
warning = False ),
call( 'Diagnostics refreshed', warning = False )
] )
set_location_list.assert_called_once_with( [ {
'bufnr': 3,
'lnum': 19,
'col': 2,
'text': 'error text',
'type': 'E',
'valid': 1
} ] )
@YouCompleteMeInstance( { 'open_loclist_on_ycm_diags': 1 } )
@patch( 'ycm.youcompleteme.YouCompleteMe.FiletypeCompleterExistsForFiletype',
return_value = True )
@patch( 'ycm.vimsupport.PostVimMessage', new_callable = ExtendedMock )
@patch( 'ycm.vimsupport.SetLocationList', new_callable = ExtendedMock )
@patch( 'ycm.vimsupport.OpenLocationList', new_callable = ExtendedMock )
def YouCompleteMe_ShowDiagnostics_DiagnosticsFound_OpenLocationList_test(
ycm, open_location_list, set_location_list, post_vim_message, *args ):
diagnostic = {
'kind': 'ERROR',
'text': 'error text',
'location': {
'filepath': 'buffer',
'line_num': 19,
'column_num': 2
}
}
current_buffer = VimBuffer( 'buffer', filetype = 'cpp', number = 3 )
with MockVimBuffers( [ current_buffer ], current_buffer ):
with patch( 'ycm.client.event_notification.EventNotification.Response',
return_value = [ diagnostic ] ):
ycm.ShowDiagnostics()
post_vim_message.assert_has_exact_calls( [
call( 'Forcing compilation, this will block Vim until done.',
warning = False ),
call( 'Diagnostics refreshed', warning = False )
] )
set_location_list.assert_called_once_with( [ {
'bufnr': 3,
'lnum': 19,
'col': 2,
'text': 'error text',
'type': 'E',
'valid': 1
} ] )
open_location_list.assert_called_once_with( focus = True )
@YouCompleteMeInstance( { 'echo_current_diagnostic': 1,
'enable_diagnostic_signs': 1,
'enable_diagnostic_highlighting': 1 } )
@patch( 'ycm.youcompleteme.YouCompleteMe.FiletypeCompleterExistsForFiletype',
return_value = True )
@patch( 'ycm.vimsupport.PostVimMessage', new_callable = ExtendedMock )
@patch( 'vim.command', new_callable = ExtendedMock )
def YouCompleteMe_UpdateDiagnosticInterface_PrioritizeErrorsOverWarnings_test(
ycm, vim_command, post_vim_message, *args ):
contents = """int main() {
int x, y;
x == y
}"""
# List of diagnostics returned by ycmd for the above code.
diagnostics = [ {
'kind': 'ERROR',
'text': "expected ';' after expression",
'location': {
'filepath': 'buffer',
'line_num': 3,
'column_num': 9
},
# Looks strange but this is really what ycmd is returning.
'location_extent': {
'start': {
'filepath': '',
'line_num': 0,
'column_num': 0,
},
'end': {
'filepath': '',
'line_num': 0,
'column_num': 0,
}
},
'ranges': [],
'fixit_available': True
}, {
'kind': 'WARNING',
'text': 'equality comparison result unused',
'location': {
'filepath': 'buffer',
'line_num': 3,
'column_num': 7,
},
'location_extent': {
'start': {
'filepath': 'buffer',
'line_num': 3,
'column_num': 5,
},
'end': {
'filepath': 'buffer',
'line_num': 3,
'column_num': 7,
}
},
'ranges': [ {
'start': {
'filepath': 'buffer',
'line_num': 3,
'column_num': 3,
},
'end': {
'filepath': 'buffer',
'line_num': 3,
'column_num': 9,
}
} ],
'fixit_available': True
} ]
current_buffer = VimBuffer( 'buffer',
filetype = 'c',
contents = contents.splitlines(),
number = 5,
window = 2 )
test_utils.VIM_MATCHES = []
with MockVimBuffers( [ current_buffer ], current_buffer, ( 3, 1 ) ):
with patch( 'ycm.client.event_notification.EventNotification.Response',
return_value = diagnostics ):
ycm.OnFileReadyToParse()
ycm.HandleFileParseRequest( block = True )
# Error match is added after warning matches.
assert_that(
test_utils.VIM_MATCHES,
contains(
VimMatch( 'YcmWarningSection', '\%3l\%5c\_.\{-}\%3l\%7c' ),
VimMatch( 'YcmWarningSection', '\%3l\%3c\_.\{-}\%3l\%9c' ),
VimMatch( 'YcmErrorSection', '\%3l\%8c' )
)
)
# Only the error sign is placed.
vim_command.assert_has_exact_calls( [
call( 'sign define ycm_dummy_sign' ),
call( 'sign place 3 name=ycm_dummy_sign line=3 buffer=5' ),
call( 'sign place 1 name=YcmError line=3 buffer=5' ),
call( 'sign undefine ycm_dummy_sign' ),
call( 'sign unplace 3 buffer=5' )
] )
# When moving the cursor on the diagnostics, the error is displayed to the
# user, not the warning.
ycm.OnCursorMoved()
post_vim_message.assert_has_exact_calls( [
call( "expected ';' after expression (FixIt)",
truncate = True, warning = False )
] )
|
# Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
import json
import hashlib
import operator
from concurrent.futures import as_completed
from c7n.actions import Action
from c7n.exceptions import PolicyValidationError
from c7n.filters import Filter, CrossAccountAccessFilter
from c7n.query import QueryResourceManager, TypeInfo
from c7n.manager import resources
from c7n.tags import universal_augment
from c7n.utils import chunks, get_retry, local_session, type_schema, filter_empty
from c7n.version import version
from .aws import shape_validate
from .ec2 import EC2
@resources.register('ssm-parameter')
class SSMParameter(QueryResourceManager):
class resource_type(TypeInfo):
service = 'ssm'
enum_spec = ('describe_parameters', 'Parameters', None)
name = "Name"
id = "Name"
universal_taggable = True
arn_type = "parameter"
cfn_type = 'AWS::SSM::Parameter'
retry = staticmethod(get_retry(('Throttled',)))
permissions = ('ssm:GetParameters',
'ssm:DescribeParameters')
augment = universal_augment
@SSMParameter.action_registry.register('delete')
class DeleteParameter(Action):
schema = type_schema('delete')
permissions = ("ssm:DeleteParameter",)
def process(self, resources):
client = local_session(self.manager.session_factory).client('ssm')
for r in resources:
self.manager.retry(
client.delete_parameter, Name=r['Name'],
ignore_err_codes=('ParameterNotFound',))
@resources.register('ssm-managed-instance')
class ManagedInstance(QueryResourceManager):
class resource_type(TypeInfo):
service = 'ssm'
enum_spec = ('describe_instance_information', 'InstanceInformationList', None)
id = 'InstanceId'
name = 'Name'
date = 'RegistrationDate'
arn_type = "managed-instance"
permissions = ('ssm:DescribeInstanceInformation',)
@EC2.action_registry.register('send-command')
@ManagedInstance.action_registry.register('send-command')
class SendCommand(Action):
"""Run an SSM Automation Document on an instance.
:Example:
Find ubuntu 18.04 instances are active with ssm.
.. code-block:: yaml
policies:
- name: ec2-osquery-install
resource: ec2
filters:
- type: ssm
key: PingStatus
value: Online
- type: ssm
key: PlatformName
value: Ubuntu
- type: ssm
key: PlatformVersion
value: 18.04
actions:
- type: send-command
command:
DocumentName: AWS-RunShellScript
Parameters:
commands:
- wget https://pkg.osquery.io/deb/osquery_3.3.0_1.linux.amd64.deb
- dpkg -i osquery_3.3.0_1.linux.amd64.deb
"""
schema = type_schema(
'send-command',
command={'type': 'object'},
required=('command',))
permissions = ('ssm:SendCommand',)
shape = "SendCommandRequest"
annotation = 'c7n:SendCommand'
def validate(self):
shape_validate(self.data['command'], self.shape, 'ssm')
# If used against an ec2 resource, require an ssm status filter
# to ensure that we're not trying to send commands to instances
# that aren't in ssm.
if self.manager.type != 'ec2':
return
found = False
for f in self.manager.iter_filters():
if f.type == 'ssm':
found = True
break
if not found:
raise PolicyValidationError(
"send-command requires use of ssm filter on ec2 resources")
def process(self, resources):
client = local_session(self.manager.session_factory).client('ssm')
for resource_set in chunks(resources, 50):
self.process_resource_set(client, resource_set)
def process_resource_set(self, client, resources):
command = dict(self.data['command'])
command['InstanceIds'] = [
r['InstanceId'] for r in resources]
result = client.send_command(**command).get('Command')
for r in resources:
r.setdefault('c7n:SendCommand', []).append(result['CommandId'])
@resources.register('ssm-activation')
class SSMActivation(QueryResourceManager):
class resource_type(TypeInfo):
service = 'ssm'
enum_spec = ('describe_activations', 'ActivationList', None)
id = 'ActivationId'
name = 'Description'
date = 'CreatedDate'
arn = False
permissions = ('ssm:DescribeActivations',)
@SSMActivation.action_registry.register('delete')
class DeleteSSMActivation(Action):
schema = type_schema('delete')
permissions = ('ssm:DeleteActivation',)
def process(self, resources):
client = local_session(self.manager.session_factory).client('ssm')
for a in resources:
client.delete_activation(ActivationId=a["ActivationId"])
@resources.register('ops-item')
class OpsItem(QueryResourceManager):
"""Resource for OpsItems in SSM OpsCenter
https://docs.aws.amazon.com/systems-manager/latest/userguide/OpsCenter.html
"""
class resource_type(TypeInfo):
enum_spec = ('describe_ops_items', 'OpsItemSummaries', None)
service = 'ssm'
arn_type = 'opsitem'
id = 'OpsItemId'
name = 'Title'
default_report_fields = (
'Status', 'Title', 'LastModifiedTime',
'CreatedBy', 'CreatedTime')
QueryKeys = {
'Status',
'CreatedBy',
'Source',
'Priority',
'Title',
'OpsItemId',
'CreatedTime',
'LastModifiedTime',
'OperationalData',
'OperationalDataKey',
'OperationalDataValue',
'ResourceId',
'AutomationId'}
QueryOperators = {'Equal', 'LessThan', 'GreaterThan', 'Contains'}
def validate(self):
self.query = self.resource_query()
return super(OpsItem, self).validate()
def get_resources(self, ids, cache=True, augment=True):
if isinstance(ids, str):
ids = [ids]
return self.resources({
'OpsItemFilters': [{
'Key': 'OpsItemId',
'Values': [i],
'Operator': 'Equal'} for i in ids]})
def resources(self, query=None):
q = self.resource_query()
if q and query and 'OpsItemFilters' in query:
q['OpsItemFilters'].extend(query['OpsItemFilters'])
return super(OpsItem, self).resources(query=q)
def resource_query(self):
filters = []
for q in self.data.get('query', ()):
if (not isinstance(q, dict) or
not set(q.keys()) == {'Key', 'Values', 'Operator'} or
q['Key'] not in self.QueryKeys or
q['Operator'] not in self.QueryOperators):
raise PolicyValidationError(
"invalid ops-item query %s" % self.data['query'])
filters.append(q)
return {'OpsItemFilters': filters}
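# A sketch of a policy 'query' entry that passes the validation above
# (values are hypothetical):
#   query:
#     - Key: Status
#       Values: [Open]
#       Operator: Equal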
@OpsItem.action_registry.register('update')
class UpdateOpsItem(Action):
"""Update an ops item.
    :example:
Close out open ops items older than 30 days for a given issue.
.. code-block:: yaml
policies:
- name: issue-items
resource: aws.ops-item
filters:
- Status: Open
- Title: checking-lambdas
- type: value
key: CreatedTime
value_type: age
op: greater-than
value: 30
actions:
- type: update
status: Resolved
"""
schema = type_schema(
'update',
description={'type': 'string'},
priority={'enum': list(range(1, 6))},
title={'type': 'string'},
topics={'type': 'array', 'items': {'type': 'string'}},
status={'enum': ['Open', 'In Progress', 'Resolved']},
)
permissions = ('ssm:UpdateOpsItem',)
def process(self, resources):
attrs = dict(self.data)
attrs = filter_empty({
'Description': attrs.get('description'),
'Title': attrs.get('title'),
'Priority': attrs.get('priority'),
'Status': attrs.get('status'),
'Notifications': [{'Arn': a} for a in attrs.get('topics', ())]})
        modified = []
        for r in resources:
            for k, v in attrs.items():
                if k not in r or r[k] != v:
                    modified.append(r)
                    # one mismatch is enough; avoid queueing duplicates
                    break
self.log.debug("Updating %d of %d ops items", len(modified), len(resources))
client = local_session(self.manager.session_factory).client('ssm')
for m in modified:
client.update_ops_item(OpsItemId=m['OpsItemId'], **attrs)
class OpsItemFilter(Filter):
"""Filter resources associated to extant OpsCenter operational items.
:example:
Find ec2 instances with open ops items.
.. code-block:: yaml
policies:
- name: ec2-instances-ops-items
resource: ec2
filters:
- type: ops-item
# we can filter on source, title, priority
priority: [1, 2]
"""
schema = type_schema(
'ops-item',
status={'type': 'array',
'default': ['Open'],
'items': {'enum': ['Open', 'In progress', 'Resolved']}},
priority={'type': 'array', 'items': {'enum': list(range(1, 6))}},
title={'type': 'string'},
source={'type': 'string'})
schema_alias = True
permissions = ('ssm:DescribeOpsItems',)
def process(self, resources, event=None):
client = local_session(self.manager.session_factory).client('ssm')
results = []
for resource_set in chunks(resources, 10):
qf = self.get_query_filter(resource_set)
items = client.describe_ops_items(**qf).get('OpsItemSummaries')
arn_item_map = {}
for i in items:
for arn in json.loads(
i['OperationalData']['/aws/resources']['Value']):
arn_item_map.setdefault(arn['arn'], []).append(i['OpsItemId'])
for arn, r in zip(self.manager.get_arns(resource_set), resource_set):
if arn in arn_item_map:
r['c7n:opsitems'] = arn_item_map[arn]
results.append(r)
return results
def get_query_filter(self, resources):
q = []
q.append({'Key': 'Status', 'Operator': 'Equal',
'Values': self.data.get('status', ('Open',))})
if self.data.get('priority'):
q.append({'Key': 'Priority', 'Operator': 'Equal',
'Values': list(map(str, self.data['priority']))})
if self.data.get('title'):
q.append({'Key': 'Title', 'Operator': 'Contains',
'Values': [self.data['title']]})
if self.data.get('source'):
q.append({'Key': 'Source', 'Operator': 'Equal',
'Values': [self.data['source']]})
q.append({'Key': 'ResourceId', 'Operator': 'Contains',
'Values': [r[self.manager.resource_type.id] for r in resources]})
return {'OpsItemFilters': q}
@classmethod
def register_resource(cls, registry, resource_class):
if 'ops-item' not in resource_class.filter_registry:
resource_class.filter_registry.register('ops-item', cls)
resources.subscribe(OpsItemFilter.register_resource)
class PostItem(Action):
"""Post an OpsItem to AWS Systems Manager OpsCenter Dashboard.
https://docs.aws.amazon.com/systems-manager/latest/userguide/OpsCenter.html
    Each ops item supports up to 100 associated resources. This
action supports the builtin OpsCenter dedup logic with additional
support for associating new resources to existing Open ops items.
    :example:
Create an ops item for ec2 instances with Create User permissions
.. code-block:: yaml
policies:
- name: over-privileged-ec2
resource: aws.ec2
filters:
- type: check-permissions
match: allowed
actions:
- iam:CreateUser
actions:
- type: post-item
priority: 3
The builtin OpsCenter dedup logic will kick in if the same
resource set (ec2 instances in this case) is posted for the same
policy.
    :example:
Create an ops item for sqs queues with cross account access as ops items.
.. code-block:: yaml
policies:
- name: sqs-cross-account-access
resource: aws.sqs
filters:
- type: cross-account
actions:
- type: mark-for-op
days: 5
op: delete
- type: post-item
title: SQS Cross Account Access
description: |
Cross Account Access detected in SQS resource IAM Policy.
tags:
Topic: Security
"""
schema = type_schema(
'post-item',
description={'type': 'string'},
tags={'type': 'object'},
priority={'enum': list(range(1, 6))},
title={'type': 'string'},
topics={'type': 'string'},
)
schema_alias = True
permissions = ('ssm:CreateOpsItem',)
def process(self, resources, event=None):
client = local_session(self.manager.session_factory).client('ssm')
item_template = self.get_item_template()
resources = list(sorted(resources, key=operator.itemgetter(
self.manager.resource_type.id)))
items = self.get_items(client, item_template)
if items:
# - Use a copy of the template as we'll be passing in status changes on updates.
# - The return resources will be those that we couldn't fit into updates
# to existing resources.
resources = self.update_items(client, items, dict(item_template), resources)
item_ids = [i['OpsItemId'] for i in items[:5]]
for resource_set in chunks(resources, 100):
resource_arns = json.dumps(
[{'arn': arn} for arn in sorted(self.manager.get_arns(resource_set))])
item_template['OperationalData']['/aws/resources'] = {
'Type': 'SearchableString', 'Value': resource_arns}
if items:
item_template['RelatedOpsItems'] = [
{'OpsItemId': item_ids[:5]}]
            try:
                oid = client.create_ops_item(**item_template).get('OpsItemId')
                item_ids.insert(0, oid)
            except client.exceptions.OpsItemAlreadyExistsException:
                # an equivalent open item already exists; skip annotation
                # rather than tagging resources with a stale or unbound oid
                continue
            for r in resource_set:
                r['c7n:opsitem'] = oid
def get_items(self, client, item_template):
qf = [
{'Key': 'OperationalDataValue',
'Operator': 'Contains',
'Values': [item_template['OperationalData'][
'/custodian/dedup']['Value']]},
{'Key': 'OperationalDataKey',
'Operator': 'Equal',
'Values': ['/custodian/dedup']},
{'Key': 'Status',
'Operator': 'Equal',
             # 'In progress' could imply activity/executions underway; we don't want to
             # update the resource set out from under that, so only look at the Open state.
'Values': ['Open']},
{'Key': 'Source',
'Operator': 'Equal',
'Values': ['Cloud Custodian']}]
items = client.describe_ops_items(OpsItemFilters=qf)['OpsItemSummaries']
return list(sorted(items, key=operator.itemgetter('CreatedTime'), reverse=True))
def update_items(self, client, items, item_template, resources):
"""Update existing Open OpsItems with new resources.
Originally this tried to support attribute updates as well, but
the reasoning around that is a bit complex due to partial state
evaluation around any given execution, so its restricted atm
to just updating associated resources.
For management of ops items, use a policy on the
ops-item resource.
Rationale: Typically a custodian policy will be evaluating
some partial set of resources at any given execution (ie think
a lambda looking at newly created resources), where as a
collection of ops center items will represent the total
set. Custodian can multiplex the partial set of resource over
a set of ops items (100 resources per item) which minimizes
the item count. When updating the state of an ops item though,
we have to contend with the possibility that we're doing so
with only a partial state. Which could be confusing if we
tried to set the Status to Resolved even if we're only evaluating
a handful of resources associated to an ops item.
"""
arn_item_map = {}
item_arn_map = {}
for i in items:
item_arn_map[i['OpsItemId']] = arns = json.loads(
i['OperationalData']['/aws/resources']['Value'])
for arn in arns:
arn_item_map[arn['arn']] = i['OpsItemId']
arn_resource_map = dict(zip(self.manager.get_arns(resources), resources))
added = set(arn_resource_map).difference(arn_item_map)
updated = set()
remainder = []
# Check for resource additions
for a in added:
handled = False
for i in items:
if len(item_arn_map[i['OpsItemId']]) >= 100:
continue
item_arn_map[i['OpsItemId']].append({'arn': a})
updated.add(i['OpsItemId'])
arn_resource_map[a]['c7n:opsitem'] = i['OpsItemId']
handled = True
break
if not handled:
remainder.append(a)
for i in items:
if not i['OpsItemId'] in updated:
continue
i = dict(i)
for k in ('CreatedBy', 'CreatedTime', 'Source', 'LastModifiedBy',
'LastModifiedTime'):
i.pop(k, None)
i['OperationalData']['/aws/resources']['Value'] = json.dumps(
item_arn_map[i['OpsItemId']])
i['OperationalData'].pop('/aws/dedup', None)
client.update_ops_item(**i)
return remainder
def get_item_template(self):
title = self.data.get('title', self.manager.data['name']).strip()
dedup = ("%s %s %s %s" % (
title,
self.manager.type,
self.manager.config.region,
self.manager.config.account_id)).encode('utf8')
        # the size restriction on this value is 4-20 chars; the md5 hexdigest is 32, so truncate
dedup = hashlib.md5(dedup).hexdigest()[:20] # nosec nosemgrep
i = dict(
Title=title,
Description=self.data.get(
'description',
self.manager.data.get(
'description',
self.manager.data.get('name'))),
Priority=self.data.get('priority'),
Source="Cloud Custodian",
Tags=[{'Key': k, 'Value': v} for k, v in self.data.get(
'tags', self.manager.data.get('tags', {})).items()],
Notifications=[{'Arn': a} for a in self.data.get('topics', ())],
OperationalData={
'/aws/dedup': {
'Type': 'SearchableString',
'Value': json.dumps({'dedupString': dedup})},
'/custodian/execution-id': {
'Type': 'String',
'Value': self.manager.ctx.execution_id},
# We need our own dedup string to be able to filter
# search on it.
'/custodian/dedup': {
'Type': 'SearchableString',
'Value': dedup},
'/custodian/policy': {
'Type': 'String',
'Value': json.dumps(self.manager.data)},
'/custodian/version': {
'Type': 'String',
'Value': version},
'/custodian/policy-name': {
'Type': 'SearchableString',
'Value': self.manager.data['name']},
'/custodian/resource': {
'Type': 'SearchableString',
'Value': self.manager.type},
}
)
return filter_empty(i)
@classmethod
def register_resource(cls, registry, resource_class):
if 'post-item' not in resource_class.action_registry:
resource_class.action_registry.register('post-item', cls)
resources.subscribe(PostItem.register_resource)
@resources.register('ssm-document')
class SSMDocument(QueryResourceManager):
class resource_type(TypeInfo):
service = 'ssm'
enum_spec = ('list_documents', 'DocumentIdentifiers', {'Filters': [
{
'Key': 'Owner',
'Values': ['Self']}]})
name = 'Name'
date = 'RegistrationDate'
arn_type = 'Document'
permissions = ('ssm:ListDocuments',)
@SSMDocument.filter_registry.register('cross-account')
class SSMDocumentCrossAccount(CrossAccountAccessFilter):
"""Filter SSM documents which have cross account permissions
:example:
.. code-block:: yaml
policies:
- name: ssm-cross-account
resource: ssm-document
filters:
- type: cross-account
whitelist: [xxxxxxxxxxxx]
"""
permissions = ('ssm:DescribeDocumentPermission',)
def process(self, resources, event=None):
self.accounts = self.get_accounts()
results = []
client = local_session(self.manager.session_factory).client('ssm')
with self.executor_factory(max_workers=3) as w:
futures = []
for resource_set in chunks(resources, 10):
futures.append(w.submit(
self.process_resource_set, client, resource_set))
for f in as_completed(futures):
if f.exception():
self.log.error(
"Exception checking cross account access \n %s" % (
f.exception()))
continue
results.extend(f.result())
return results
def process_resource_set(self, client, resource_set):
results = []
for r in resource_set:
attrs = self.manager.retry(
client.describe_document_permission,
Name=r['Name'],
PermissionType='Share',
ignore_err_codes=('InvalidDocument',))['AccountSharingInfoList']
shared_accounts = {
g.get('AccountId') for g in attrs}
delta_accounts = shared_accounts.difference(self.accounts)
if delta_accounts:
r['c7n:CrossAccountViolations'] = list(delta_accounts)
results.append(r)
return results
@SSMDocument.action_registry.register('set-sharing')
class RemoveSharingSSMDocument(Action):
"""Edit list of accounts that share permissions on an SSM document. Pass in a list of account
IDs to the 'add' or 'remove' fields to edit document sharing permissions.
Set 'remove' to 'matched' to automatically remove any external accounts on a
document (use in conjunction with the cross-account filter).
:example:
.. code-block:: yaml
policies:
- name: ssm-set-sharing
resource: ssm-document
filters:
- type: cross-account
whitelist: [xxxxxxxxxxxx]
actions:
- type: set-sharing
add: [yyyyyyyyyy]
remove: matched
"""
schema = type_schema('set-sharing',
remove={
'oneOf': [
{'enum': ['matched']},
{'type': 'array', 'items': {
'type': 'string'}},
]},
add={
'type': 'array', 'items': {
'type': 'string'}})
permissions = ('ssm:ModifyDocumentPermission',)
def process(self, resources):
client = local_session(self.manager.session_factory).client('ssm')
add_accounts = self.data.get('add', [])
remove_accounts = self.data.get('remove', [])
if self.data.get('remove') == 'matched':
for r in resources:
try:
client.modify_document_permission(
Name=r['Name'],
PermissionType='Share',
AccountIdsToAdd=add_accounts,
AccountIdsToRemove=r['c7n:CrossAccountViolations']
)
                except client.exceptions.InvalidDocumentOperation:
                    raise
else:
for r in resources:
try:
client.modify_document_permission(
Name=r['Name'],
PermissionType='Share',
AccountIdsToAdd=add_accounts,
AccountIdsToRemove=remove_accounts
)
                except client.exceptions.InvalidDocumentOperation:
                    raise
@SSMDocument.action_registry.register('delete')
class DeleteSSMDocument(Action):
"""Delete SSM documents. Set force flag to True to force delete on documents that are
shared across accounts. This will remove those shared accounts, and then delete the document.
Otherwise, delete will fail and raise InvalidDocumentOperation exception
if a document is shared with other accounts. Default value for force is False.
:example:
.. code-block:: yaml
policies:
- name: ssm-delete-documents
resource: ssm-document
filters:
- type: cross-account
whitelist: [xxxxxxxxxxxx]
actions:
- type: delete
force: True
"""
schema = type_schema(
'delete',
force={'type': 'boolean'}
)
permissions = ('ssm:DeleteDocument', 'ssm:ModifyDocumentPermission',)
def process(self, resources):
client = local_session(self.manager.session_factory).client('ssm')
for r in resources:
try:
client.delete_document(Name=r['Name'], Force=True)
            except client.exceptions.InvalidDocumentOperation:
if self.data.get('force', False):
response = client.describe_document_permission(
Name=r['Name'],
PermissionType='Share'
)
client.modify_document_permission(
Name=r['Name'],
PermissionType='Share',
AccountIdsToRemove=response.get('AccountIds', [])
)
client.delete_document(
Name=r['Name'],
Force=True
)
                else:
                    raise
|
gdir = 'c:/users/batagelj/work/python/graph/graph'
# wdir = 'c:/users/batagelj/work/python/graph/JSON/test'
wdir = 'c:/users/batagelj/work/python/graph/JSON/SN5'
import sys, os, datetime, json
sys.path = [gdir]+sys.path; os.chdir(wdir)
import GraphNew as Graph
import TQ
# fJSON = 'ConnectivityWeighted.json'
# fJSON = "violenceE.json"
# fJSON = 'stem.json'
# fJSON = 'CcCtest.json'
# fJSON = 'Terror news 50.json'
fJSON = 'CcCSN5.json'
# S = Graph.Graph.loadNetJSON(fJSON); G = S.pairs2edges()
G = Graph.Graph.loadNetJSON(fJSON)
# G.saveNetJSON(file="Terror50E",indent=1)
# fJSON = 'ConnectivityTest.json'
# fJSON = 'ExampleB.json'
# fJSON = 'PathfinderTest.json'
# G = Graph.Graph.loadNetJSON(fJSON)
G.delLoops()
print("Temporal Ps cores in: ",fJSON)
t1 = datetime.datetime.now()
print("started: ",t1.ctime(),"\n")
Tmin,Tmax = G._graph['time']
D = { u: G.TQnetSum(u) for u in G._nodes }
# print("Sum =",D,"\n")
Core = { u: [d for d in D[u] if d[2]==0] for u in G.nodes() }
# core number = 0
D = { u: [d for d in D[u] if d[2]>0] for u in G.nodes() }
D = { u: d for u,d in D.items() if d!=[] }
Dmin = { u: min([e[2] for e in d]) for u,d in D.items() }
step = 0
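# Peeling loop, a temporal variant of standard k-core peeling: repeatedly
# take the node u with the smallest remaining temporal degree dmin, fix its
# core value to dmin on the intervals where that minimum is attained, and
# propagate the decrease to the temporal degrees of u's neighbours.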
while len(D)>0:
step += 1
dmin,u = min( (v,k) for k,v in Dmin.items() )
if step % 100 == 1:
print("{0:3d}. dmin={1:10.4f} node={2:4d}".format(step,dmin,u))
cCore = TQ.TQ.complement(Core[u],Tmin,Tmax+1)
core = TQ.TQ.extract(cCore,[d for d in D[u] if d[2] == dmin])
if core!=[]:
Core[u] = TQ.TQ.sum(Core[u],core)
D[u] = TQ.TQ.cutGE(TQ.TQ.sum(D[u],TQ.TQ.minus(core)),dmin)
for link in G.star(u):
v = G.twin(u,link)
        if v not in D: continue
chLink = TQ.TQ.minus(TQ.TQ.extract(core,G.getLink(link,'tq')))
if chLink==[]: continue
diff = TQ.TQ.cutGE(TQ.TQ.sum(D[v],chLink),0)
D[v] = [ (sd,fd,max(vd,dmin)) for sd,fd,vd in diff ]
if len(D[v])==0: del D[v]; del Dmin[v]
else: Dmin[v] = min([e[2] for e in D[v]])
if len(D[u])==0: del D[u]; del Dmin[u]
else: Dmin[u] = min([e[2] for e in D[u]])
print("{0:3d}. dmin={1:10.4f} node={2:4d}".format(step,dmin,u))
# print("\n-----\nCore =",Core)
t2 = datetime.datetime.now()
print("\nfinished: ",t2.ctime(),"\ntime used: ", t2-t1)
|
import subprocess
import benchmark.util as Util
import benchmark.tools.template
import benchmark.result as result
class Tool(benchmark.tools.template.BaseTool):
def getExecutable(self):
return Util.findExecutable('wolverine')
def getVersion(self, executable):
return subprocess.Popen([executable, '--version'],
stdout=subprocess.PIPE).communicate()[0].split()[1].strip()
def getName(self):
return 'Wolverine'
def getStatus(self, returncode, returnsignal, output, isTimeout):
if "VERIFICATION SUCCESSFUL" in output:
assert returncode == 0
status = result.STR_TRUE
elif "VERIFICATION FAILED" in output:
assert returncode == 10
status = result.STR_FALSE_LABEL
elif returnsignal == 9:
status = "TIMEOUT"
elif returnsignal == 6 or (returncode == 6 and "Out of memory" in output):
status = "OUT OF MEMORY"
elif returncode == 6 and "PARSING ERROR" in output:
status = "PARSING ERROR"
else:
status = "FAILURE"
return status
|
import sys
import os.path
import xml.etree.ElementTree as ET
flaglist = []
#VC7
flaglist.append({'name':'BasicRuntimeChecks', 'flag':'/GZ', 'value':'1'})
flaglist.append({'name':'BasicRuntimeChecks', 'flag':'/RTCsu', 'value':'3'})
flaglist.append({'name':'BasicRuntimeChecks', 'flag':'/RTCs', 'value':'1'})
flaglist.append({'name':'BasicRuntimeChecks', 'flag':'/RTCu', 'value':'2'})
flaglist.append({'name':'BasicRuntimeChecks', 'flag':'/RTC1', 'value':'3'})
flaglist.append({'name':'DebugInformationFormat', 'flag':'/Z7', 'value':'1'})
flaglist.append({'name':'DebugInformationFormat', 'flag':'/Zd', 'value':'2'})
flaglist.append({'name':'DebugInformationFormat', 'flag':'/Zi', 'value':'3'})
flaglist.append({'name':'DebugInformationFormat', 'flag':'/ZI', 'value':'4'})
flaglist.append({'name':'EnableEnhancedInstructionSet', 'flag':'/arch:SSE2', 'value':'2'})
flaglist.append({'name':'EnableEnhancedInstructionSet', 'flag':'/arch:SSE', 'value':'1'})
flaglist.append({'name':'FloatingPointModel', 'flag':'/fp:precise', 'value':'0'})
flaglist.append({'name':'FloatingPointModel', 'flag':'/fp:strict', 'value':'1'})
flaglist.append({'name':'FloatingPointModel', 'flag':'/fp:fast', 'value':'2'})
flaglist.append({'name':'FavorSizeOrSpeed', 'flag':'/Ot', 'value':'1'})
flaglist.append({'name':'FavorSizeOrSpeed', 'flag':'/Os', 'value':'2'})
flaglist.append({'name':'CompileAs', 'flag':'/TC','value':'1'})
flaglist.append({'name':'CompileAs', 'flag':'/TP','value':'2'})
flaglist.append({'name':'Optimization', 'flag':'/Od','value':'0'})
flaglist.append({'name':'Optimization', 'flag':'/O1','value':'1'})
flaglist.append({'name':'Optimization', 'flag':'/O2','value':'2'})
flaglist.append({'name':'Optimization', 'flag':'/Ox', 'value':'3'})
flaglist.append({'name':'OptimizeForProcessor', 'flag':'/GB', 'value':'0'})
flaglist.append({'name':'OptimizeForProcessor', 'flag':'/G5','value':'1'})
flaglist.append({'name':'OptimizeForProcessor', 'flag':'/G6','value':'2'})
flaglist.append({'name':'OptimizeForProcessor', 'flag':'/G7','value':'3'})
flaglist.append({'name':'InlineFunctionExpansion', 'flag':'/Ob0','value':'0'})
flaglist.append({'name':'InlineFunctionExpansion', 'flag':'/Ob1','value':'1'})
flaglist.append({'name':'InlineFunctionExpansion', 'flag':'/Ob2', 'value':'2'})
flaglist.append({'name':'RuntimeLibrary', 'flag':'/MTd','value':'1'})
flaglist.append({'name':'RuntimeLibrary', 'flag':'/MT', 'value':'0'})
flaglist.append({'name':'RuntimeLibrary', 'flag':'/MDd', 'value':'3'})
flaglist.append({'name':'RuntimeLibrary', 'flag':'/MD','value':'2'})
flaglist.append({'name':'RuntimeLibrary', 'flag':'/MLd','value':'5'})
flaglist.append({'name':'RuntimeLibrary', 'flag':'/ML','value':'4'})
flaglist.append({'name':'StructMemberAlignment', 'flag':'/Zp16','value':'5'})
flaglist.append({'name':'StructMemberAlignment', 'flag':'/Zp1','value':'1'})
flaglist.append({'name':'StructMemberAlignment', 'flag':'/Zp2','value':'2'})
flaglist.append({'name':'StructMemberAlignment', 'flag':'/Zp4','value':'3'})
flaglist.append({'name':'StructMemberAlignment', 'flag':'/Zp8','value':'4'})
flaglist.append({'name':'WarningLevel', 'flag':'/W0', 'value':'0'})
flaglist.append({'name':'WarningLevel', 'flag':'/W1', 'value':'1'})
flaglist.append({'name':'WarningLevel', 'flag':'/W2', 'value':'2'})
flaglist.append({'name':'WarningLevel', 'flag':'/W3', 'value':'3'})
flaglist.append({'name':'WarningLevel', 'flag':'/W4', 'value':'4'})
flaglist.append({'name':'BufferSecurityCheck', 'flag':'/GS-', 'value':'false'})
flaglist.append({'name':'BufferSecurityCheck', 'flag':'/GS', 'value':'true'})
flaglist.append({'name':'Detect64BitPortabilityProblems', 'flag':'/Wp64','value':'true'})
flaglist.append({'name':'EnableFiberSafeOptimizations', 'flag':'/GT','value':'true'})
flaglist.append({'name':'EnableFunctionLevelLinking', 'flag':'/Gy','value':'true'})
flaglist.append({'name':'EnableIntrinsicFunctions', 'flag':'/Oi', 'value':'true'})
flaglist.append({'name':'GlobalOptimizations', 'flag':'/Og', 'value':'true'})
flaglist.append({'name':'ImproveFloatingPointConsistency', 'flag':'/Op', 'value':'true'})
flaglist.append({'name':'MinimalRebuild', 'flag':'/Gm', 'value':'true'})
flaglist.append({'name':'OmitFramePointers', 'flag':'/Oy', 'value':'true'})
flaglist.append({'name':'OptimizeForWindowsApplication', 'flag':'/GA', 'value':'true'})
flaglist.append({'name':'RuntimeTypeInfo', 'flag':'/GR', 'value':'true'})
flaglist.append({'name':'RuntimeTypeInfo', 'flag':'/GR-','value':'false'})
flaglist.append({'name':'SmallerTypeCheck', 'flag':'/RTCc', 'value':'true'})
flaglist.append({'name':'SuppressStartupBanner', 'flag':'/nologo', 'value':'true'})
flaglist.append({'name':'WholeProgramOptimization', 'flag':'/GL', 'value':'true'})
flaglist.append({'name':'WholeProgramOptimization', 'flag':'/GL-','value':'false'})
flaglist.append({'name':'WarnAsError', 'flag':'/WX', 'value':'true'})
flaglist.append({'name':'BrowseInformation', 'flag':'/FR', 'value':'1'})
flaglist.append({'name':'StringPooling', 'flag':'/GF', 'value':'true'})
flaglist.append({'name':'ShowIncludes', 'flag':'/showIncludes', 'value':'true'})
#VC8
flaglist.append({'name':'CallingConvention', 'flag':'/Gd', 'value':'0'})
flaglist.append({'name':'CallingConvention', 'flag':'/Gr', 'value':'1'})
flaglist.append({'name':'CallingConvention', 'flag':'/Gz', 'value':'2'})
flaglist.append({'name':'ErrorReporting', 'flag':'/errorReport:prompt', 'value':'1'})
flaglist.append({'name':'ErrorReporting', 'flag':'/errorReport:queue', 'value':'2'})
flaglist.append({'name':'ExceptionHandling', 'flag':'/GX', 'value':'1'})
flaglist.append({'name':'ExceptionHandling', 'flag':'/EHsc', 'value':'1'})
flaglist.append({'name':'ExceptionHandling', 'flag':'/EHa', 'value':'2'})
flaglist.append({'name':'EnablePREfast', 'flag':'/analyze', 'value':'true'})
flaglist.append({'name':'EnablePREfast', 'flag':'/analyze-', 'value':'false'})
flaglist.append({'name':'TreatWChar_tAsBuiltInType', 'flag':'/Zc:wchar_t', 'value':'true'})
flaglist.append({'name':'TreatWChar_tAsBuiltInType', 'flag':'/Zc:wchar_t-', 'value':'false'})
def print_usage():
print 'Usage: vc2cm.py example.vcproj [-m foo=bar]* '
print 'Available commands:'
print "\t-m, --macro\tdefine macros, like: -m SolutionDir=\"Relative/Directory\""
print "\t-s, --strict\tbe strict(exclude missing files)"
print "\t-v, --verbose\tbe verbose(report missing files)"
print '*.vcproj to CMakeLists.txt converter /by Robert Keszeg (c)2015'
print 'Version 0.8 Use without warranty'
commands = sys.argv[2:len(sys.argv)]
state = 'free'
macros = {}
verbose = False
strict = False
for command in commands:
if verbose:
print 'command:'+command
if (state == 'free'):
if command == '-m' or command == '--macro':
state = 'macro'
elif command == '-v' or command == '--verbose':
verbose = True
elif command == '-s' or command == '--strict':
strict = True
else:
print 'Error: I don\'t understand the command<'+command+'>'
print_usage()
elif (state == 'macro'):
marr = command.split('=')
macros[marr[0]] = marr[1]
if verbose:
print '$('+marr[0]+') = "'+marr[1]+'"'
state = 'free'
if len(sys.argv) < 2:
    print_usage()
    sys.exit(1)
tree = ET.parse(sys.argv[1])
root = tree.getroot()
projname = root.get("Name")
if verbose:
print "Project name:"+ projname;
def normalizepath(input):
patharr = input.split('\\')
if patharr[0] == '.':
newname = '/'.join(patharr[1:])
else:
newname = '/'.join(patharr)
for key in macros:
newname = newname.replace('$('+key+')',macros[key])
# print 'NORMALIZING:'+input+'==>'+newname
return newname
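# Hypothetical example: with -m SolutionDir=build on the command line,
# normalizepath(r'.\src\$(SolutionDir)\a.cpp') returns 'src/build/a.cpp'.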
def pack_flag(flag,filename):
return flag+'\\"'+normalizepath(filename)+'\\"'
if verbose:
print "------ Configurations -----"
targetset = set()
configset = set()
platformset = set()
for config in root.iter("Configuration"):
cfgarr = config.get("Name").split("|");
cfgname = '|'.join(cfgarr)
targetset.add(cfgname)
configset.add(cfgarr[0])
platformset.add(cfgarr[1])
# cfgfilename = '_'.join(cfgarr)
# cfgfilename = cfgfilename.replace(" ","_")
# print cfgfilename
# cfgfile = open(cfgfilename+".xml", "w")
# cfgfile.write(ET.tostring(config))
# cfgfile.close()
cfg = sorted(list(configset))
def normalizestring(input):
return input.replace("|","_").replace("(","").replace(")","").replace(" ","_").replace("-","_").upper()
def get_compile_flags(tool):
compile_flags = []
if tool.has_key('AdditionalOptions'):
compile_flags.extend(tool['AdditionalOptions'].split(';'))
for item in flaglist:
if tool.has_key(item['name']):
if tool[item['name']] == item['value']:
compile_flags.append(item['flag'])
if tool.has_key('DisableSpecificWarnings'):
warnings = tool['DisableSpecificWarnings']
warning_list = warnings.split(';')
for warning in warning_list:
compile_flags.append('/wd'+warning)
# if tool.has_key('UsePrecompiledHeader') and tool['UsePrecompiledHeader'] != '0':
# headerthrough = 'stdafx.h'
# if tool.has_key('PrecompiledHeaderThrough'):
# headerthrough = tool['PrecompiledHeaderThrough']
# if tool['UsePrecompiledHeader'] == '1':
# compile_flags.append(pack_flag('/Yc',headerthrough))
# if tool['UsePrecompiledHeader'] == '2':
# compile_flags.append(pack_flag('/Yu',headerthrough))
#
# header = '$(IntDir)\$(TargetName).pch'
# if tool.has_key('PrecompiledHeaderFile'):
# header = tool['PrecompiledHeaderFile']
# compile_flags.append(pack_flag('/Fp',header))
if tool.has_key('ForcedIncludeFiles'):
compile_flags.append(pack_flag('/FI',tool['ForcedIncludeFiles']))
if tool.has_key('ForcedUsingFiles'):
compile_flags.append(pack_flag('/FU',tool['ForcedUsingFiles']))
if tool.has_key('UndefinePreprocessorDefinitions'):
compile_flags.append(pack_flag('/U',tool['UndefinePreprocessorDefinitions']))
if tool.has_key('AssemblerListingLocation'):
compile_flags.append(pack_flag('/Fa',tool['AssemblerListingLocation']))
if tool.has_key('ProgramDataBaseFileName'):
compile_flags.append(pack_flag('/Fd',tool['ProgramDataBaseFileName']))
return compile_flags
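# Hypothetical example: a tool dict like {'Optimization': '2',
# 'WarningLevel': '3', 'DisableSpecificWarnings': '4996;4100'} yields
# ['/O2', '/W3', '/wd4996', '/wd4100'].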
targetlist = sorted(list(targetset))
custom_commands = {}
#for target in targetlist:
# print normalizestring(target)
class Command:
def __init__(self,name,command,extension,output,description):
self.name = name
self.command = command
self.extension = extension
self.output = output
self.description = description
def getstrings(self):
lines = list()
lines.append('add_custom_target("'+self.name+'"')
lines.append('\tOUTPUT "'+normalizepath(self.output)+'"')
lines.append('\tCOMMAND \''+normalizepath(self.command)+'\'')
lines.append('\tCOMMENT "'+self.description+'"')
lines.append('\tSOURCES ${SOURCES}')
lines.append(')')
return lines
class Config:
def __init__(self, name, properties):
self.name = name
self.properties = properties.copy()
self.tools = {}
self.files = list()
def add_tool(self,name,properties):
self.tools[name] = properties
def add_file(self,file):
self.files.append(file)
def get_string(self):
lines = list()
lines.append('if(CMAKE_BUILD_TYPE STREQUAL "'+self.name+'")')
#include directories
if self.tools.has_key('VCCLCompilerTool'):
tool = self.tools['VCCLCompilerTool']
if tool.has_key('AdditionalIncludeDirectories'):
includes = tool['AdditionalIncludeDirectories'].split(';')
lines.append('\tinclude_directories(')
for dir in includes:
if dir == '.':
lines.append('\t\t"."')
else:
lines.append('\t\t"'+normalizepath(dir)+'"')
lines.append('\t)')
#source files
lines.append('')
lines.append('\tlist(APPEND CONFIGSRC ')
for file in self.files:
lines.append('\t\t"'+normalizepath(file)+'"')
lines.append('\t)')
for cmd in custom_commands:
            command = custom_commands[cmd]
#lines.extend(command.getstrings())
#goal target
lines.append('#')
type = self.properties['ConfigurationType']
if type == '1':
lines.append('\tadd_executable('+projname+' ${SOURCES})')
elif type == '2':
lines.append('\tadd_library('+projname+' SHARED ${SOURCES})')
elif type == '4':
lines.append('\tadd_library('+projname+' STATIC ${SOURCES})')
else:
print 'Unknown configuration type:'+type
if self.tools.has_key('VCCLCompilerTool'):
tool = self.tools['VCCLCompilerTool']
if tool.has_key('PreprocessorDefinitions'):
preprocessor = tool['PreprocessorDefinitions'].split(';')
                lines.append('\tset_property(TARGET '+projname)
                lines.append('\t\tAPPEND PROPERTY COMPILE_DEFINITIONS')
                for prep in preprocessor:
                    lines.append('\t\t'+prep)
lines.append('\t)')
#additional options
compile_flags = get_compile_flags(tool)
if len(compile_flags):
lines.append('\tif(MSVC)')
for compile_flag in compile_flags:
lines.append('\t\tset_property(TARGET '+projname+' APPEND_STRING PROPERTY COMPILE_FLAGS')
lines.append('\t\t\t"'+compile_flag+' "')#notice the extra space after every flag
lines.append('\t\t)')
lines.append('\tendif()')
lines.append('endif()')
return lines
class Group:
def __init__(self,names,files):
self.names = names
normalized = []
for file in files:
newname = normalizepath(file)
normalized.append(newname)
self.files = normalized
def getname(self):
name = normalizestring(projname+'_'+'_'.join(self.names)+'_SRC')
return name
def getnamevar(self):
name = '${'+self.getname()+'}'
return name
def tostring(self):
lines = []
lines.append('set('+self.getname())
for file in self.files:
lines.append('\t"'+file+'"')
lines.append(')')
lines.append('source_group("'+'\\\\'.join(self.names)+'"')
lines.append('\tFILES '+self.getnamevar())
lines.append(')')
return lines
if verbose:
print "------ Filters -----"
cmake_groups = list()
vcproj_configs = {}
def readconfigs(lroot):
for configxml in lroot:
cfgname = configxml.get("Name")
cfgprops = configxml.attrib
cfg = Config(cfgname,cfgprops)
for toolxml in configxml:
toolprops = toolxml.attrib
toolname = toolxml.get('Name')
cfg.add_tool(toolname,toolprops)
vcproj_configs[cfgname] = cfg
def readfileconfigs(fname,lroot):
for fileConfig in lroot:
cname = fileConfig.get('Name')
excluded = False
if ( fileConfig.attrib.has_key('ExcludedFromBuild')) and (fileConfig.get('ExcludedFromBuild') == "true"):
excluded = True
# print "Excluded:"+fname
if vcproj_configs.has_key(cname):
config = vcproj_configs[cname]
for tool in fileConfig:
if not excluded:
config.add_file(fname)
def getgroupsfor(rootnames,lroot):
# print "//".join(rootnames)
files = list()
for xmlFile in lroot:
if xmlFile.tag == "Filter":
continue
xmlfilename = xmlFile.get("RelativePath")
fileexists = os.path.exists(xmlfilename)
        # in strict mode only include files that actually exist on disk
        if fileexists or not strict:
            files.append(xmlfilename)
        if verbose and not fileexists:
            print "file mapped in vcproj not found: " + xmlfilename
readfileconfigs(xmlfilename,xmlFile)
group = Group(rootnames,files)
cmake_groups.append(group)
for xmlFilter in lroot:
if xmlFilter.tag == "File":
continue
xmlfiltername = xmlFilter.get("Name")
newnames = rootnames[:]
newnames.append(xmlfiltername)
getgroupsfor(newnames,xmlFilter)
def getTool(toolpath):
xmltool = ET.parse(toolpath)
root = xmltool.getroot()
if verbose:
print "TOOL: "+root.get("Name")
xmlrules = xmltool.find("Rules")
for xmlbuildrule in xmlrules:
name = xmlbuildrule.get("Name")
cmd = xmlbuildrule.get("CommandLine")
ext = xmlbuildrule.get("FileExtensions")
output = xmlbuildrule.get("Outputs")
description = xmlbuildrule.get("ExecutionDescription");
custom_commands[name] = Command(name,cmd,ext,output,description)
xmlTools = root.find("ToolFiles")
for xmlTool in xmlTools:
getTool(xmlTool.get("RelativePath"))
xmlConfigs = root.find("Configurations")
readconfigs(xmlConfigs)
xmlFiles = root.find("Files")
getgroupsfor([],xmlFiles)
if verbose:
print "------ Output -----"
lines = []
#lines.append('set(CMAKE_CONFIGURATION_TYPES "'+";".join(cfg)+'" CACHE STRING "VS/XCode configurations" FORCE)')
lines.append('cmake_minimum_required(VERSION 2.8)')
lines.append('project('+projname+')')
lines.append('set(CONFIGSRC)')
lines.append('if(NOT CMAKE_BUILD_TYPE)')
lines.append('set(CMAKE_BUILD_TYPE Debug|Win32 CACHE STRING')
lines.append('\t"Choose the type of the build, options are: ${TARGETS} " FORCE)')
lines.append('endif()')
for group in cmake_groups:
lines.extend(group.tostring())
lines.append('set(SOURCES')
for group in cmake_groups:
lines.append('\t'+group.getnamevar()+'')
lines.append(')')
lines.append('set(TARGETS')
for target in targetlist:
lines.append('\t"'+target+'"')
lines.append(')')
for name in vcproj_configs:
lines.extend(vcproj_configs[name].get_string())
lines.append('set_target_properties( '+projname+' PROPERTIES LINKER_LANGUAGE C++ )')
lines.append('set(TOEXCLUDE ${SOURCES})')
lines.append('list(REMOVE_ITEM TOEXCLUDE ${CONFIGSRC})')
lines.append('set_source_files_properties(${TOEXCLUDE} PROPERTIES HEADER_FILE_ONLY ON)')
if verbose:
print "------ End -----"
cmakefile = open("CMakeLists.txt","w")
cmakefile.write('\n'.join(lines).replace("\t"," "))
cmakefile.close()
|
"""@file reconstructor.py
contains the Reconstructor class"""
from abc import ABCMeta, abstractmethod
import os
import scipy.io.wavfile as wav
import numpy as np
class Reconstructor(object):
"""the general reconstructor class
a reconstructor is used to reconstruct the signals from the models output"""
__metaclass__ = ABCMeta
def __init__(self, conf, evalconf, dataconf, rec_dir, task, optimal_frame_permutation=False):
"""Reconstructor constructor
        Args:
            conf: the reconstructor configuration as a dictionary
            evalconf: the evaluator configuration as a ConfigParser
            dataconf: the database configuration
            rec_dir: the directory where the reconstructions will be stored
            task: the name of the task that is being evaluated
            optimal_frame_permutation: whether to use the optimal speaker
                permutation per frame (based on the targets)
        """
self.conf = conf
self.dataconf = dataconf
if evalconf.has_option(task, 'batch_size'):
self.batch_size = int(evalconf.get(task, 'batch_size'))
else:
self.batch_size = int(evalconf.get('evaluator', 'batch_size'))
self.segment_lengths = evalconf.get('evaluator', 'segment_length').split(' ')
self.optimal_frame_permutation = optimal_frame_permutation
self.nrS = int(conf['nrs'])
if 'transpose_order' in conf:
            self.transpose_order = list(map(int, conf['transpose_order'].split(' ')))
else:
self.transpose_order = False
        # create the directory to write the reconstructions to
self.rec_dir = rec_dir
if not os.path.isdir(self.rec_dir):
os.makedirs(self.rec_dir)
for spk in range(self.nrS):
if not os.path.isdir(os.path.join(self.rec_dir, 's' + str(spk+1))):
os.makedirs(os.path.join(self.rec_dir, 's' + str(spk+1)))
# the use of the position variable only works because in the evaluator the
# shuffle option in the data_queue is set to False!!
self.pos = 0
self.scp_file = os.path.join(self.rec_dir, 'pointers.scp')
# whether to save output as numpy instead of wav file
if 'save_as_numpy' in conf:
self.save_as_numpy = conf['save_as_numpy'] in ['True', 'true']
else:
self.save_as_numpy = False
        # Whether the raw output should also be stored (besides the reconstructed audio signal)
self.store_output = conf['store_output'] == 'True'
if self.store_output:
self.output_dir = os.path.join(rec_dir, 'raw_output')
if not os.path.isdir(self.output_dir):
os.makedirs(self.output_dir)
def __call__(self, batch_outputs, batch_sequence_lengths):
""" reconstruct the signals and write the audio files
Args:
- batch_outputs: A dictionary containing the batch outputs of the network
- batch_sequence_lengths: A dictionary containing the sequence length for each utterance
"""
if self.transpose_order:
for output_name in self.requested_output_names:
batch_outputs[output_name] = np.transpose(batch_outputs[output_name], self.transpose_order)
for utt_ind in range(self.batch_size):
utt_output = dict()
for output_name in self.requested_output_names:
                # the anchor output for the (weighted) anchor deepattractornet softmax
                # reconstructors is a special case: it is shared over the whole batch
                if output_name == 'anchors' and self.__class__.__name__ in ['AnchorDeepattractorSoftmaxReconstructor', 'WeightedAnchorDeepattractorSoftmaxReconstructor']:
                    utt_output[output_name] = batch_outputs[output_name]
                elif output_name == 'anchors_scale' and self.__class__.__name__ in ['TimeAnchorScalarDeepattractorSoftmaxReconstructor']:
                    utt_output[output_name] = batch_outputs[output_name]
else:
utt_output[output_name] = \
batch_outputs[output_name][utt_ind][:batch_sequence_lengths[output_name][utt_ind], :]
# reconstruct the signals
reconstructed_signals, utt_info = self.reconstruct_signals(utt_output)
# make the audio files for the reconstructed signals
if self.save_as_numpy:
filename = os.path.join(self.rec_dir, utt_info['utt_name'] + '.npy')
np.save(filename, reconstructed_signals)
else:
self.write_audiofile(reconstructed_signals, utt_info)
# if requested store the raw output
if self.store_output:
for output_name in self.requested_output_names:
savename = output_name+'_'+utt_info['utt_name']
np.save(os.path.join(self.output_dir, savename), utt_output[output_name])
self.pos += 1
def opt_frame_perm(self, batch_outputs, batch_targets, batch_sequence_lengths):
""" reconstruct the signals, using the optimal speaker permutations on frame level using the targets, and write
the audio files
Args:
- batch_outputs: A dictionary containing the batch outputs of the network
            - batch_targets: A dictionary containing the batch targets for the outputs
- batch_sequence_lengths: A dictionary containing the sequence length for each utterance
"""
for utt_ind in range(self.batch_size):
utt_output = dict()
for output_name in self.requested_output_names:
utt_output[output_name] = \
batch_outputs[output_name][utt_ind][:batch_sequence_lengths[output_name][utt_ind], :]
            # assuming only one requested target; note that output_name still holds the last name from the loop above
target_keys = [key for key in batch_targets.keys() if 'target' in key]
utt_target = {
key: batch_targets[key][utt_ind][:batch_sequence_lengths[output_name][utt_ind], :]
for key in target_keys}
# reconstruct the signals
reconstructed_signals, utt_info = self.reconstruct_signals_opt_frame_perm(utt_output, utt_target)
# make the audio files for the reconstructed signals
self.write_audiofile(reconstructed_signals, utt_info)
# if requested store the raw output
if self.store_output:
for output_name in self.requested_output_names:
savename = output_name+'_'+utt_info['utt_name']
np.save(os.path.join(self.output_dir, savename), utt_output[output_name])
self.pos += 1
@abstractmethod
def reconstruct_signals(self, output):
"""reconstruct the signals
Args:
output: the output of a single utterance of the neural network
Returns:
the reconstructed signals"""
def write_audiofile(self, reconstructed_signals, utt_info):
"""write the audiofiles for the reconstructions
Args:
reconstructed_signals: the reconstructed signals for a single mixture
utt_info: some info on the utterance
"""
write_str = utt_info['utt_name']
for spk in range(self.nrS):
rec_dir = os.path.join(self.rec_dir, 's' + str(spk+1))
filename = os.path.join(rec_dir, utt_info['utt_name']+'.wav')
signal = reconstructed_signals[spk]
if signal.dtype == np.float64:
signal = np.float32(signal)
wav.write(filename, utt_info['rate'], signal)
write_str += ' ' + filename
write_str += ' \n'
self.scp_fid.write(write_str)
def open_scp_files(self, from_start=True):
if from_start:
file_mode = 'w'
else:
file_mode = 'a+'
self.scp_fid = open(self.scp_file, file_mode)
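

# A minimal sketch (not part of the original module) of what a concrete
# subclass could look like. The pass-through logic, the 'outputs' name and
# the fixed 16 kHz rate below are purely illustrative; real subclasses
# (e.g. for deep clustering or deep attractor networks) implement their own
# reconstruct_signals logic.
class PassThroughReconstructor(Reconstructor):
    """hypothetical reconstructor that treats the raw network output as the
    reconstructed signal of every speaker"""

    requested_output_names = ['outputs']

    def reconstruct_signals(self, output):
        """use the single requested output as the signal of each speaker"""
        signals = [output['outputs'] for _ in range(self.nrS)]
        utt_info = {'utt_name': 'utt%d' % self.pos, 'rate': 16000}
        return signals, utt_info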
|
#!/usr/bin/env python
import math
import re
import StringIO
import copy
import random
import types
import struct
import colorsys
#Class definition for Gradients
#These use the format defined by the GIMP
#The file format is:
# GIMP Gradient ; literal identifier
# Name: <utf8-name> ; optional, else get from filename
# 3 ; number of points N
# ; N lines like this
# 0.000000 0.166667 0.333333 0.000000 0.000000 1.000000 1.000000 0.000000 0.000000 1.000000 1.000000 0 0
# The format is
# start middle end [range 0...1]
# R G B A left endpoint
# R G B A right endpoint
# segment_type coloring_type
# segment-type is
# GIMP_GRADIENT_SEGMENT_LINEAR,
# GIMP_GRADIENT_SEGMENT_CURVED,
# GIMP_GRADIENT_SEGMENT_SINE,
# GIMP_GRADIENT_SEGMENT_SPHERE_INCREASING,
# GIMP_GRADIENT_SEGMENT_SPHERE_DECREASING
# color type is
# GIMP_GRADIENT_SEGMENT_RGB, /* normal RGB */
# GIMP_GRADIENT_SEGMENT_HSV_CCW, /* counterclockwise hue */
# GIMP_GRADIENT_SEGMENT_HSV_CW /* clockwise hue */
#gradientfile_re = re.compile(r'\s*(RGB|HSV)\s+(Linear|Sinusoidal|CurvedI|CurvedD)\s+(\d+\.?\d+)\s+(\d+)\s+(\d+)\s+(\d+\.?\d+)\s+(\d+)\s+(\d+)')
rgb_re = re.compile(r'\s*(\d+)\s+(\d+)\s+(\d+)')
class FileType:
MAP, GGR, CS, UGR = range(4)
@staticmethod
def guess(s):
s = s.lower()
if s.endswith(".map"):
return FileType.MAP
elif s.endswith(".cs"):
return FileType.CS
elif s.endswith(".ugr"):
return FileType.UGR
else:
# assume a GIMP gradient, those sometimes don't have extensions
return FileType.GGR
class Blend:
LINEAR, CURVED, SINE, SPHERE_INCREASING, SPHERE_DECREASING = range(5)
class ColorMode:
RGB, HSV_CCW, HSV_CW = range(3)
class Error(Exception):
def __init__(self, msg):
Exception.__init__(self, msg)
class HsvError(Exception):
def __init__(self, msg):
Exception.__init__(self, msg)
class Segment:
EPSILON=1.0E-7
def __init__(self, left, left_color, right, right_color, mid=None,
blend_mode=Blend.LINEAR,
color_mode=ColorMode.RGB):
self.cmode = color_mode
self.bmode = blend_mode
self.left = left
self.left_color = left_color
self.right = right
self.right_color = right_color
        if mid is None:
self.center()
else:
self.mid = mid
    def __copy__(self):
        return Segment(
            self.left, self.left_color[:],
            self.right, self.right_color[:], self.mid,
            self.bmode, self.cmode)
    def __eq__(self, other):
        if other is None: return False
        if not isinstance(other, Segment): return False
return self.cmode == other.cmode and \
self.bmode == other.bmode and \
self.close(self.left, other.left) and \
self.close(self.right, other.right) and \
self.close(self.mid, other.mid) and \
self.close(self.left_color, other.left_color) and \
self.close(self.right_color, other.right_color)
def __ne__(self, other):
return not self.__eq__(other)
def left_of(self,other):
# true if other.left == this.right
return other.left == self.right and \
other.left_color[0] == self.right_color[0] and \
other.left_color[1] == self.right_color[1] and \
other.left_color[2] == self.right_color[2] and \
other.left_color[3] == self.right_color[3]
def right_of(self,other):
# true if other.right == this.left
return other.right == self.left and \
other.right_color[0] == self.left_color[0] and \
other.right_color[1] == self.left_color[1] and \
other.right_color[2] == self.left_color[2] and \
other.right_color[3] == self.left_color[3]
def close(self, a, b):
# True if a is nearly == b
if isinstance(a, types.ListType):
for (ax,bx) in zip(a,b):
if abs(ax-bx) > 1.0E-5:
return False
return True
else:
return abs(a-b) < 1.0E-5
def center(self):
self.mid = (self.left + self.right) / 2.0
def get_linear_factor(self, pos, middle):
if pos <= middle:
if middle < Segment.EPSILON:
return 0.0
else:
return 0.5 * pos / middle
else:
pos -= middle;
middle = 1.0 - middle
if middle < Segment.EPSILON:
return 1.0
else:
return 0.5 + 0.5 * pos / middle
def get_curved_factor(self, pos, middle):
if middle < Segment.EPSILON:
middle = Segment.EPSILON
try:
return math.pow(pos, ( math.log(0.5) / math.log(middle) ))
        except ZeroDivisionError:
            # math.pow(0, negative) raises ZeroDivisionError; treat the result as 0
            return 0.0
def get_sine_factor(self, pos, middle):
pos = self.get_linear_factor(pos, middle)
return (math.sin ((-math.pi / 2.0) + math.pi * pos) + 1.0) / 2.0
def get_sphere_increasing_factor(self, pos, middle):
pos = self.get_linear_factor(pos, middle) - 1.0
return math.sqrt (1.0 - pos * pos)
def get_sphere_decreasing_factor(self, pos, middle):
pos = self.get_linear_factor(pos, middle)
return 1.0 - math.sqrt (1.0 - pos * pos)
def get_color_at(self, pos):
'compute the color value for a point in this segment'
lcol = self.left_color
rcol = self.right_color
if self.cmode == ColorMode.HSV_CCW or self.cmode == ColorMode.HSV_CW:
lcol = [v for v in colorsys.rgb_to_hsv(lcol[0],lcol[1],lcol[2])] + [lcol[3]]
rcol = [v for v in colorsys.rgb_to_hsv(rcol[0],rcol[1],rcol[2])] + [rcol[3]]
if self.cmode == ColorMode.HSV_CCW:
if lcol[0] >= rcol[0]: rcol[0] += 1.0
if self.cmode == ColorMode.HSV_CW:
if lcol[0] <= rcol[0]: lcol[0] += 1.0
        seg_len = self.right - self.left
        if seg_len < Segment.EPSILON:
            # avoid division by zero
            mpos = 0.5
            pos = 0.5
        else:
            mpos = (self.mid - self.left) / seg_len
            pos = (pos - self.left) / seg_len
if self.bmode == Blend.LINEAR:
factor = self.get_linear_factor(pos, mpos)
elif self.bmode == Blend.CURVED:
factor = self.get_curved_factor(pos, mpos)
elif self.bmode == Blend.SINE:
factor = self.get_sine_factor(pos, mpos)
elif self.bmode == Blend.SPHERE_INCREASING:
factor = self.get_sphere_increasing_factor(pos, mpos)
elif self.bmode == Blend.SPHERE_DECREASING:
factor = self.get_sphere_decreasing_factor(pos, mpos)
        # interpolate componentwise; the channels are RGB, or HSV when in an HSV color mode
RH = lcol[0] + (rcol[0] - lcol[0]) * factor
GS = lcol[1] + (rcol[1] - lcol[1]) * factor
BV = lcol[2] + (rcol[2] - lcol[2]) * factor
A = lcol[3] + (rcol[3] - lcol[3]) * factor
if self.cmode == ColorMode.RGB:
return [RH, GS, BV, A]
if self.cmode == ColorMode.HSV_CCW or self.cmode == ColorMode.HSV_CW:
if RH > 1: RH -= 1
return [v for v in colorsys.hsv_to_rgb(RH,GS,BV)] + [A]
def save(self,f,skip_left=False):
if skip_left:
# this segment's left end == previous right, so leave it out
print >>f, "+%6f %6f" % (self.mid, self.right),
for x in self.right_color:
print >>f, "%6f" % x,
else:
print >>f, "%6f %6f %6f" % (self.left, self.mid, self.right),
for x in self.left_color + self.right_color:
print >>f, "%6f" % x,
print >>f, "%d %d" % (self.bmode, self.cmode)
class Gradient:
def __init__(self):
self.segments=[
Segment(0,[0,0,0,1.0], 1.0, [1.0,1.0,1.0,1.0])]
self.name=None
self.alternate=0
self.offset=0
self.cobject=None
def __copy__(self):
c = Gradient()
c.name = self.name
c.alternate = self.alternate
c.offset = self.offset
c.segments = copy.deepcopy(self.segments)
return c
    def __eq__(self, other):
        if other is None: return False
        if not isinstance(other, Gradient): return False
if self.name != other.name: return False
if self.segments != other.segments: return False
return True
def __ne__(self, other):
return not self.__eq__(other)
def serialize(self):
s = StringIO.StringIO()
self.save(s,True)
return s.getvalue()
def save(self,f,compress=False):
print >>f, "GIMP Gradient"
if self.name:
print >>f, "Name:", self.name
print >>f, len(self.segments)
last = None
for seg in self.segments:
compress_seg = compress and last != None and seg.right_of(last)
seg.save(f, compress_seg)
last = seg
def load_cs(self, f):
"Load a ColorSchemer (.cs) palette file"
# this appears to work but file format was reverse engineered
# so there may be cases unaccounted for
        (ncolors,) = struct.unpack("2xB5x", f.read(8))
        entries = []
        for i in xrange(ncolors):
            (r, g, b, skip) = struct.unpack("<BBBxI", f.read(8))
            entry = (i / float(ncolors), r, g, b, 255)
            f.read(skip)
            (r2, g2, b2, skip) = struct.unpack("BBBB", f.read(4))
            f.read(skip + 1)
            entries.append(entry)
        self.load_list(entries)
def load_ugr(self, f):
"Load an ir tree parsed by the translator"
prev_index = 0.0
index = 0.0
segments = []
prev_color = [0.0,0.0,0.0,0.0]
for s in f.sections["gradient"].children:
(name,val) = (s.children[0].name, s.children[1].value)
if name == "index":
index = float(val)/400.0
elif name == "color":
icolor = val
color = [
float(icolor & 0xFF) / 256.0,
float((icolor >> 8) & 0xFF) / 256.0,
float((icolor >> 16) & 0xFF) / 256.0,
1.0]
seg = Segment(
prev_index, prev_color,
index, color,
(prev_index + index)/2.0,
Blend.LINEAR, ColorMode.RGB)
segments.append(seg)
prev_index = index
prev_color = color
elif name == "smooth":
pass #self.smooth = val
elif name == "title":
self.name = val
# append a last chunk from the final value to 1.0
seg = Segment(
prev_index, prev_color,
1.0, prev_color,
(prev_index + 1.0)/2.0,
Blend.LINEAR, ColorMode.RGB)
segments.append(seg)
self.segments = segments
def load_gimp_gradient(self,f):
new_segments = []
name = None
line = f.readline()
if line.startswith("Name:"):
name = line[5:].strip()
line = f.readline()
num_vals = int(line)
for i in xrange(num_vals):
line = f.readline()
if line[:1] == "+":
# a compressed continuation, use last vals
left = right
lr,lg,lb,la = rr,rg,rb,ra
[mid,right,rr,rg,rb,ra,bmode,cmode] = line.split()
else:
list_elements = line.split()
[left, mid, right,
lr, lg, lb, la,
rr, rg, rb, ra,
bmode, cmode] = list_elements[0:13]
seg = Segment(
float(left), [float(lr), float(lg), float(lb), float(la)],
float(right),[float(rr), float(rg), float(rb), float(ra)],
float(mid),
int(bmode), int(cmode))
new_segments.append(seg)
self.segments = new_segments
self.name = name
def load(self,f):
if hasattr(f, "readline"):
# assume this is a file type
line = f.readline()
if line == "GIMP Gradient\n":
return self.load_gimp_gradient(f)
elif line[:2] == "\x03\x00":
# a .cs file, we suspect
f.seek(0)
return self.load_cs(f)
else:
f.seek(0)
return self.load_map_file(f)
else:
# assume it's a translated UGR file
return self.load_ugr(f)
def compare_colors(self, c1, c2, maxdiff=0):
# return true if floating-point colors c1 and c2 are close
# enough that they would be equal when truncated to 8 bits
for (a,b) in zip(c1, c2):
a8 = int(a * 255.0)
b8 = int(b * 255.0)
if abs(a8 - b8) > maxdiff:
return False
return True
def load_map_file(self,mapfile,maxdiff=0):
i = 0
colorlist = []
solid = (0,0,0,255)
for line in mapfile:
m = rgb_re.match(line)
if m != None:
(r,g,b) = (min(255, int(m.group(1))),
min(255, int(m.group(2))),
min(255, int(m.group(3))))
if i == 0:
# first color is inside solid color
solid = (r,g,b,255)
else:
colorlist.append(((i-1)/255.0,r,g,b,255))
i += 1
self.load_list(colorlist,maxdiff)
return solid
def load_list(self,l, maxdiff=0):
# a colorlist is a simplified gradient, of the form
# (index, r, g, b, a) (colors are 0-255 ints)
# each index is the left-hand end of the segment
# each colorlist entry is mapped to a segment endpoint
if len(l) == 0:
raise Error("No colors found")
new_segments = []
last_index = 0.0
last_color = [0.0,0.0,0.0,1.0]
before_last_color = [-1000.0, -1000.0 , -1000.0, -1000.0] # meaningless color
before_last_index = -1.0
for (index,r,g,b,a) in l:
color = [r/255.0, g/255.0, b/255.0, a/255.0]
if index != last_index:
test_segment = Segment(
before_last_index,
before_last_color,
index,
color)
if self.compare_colors(
test_segment.get_color_at(last_index), last_color, maxdiff):
# can compress, update in place
new_segments[-1].right_color = color
new_segments[-1].right = index
new_segments[-1].center()
else:
new_segments.append(
Segment(last_index, last_color, index, color))
before_last_index = last_index
before_last_color = last_color
last_color = color
last_index = index
# fix gradient by adding extra flat section if last index not 1.0
if new_segments[-1].right != 1.0:
new_segments.append(
Segment(new_segments[-1].right, last_color, 1.0, last_color))
self.segments = new_segments
def load_fractint(self,l):
# l is a list of colors from a Fractint .par file
# convert format to colorlist
i = 0
colors = []
for (r,g,b) in l:
colors.append((i/255.0,r*4,g*4,b*4,255))
i += 1
# load it
self.load_list(colors,-1.0)
def set_color(self,seg_id,is_left,r,g,b):
if seg_id < 0 or seg_id >= len(self.segments):
return False
seg = self.segments[seg_id]
if is_left:
seg.left_color = [r,g,b, seg.left_color[3]]
else:
seg.right_color = [r,g,b, seg.right_color[3]]
return True
def complementaries(self, base_color):
# return some other colors that "go" with this one
hsv = RGBtoHSV(base_color)
(h,s,v,a) = hsv
        # take 2 colors which are almost triads; hue is in degrees here
        # (HSVtoRGB divides by 60), so scale the 0..6 sector offsets by 60
        h = hsv[0]
        delta = random.gauss(0.0, 0.8) * 60.0
        h2 = math.fmod(h + 150.0 + delta, 360.0)
        h3 = math.fmod(h + 210.0 - delta, 360.0)
# take darker and lighter versions
v = hsv[2]
vlight = self.clamp(v * 1.5, 0.0, 1.0)
vdark = v * 0.5
colors = [
[h, s, vdark, a],
[h, s, v, a],
[h, s, vlight, a],
[h2, s, vlight, a],
[h2, s, v, a],
[h2, s, vdark, a],
[h3, s, vdark, a],
[h3, s, v, a],
[h3, s, vlight, a]]
colors = [ HSVtoRGB(x) for x in colors]
return colors
def randomize(self, length):
if random.random() < 0.5:
self.randomize_complementary(length)
else:
self.randomize_spheres((int(random.random() * 4)+3)*2)
def randomize_complementary(self,length):
base = [random.random(), random.random(), random.random(), 1.0]
colors = self.complementaries(base)
self.segments = []
prev_index = 0.0
prev_color = colors[0]
first_color = prev_color
for i in xrange(9-1):
index = float(i+1)/length
color = colors[i]
self.segments.append(
Segment(prev_index, prev_color, index, color))
prev_color = color
prev_index = index
self.segments.append(
Segment(prev_index, prev_color, 1.0, first_color)) # make it wrap
def random_bright_color(self):
return HSVtoRGB(
[ random.random() * 360.0,
random.random(),
random.random() * 0.6 + 0.4,
1.0])
def randomize_spheres(self, length):
self.segments = []
prev_index = 0.0
prev_color = self.random_bright_color()
first_color = prev_color
for i in xrange(length-1):
index = float(i+1)/length
if i % 2 == 1:
color = self.random_bright_color()
blend = Blend.SPHERE_INCREASING
else:
color = [0.0, 0.0, 0.0, 1.0]
blend = Blend.SPHERE_DECREASING
self.segments.append(
Segment(prev_index, prev_color, index, color, None, blend))
prev_color = color
prev_index = index
self.segments.append(
Segment(prev_index, prev_color, 1.0, first_color)) # make it wrap
def get_color_at(self, pos):
# returns the color at position x (0 <= x <= 1.0)
seg = self.get_segment_at(pos)
return seg.get_color_at(pos)
def get_segment_at(self, pos):
#Returns the segment in which pos resides.
if pos < 0.0:
raise IndexError("Must be between 0 and 1, is %s" % pos)
for seg in self.segments:
if pos <= seg.right:
return seg
# not found - must be > 1.0
raise IndexError("Must be between 0 and 1, is %s" % pos)
def get_index_at(self, pos):
# returns the index of the segment in which pos resides
if pos < 0.0:
raise IndexError("Must be between 0 and 1")
length = len(self.segments)
for i in xrange(length):
if pos <= self.segments[i].right:
return i
# not found - must be > 1.0
raise IndexError("Must be between 0 and 1")
    def add(self, segindex):
        # split the segment at index segindex in half
seg = self.segments[segindex]
if segindex+1 < len(self.segments):
# copy info from next segment to right
segright = self.segments[segindex+1]
right_index = segright.left
right_color = segright.left_color
else:
# adding at right-hand end
right_index = 1.0
right_color = seg.right_color
s_len = (seg.right-seg.left)
s_mid = seg.left + s_len*0.5
newcol= self.get_color_at(s_mid)
# update existing segment to occupy left half
seg.right = s_mid
seg.right_color = newcol
seg.center()
# add new segment to fill right half
self.segments.insert(
segindex+1,
Segment(s_mid, newcol,
right_index, right_color,
None,
seg.bmode, seg.cmode))
    def remove(self, segindex, smooth=False):
        # remove the segment at index segindex;
        # extend each of its neighbors so they get half its space each
if len(self.segments) < 2:
raise Error("Can't remove last segment")
seg = self.segments[segindex]
if segindex > 0:
# we have a previous segment
if segindex+1 < len(self.segments):
# and we have a next. Move them both to touch in the middle
self.segments[segindex-1].right=seg.mid
self.segments[segindex+1].left=seg.mid
self.segments[segindex-1].center()
self.segments[segindex+1].center()
if smooth:
midcolor = seg.get_color_at(seg.mid)
self.segments[segindex-1].right_color = copy.copy(midcolor)
self.segments[segindex+1].left_color = copy.copy(midcolor)
else:
# just a left-hand neighbor, let that take over
self.segments[segindex-1].right = 1.0
if smooth:
self.segments[segindex-1].right_color = \
copy.copy(self.segments[segindex].right_color)
self.segments[segindex-1].center()
else:
# we must have a later segment
self.segments[segindex+1].left=0.0
if smooth:
self.segments[segindex+1].left_color = \
copy.copy(self.segments[segindex].left_color)
self.segments[segindex+1].center()
self.segments.pop(segindex)
    def clamp(self, a, lower, upper):
        if a > upper:
            return upper
        elif a < lower:
            return lower
        else:
            return a
def set_left(self,i,pos):
# set left end of segment i to pos, if possible
if i < 0 or i >= len(self.segments):
raise IndexError("No such segment")
if i == 0:
# can't move left-hand end of entire gradient
return 0.0
else:
pos = self.clamp(pos,
self.segments[i-1].mid + Segment.EPSILON,
self.segments[i].mid - Segment.EPSILON)
self.segments[i-1].right = self.segments[i].left = pos
return pos
    def set_right(self,i,pos):
        # set right end of segment i to pos, if possible
if i < 0 or i >= len(self.segments):
raise IndexError("No such segment")
max = len(self.segments)-1
if i == max:
# can't move right-hand end of entire gradient
return 1.0
else:
pos = self.clamp(pos,
self.segments[i].mid + Segment.EPSILON,
self.segments[i+1].mid - Segment.EPSILON)
self.segments[i+1].left = self.segments[i].right = pos
return pos
def set_middle(self,i,pos):
# set middle of segment i to pos, if possible
if i < 0 or i >= len(self.segments):
raise IndexError("No such segment")
pos = self.clamp(pos,
self.segments[i].left + Segment.EPSILON,
self.segments[i].right - Segment.EPSILON)
self.segments[i].mid = pos
return pos
def broken_move(self, handle, move):
seg, side = self.getSegFromHandle(handle)
segindex = self.segments.index(seg)
if (segindex > 0 or side == 'right') and (segindex < len(self.segments)-1 or side == 'left'):
if side == 'left':
self.segments[segindex-1].right.pos+=move
if self.segments[segindex-1].right.pos > 1:
self.segments[segindex-1].right.pos = 1
elif self.segments[segindex-1].right.pos < 0:
self.segments[segindex-1].right.pos = 0
seg.left.pos+=move
if seg.left.pos > 1:
seg.left.pos =1
elif seg.left.pos < 0:
seg.left.pos =0
if seg.left.pos > seg.right.pos:
seg.left.pos = seg.right.pos
self.segments[segindex-1].right.pos=seg.right.pos
elif self.segments[segindex-1].right.pos < self.segments[segindex-1].left.pos:
self.segments[segindex-1].right.pos=self.segments[segindex-1].left.pos
seg.left.pos=self.segments[segindex-1].left.pos
else:
self.segments[segindex+1].left.pos+=move
if self.segments[segindex+1].left.pos > 1:
self.segments[segindex+1].left.pos = 1
elif self.segments[segindex+1].left.pos < 0:
self.segments[segindex+1].left.pos = 0
seg.right.pos+=move
if seg.right.pos > 1:
seg.right.pos =1
elif seg.right.pos < 0:
seg.right.pos =0
if seg.left.pos > seg.right.pos:
seg.right.pos=seg.left.pos
self.segments[segindex+1].left.pos=seg.left.pos
elif self.segments[segindex+1].right.pos < self.segments[segindex+1].left.pos:
self.segments[segindex+1].left.pos=self.segments[segindex+1].right.pos
seg.right.pos=self.segments[segindex+1].right.pos
# These two are adapted from the algorithms at
# http://www.cs.rit.edu/~ncs/color/t_convert.html
def RGBtoHSV(rgb):
hsv = [0,0,0,rgb[3]]
trgb = rgb[0:3]
trgb.sort()
min = trgb[0]
max = trgb[2]
delta = float(max - min)
hsv[2] = max
    if delta == 0:
        # r == g == b (gray): s = 0, h is undefined
        hsv[1] = 0
        hsv[0] = -1
else:
hsv[1]=delta / max
if rgb[0] == max:
hsv[0] = (rgb[1] - rgb[2]) / delta # between yellow & magenta
elif rgb[1] == max:
hsv[0] = 2 + (rgb[2] - rgb[0] ) / delta # between cyan & yellow
else:
hsv[0] = 4 + (rgb[0] - rgb[1] ) / delta # between magenta & cyan
hsv[0] *= 60 # degrees
if hsv[0] < 0:
hsv[0] += 360
return hsv
def HSVtoRGB(hsv):
    rgb = [0, 0, 0, hsv[3]] # pass through alpha channel
    h = hsv[0] / 60.0 # hue sector in 0..6; use a local so the caller's list is not mutated
    if hsv[1] == 0:
        # achromatic (gray); keep the alpha channel
        return [hsv[2], hsv[2], hsv[2], hsv[3]]
    i = int(h)
    f = h - i # fractional part of the hue sector
    p = hsv[2] * (1 - hsv[1])
    q = hsv[2] * (1 - hsv[1] * f)
    t = hsv[2] * (1 - hsv[1] * (1 - f))
if i == 0:
rgb[0] = hsv[2]
rgb[1] = t
rgb[2] = p
elif i == 1:
rgb[0] = q
rgb[1] = hsv[2]
rgb[2] = p
elif i == 2:
rgb[0] = p
rgb[1] = hsv[2]
rgb[2] = t
elif i == 3:
rgb[0] = p
rgb[1] = q
rgb[2] = hsv[2]
elif i == 4:
rgb[0] = t
rgb[1] = p
rgb[2] = hsv[2]
elif i == 5:
rgb[0] = hsv[2]
rgb[1] = p
rgb[2] = q
return rgb
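
# Hypothetical smoke test (not part of the original module): exercises the
# default gradient and the RGB<->HSV helpers with arbitrary values.
if __name__ == '__main__':
    g = Gradient()
    # the default gradient fades black to white, so 0.25 maps to 25% gray
    print g.get_color_at(0.25)
    # RGBtoHSV/HSVtoRGB should round-trip (within float precision)
    print HSVtoRGB(RGBtoHSV([0.2, 0.4, 0.6, 1.0]))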
|
#
# SfaAPI authentication
#
import sys
from vt_manager_kvm.communication.sfa.util.faults import InsufficientRights, MissingCallerGID, MissingTrustedRoots, PermissionError, \
BadRequestHash, ConnectionKeyGIDMismatch, SfaPermissionDenied
#from vt_manager_kvm.communication.sfa.util.config import Config
from vt_manager_kvm.communication.sfa.util.xrn import get_authority
from vt_manager_kvm.communication.sfa.trust.gid import GID
from vt_manager_kvm.communication.sfa.trust.rights import Rights
from vt_manager_kvm.communication.sfa.trust.certificate import Keypair, Certificate
from vt_manager_kvm.communication.sfa.trust.credential import Credential
from vt_manager_kvm.communication.sfa.trust.trustedroots import TrustedRoots
from vt_manager_kvm.communication.sfa.trust.hierarchy import Hierarchy
from vt_manager_kvm.communication.sfa.trust.sfaticket import SfaTicket
from vt_manager_kvm.communication.sfa.sfa_config import config as CONFIG
class Auth:
"""
Credential based authentication
"""
def __init__(self, peer_cert = None, config = None ):
self.peer_cert = peer_cert
self.hierarchy = Hierarchy()
#if not config:
self.config = CONFIG#Config()
self.load_trusted_certs()
def load_trusted_certs(self):
self.trusted_cert_list = TrustedRoots(self.config.TRUSTED_ROOTS_DIR).get_list()
self.trusted_cert_file_list = TrustedRoots(self.config.TRUSTED_ROOTS_DIR).get_file_list()
def checkCredentials(self, creds, operation, hrn = None):
valid = []
error = None
if not isinstance(creds, list):
creds = [creds]
for cred in creds:
try:
self.check(cred, operation, hrn)
valid.append(cred)
            except Exception as e:
                error = e
                continue
if not len(valid):
if not error:
error = "No valid credentials found"
raise InsufficientRights('Access denied: %s' % (str(error)))
return valid
def check(self, cred, operation, hrn = None):
"""
Check the credential against the peer cert (callerGID included
in the credential matches the caller that is connected to the
HTTPS connection, check if the credential was signed by a
trusted cert and check if the credential is allowed to perform
the specified operation.
"""
self.client_cred = Credential(string = cred)
self.client_gid = self.client_cred.get_gid_caller()
self.object_gid = self.client_cred.get_gid_object()
# make sure the client_gid is not blank
if not self.client_gid:
raise MissingCallerGID(self.client_cred.get_subject())
# validate the client cert if it exists
if self.peer_cert:
self.verifyPeerCert(self.peer_cert, self.client_gid)
# make sure the client is allowed to perform the operation
if operation:
if not self.client_cred.can_perform(operation):
raise InsufficientRights(operation)
if self.trusted_cert_list:
self.client_cred.verify(self.trusted_cert_file_list, self.config.SFA_CREDENTIAL_SCHEMA)
else:
raise MissingTrustedRoots(self.config.get_trustedroots_dir())
# Make sure the credential's target matches the specified hrn.
# This check does not apply to trusted peers
trusted_peers = [gid.get_hrn() for gid in self.trusted_cert_list]
if hrn and self.client_gid.get_hrn() not in trusted_peers:
target_hrn = self.object_gid.get_hrn()
if not hrn == target_hrn:
raise PermissionError("Target hrn: %s doesn't match specified hrn: %s " % \
(target_hrn, hrn) )
return True
def check_ticket(self, ticket):
"""
        Check if the ticket was signed by a trusted cert
"""
if self.trusted_cert_list:
client_ticket = SfaTicket(string=ticket)
client_ticket.verify_chain(self.trusted_cert_list)
else:
raise MissingTrustedRoots(self.config.get_trustedroots_dir())
return True
def verifyPeerCert(self, cert, gid):
# make sure the client_gid matches client's certificate
if not cert.is_pubkey(gid.get_pubkey()):
raise ConnectionKeyGIDMismatch(gid.get_subject()+":"+cert.get_subject())
def verifyGidRequestHash(self, gid, hash, arglist):
key = gid.get_pubkey()
if not key.verify_string(str(arglist), hash):
raise BadRequestHash(hash)
def verifyCredRequestHash(self, cred, hash, arglist):
gid = cred.get_gid_caller()
self.verifyGidRequestHash(gid, hash, arglist)
def validateGid(self, gid):
if self.trusted_cert_list:
gid.verify_chain(self.trusted_cert_list)
def validateCred(self, cred):
if self.trusted_cert_list:
cred.verify(self.trusted_cert_file_list)
def authenticateGid(self, gidStr, argList, requestHash=None):
gid = GID(string = gidStr)
self.validateGid(gid)
# request_hash is optional
if requestHash:
self.verifyGidRequestHash(gid, requestHash, argList)
return gid
def authenticateCred(self, credStr, argList, requestHash=None):
cred = Credential(string = credStr)
self.validateCred(cred)
# request hash is optional
if requestHash:
self.verifyCredRequestHash(cred, requestHash, argList)
return cred
def authenticateCert(self, certStr, requestHash):
cert = Certificate(string=certStr)
# xxx should be validateCred ??
self.validateCred(cert)
def gidNoop(self, gidStr, value, requestHash):
self.authenticateGid(gidStr, [gidStr, value], requestHash)
return value
def credNoop(self, credStr, value, requestHash):
self.authenticateCred(credStr, [credStr, value], requestHash)
return value
    def verify_cred_is_me(self, credential):
        cred = Credential(string=credential)
        caller_gid = cred.get_gid_caller()
        caller_hrn = caller_gid.get_hrn()
        if caller_hrn != self.config.SFA_INTERFACE_HRN:
            raise SfaPermissionDenied(caller_hrn)
        return
def get_auth_info(self, auth_hrn):
"""
Given an authority name, return the information for that authority.
This is basically a stub that calls the hierarchy module.
@param auth_hrn human readable name of authority
"""
return self.hierarchy.get_auth_info(auth_hrn)
    def verify_auth_belongs_to_me(self, name):
        """
        Verify that an authority belongs to our hierarchy.
        This is basically left up to the implementation of the hierarchy
        module. If the specified name does not belong, an exception is
        thrown indicating the caller should contact someone else.
        @param auth_name human readable name of authority
        """
        # get_auth_info will throw an exception if the authority doesn't exist
        self.get_auth_info(name)
def verify_object_belongs_to_me(self, name):
"""
Verify that an object belongs to our hierarchy. By extension,
this implies that the authority that owns the object belongs
to our hierarchy. If it does not an exception is thrown.
@param name human readable name of object
"""
auth_name = self.get_authority(name)
if not auth_name:
auth_name = name
if name == self.config.SFA_INTERFACE_HRN:
return
self.verify_auth_belongs_to_me(auth_name)
def verify_object_permission(self, name):
"""
Verify that the object gid that was specified in the credential
allows permission to the object 'name'. This is done by a simple
prefix test. For example, an object_gid for plc.arizona would
match the objects plc.arizona.slice1 and plc.arizona.
@param name human readable name to test
"""
object_hrn = self.object_gid.get_hrn()
if object_hrn == name:
return
if name.startswith(object_hrn + "."):
return
#if name.startswith(get_authority(name)):
#return
raise PermissionError(name)
def determine_user_rights(self, caller_hrn, reg_record):
"""
Given a user credential and a record, determine what set of rights the
user should have to that record.
This is intended to replace determine_user_rights() and
verify_cancreate_credential()
"""
rl = Rights()
type = reg_record.type
if type == 'slice':
# researchers in the slice are in the DB as-is
researcher_hrns = [ user.hrn for user in reg_record.reg_researchers ]
# locating PIs attached to that slice
slice_pis=reg_record.get_pis()
pi_hrns = [ user.hrn for user in slice_pis ]
if (caller_hrn in researcher_hrns + pi_hrns):
rl.add('refresh')
rl.add('embed')
rl.add('bind')
rl.add('control')
rl.add('info')
elif type == 'authority':
pi_hrns = [ user.hrn for user in reg_record.reg_pis ]
if (caller_hrn == self.config.SFA_INTERFACE_HRN):
rl.add('authority')
rl.add('sa')
rl.add('ma')
if (caller_hrn in pi_hrns):
rl.add('authority')
rl.add('sa')
# NOTE: for the PL implementation, this 'operators' list
# amounted to users with 'tech' role in that site
# it seems like this is not needed any longer, so for now I just drop that
# operator_hrns = reg_record.get('operator',[])
# if (caller_hrn in operator_hrns):
# rl.add('authority')
# rl.add('ma')
elif type == 'user':
rl.add('refresh')
rl.add('resolve')
rl.add('info')
elif type == 'node':
rl.add('operator')
return rl
def get_authority(self, hrn):
return get_authority(hrn)
    def filter_creds_by_caller(self, creds, caller_hrn_list):
        """
        Returns a list of creds whose gid caller matches one of the
        specified caller hrns
        """
        if not isinstance(creds, list):
            creds = [creds]
        if not isinstance(caller_hrn_list, list):
            caller_hrn_list = [caller_hrn_list]
        caller_creds = []
        for cred in creds:
            try:
                tmp_cred = Credential(string=cred)
                if tmp_cred.get_gid_caller().get_hrn() in caller_hrn_list:
                    caller_creds.append(cred)
            except:
                pass
        return caller_creds
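
# Hypothetical usage sketch (not part of the original module); the credential
# strings, the operation name and the hrn below are placeholders:
#
#   auth = Auth(peer_cert=None)
#   valid = auth.checkCredentials(cred_strings, 'resolve', hrn='myauthority.myslice')
#   rights = auth.determine_user_rights(caller_hrn, reg_record)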
|
from __future__ import print_function
from builtins import object
# reborrowed from https://github.com/monsur/jsoncompare
import json
import sys
TYPE = 'TYPE'
PATH = 'PATH'
VALUE = 'VALUE'
# Borrowed from http://djangosnippets.org/snippets/2247/
# with some modifications.
class Diff(object):
    def __init__(self, first, second, with_values=False):
        self.difference = []
        self.seen = []
        self.check(first, second, with_values=with_values)
def check(self, first, second, path='', with_values=False):
        if with_values and second is not None:
if not isinstance(first, type(second)):
message = '%s - %s, %s' % (path, type(first).__name__, type(second).__name__)
self.save_diff(message, TYPE)
if isinstance(first, dict):
for key in first:
# the first part of path must not have trailing dot.
if len(path) == 0:
new_path = key
else:
new_path = "%s.%s" % (path, key)
if isinstance(second, dict):
if key in second:
sec = second[key]
else:
                        # there is a key in the first that is not present in the second
                        self.save_diff(new_path, PATH)
                        # prevent further value checking
                        sec = None
# recursive call
                    if sec is not None:
self.check(first[key], sec, path=new_path, with_values=with_values)
else:
# second is not dict. every key from first goes to the difference
self.save_diff(new_path, PATH)
self.check(first[key], second, path=new_path, with_values=with_values)
# if object is list, loop over it and check.
elif isinstance(first, list):
for (index, item) in enumerate(first):
new_path = "%s[%s]" % (path, index)
# try to get the same index from second
sec = None
                if second is not None:
try:
sec = second[index]
except (IndexError, KeyError):
# goes to difference
self.save_diff('%s - %s' % (new_path, type(item).__name__), TYPE)
# recursive call
self.check(first[index], sec, path=new_path, with_values=with_values)
# not list, not dict. check for equality (only if with_values is True) and return.
else:
            if with_values and second is not None:
if first != second:
self.save_diff('%s - %s | %s' % (path, first, second), VALUE)
return
    def save_diff(self, diff_message, type_):
        if diff_message not in self.seen:
            self.seen.append(diff_message)
            self.difference.append((type_, diff_message))
def getContentFromFile(filePath):
    with open(filePath, 'r') as f:
        return f.read()

def getContent(location):
    if isinstance(location, dict):
        return location
    content = getContentFromFile(location)
    if content is None:
        raise ValueError("Could not load content for " + location)
    return json.loads(content)
def compare(location1, location2):
    json1 = getContent(location1)
    json2 = getContent(location2)
    diff1 = Diff(json1, json2, True).difference
    diff2 = Diff(json2, json1, False).difference
    diffs = []
    for diff_type, message in diff1:
        newType = 'CHANGED'
        if diff_type == PATH:
            newType = 'REMOVED'
        diffs.append({'type': newType, 'message': message})
    for _, message in diff2:
        diffs.append({'type': 'ADDED', 'message': message})
    return diffs
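
# A hypothetical illustration (not part of the original tool): since
# getContent() passes dicts through unchanged, two in-memory dicts can be
# compared directly without writing them to files.
def _example_compare():
    demo = compare({'a': 1, 'b': 2}, {'a': 1, 'c': 3})
    # expected: [{'type': 'REMOVED', 'message': 'b'}, {'type': 'ADDED', 'message': 'c'}]
    for d in demo:
        print(d['type'] + ': ' + d['message'])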
if __name__ == '__main__':
    if len(sys.argv) != 3:
        sys.exit('usage: %s <file1> <file2>' % sys.argv[0])
location1 = sys.argv[1]
location2 = sys.argv[2]
diffs = compare(location1, location2)
if len(diffs) > 0:
print('\r\nFound differences comparing ' + location1 + ' and ' + location2)
for diff in diffs:
print(diff['type'] + ': ' + diff['message'])
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import functools
import re
from typing import Dict, Sequence, Tuple, Type, Union
import pkg_resources
import google.api_core.client_options as ClientOptions # type: ignore
from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
from google.cloud.dialogflow_v2.services.conversations import pagers
from google.cloud.dialogflow_v2.types import conversation
from google.cloud.dialogflow_v2.types import conversation as gcd_conversation
from google.cloud.dialogflow_v2.types import participant
from google.protobuf import timestamp_pb2 # type: ignore
from .transports.base import ConversationsTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import ConversationsGrpcAsyncIOTransport
from .client import ConversationsClient
class ConversationsAsyncClient:
"""Service for managing
[Conversations][google.cloud.dialogflow.v2.Conversation].
"""
_client: ConversationsClient
DEFAULT_ENDPOINT = ConversationsClient.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = ConversationsClient.DEFAULT_MTLS_ENDPOINT
conversation_path = staticmethod(ConversationsClient.conversation_path)
parse_conversation_path = staticmethod(ConversationsClient.parse_conversation_path)
conversation_profile_path = staticmethod(ConversationsClient.conversation_profile_path)
parse_conversation_profile_path = staticmethod(ConversationsClient.parse_conversation_profile_path)
message_path = staticmethod(ConversationsClient.message_path)
parse_message_path = staticmethod(ConversationsClient.parse_message_path)
common_billing_account_path = staticmethod(ConversationsClient.common_billing_account_path)
parse_common_billing_account_path = staticmethod(ConversationsClient.parse_common_billing_account_path)
common_folder_path = staticmethod(ConversationsClient.common_folder_path)
parse_common_folder_path = staticmethod(ConversationsClient.parse_common_folder_path)
common_organization_path = staticmethod(ConversationsClient.common_organization_path)
parse_common_organization_path = staticmethod(ConversationsClient.parse_common_organization_path)
common_project_path = staticmethod(ConversationsClient.common_project_path)
parse_common_project_path = staticmethod(ConversationsClient.parse_common_project_path)
common_location_path = staticmethod(ConversationsClient.common_location_path)
parse_common_location_path = staticmethod(ConversationsClient.parse_common_location_path)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
ConversationsAsyncClient: The constructed client.
"""
return ConversationsClient.from_service_account_info.__func__(ConversationsAsyncClient, info, *args, **kwargs) # type: ignore
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
ConversationsAsyncClient: The constructed client.
"""
return ConversationsClient.from_service_account_file.__func__(ConversationsAsyncClient, filename, *args, **kwargs) # type: ignore
from_service_account_json = from_service_account_file
@property
def transport(self) -> ConversationsTransport:
"""Returns the transport used by the client instance.
Returns:
ConversationsTransport: The transport used by the client instance.
"""
return self._client.transport
get_transport_class = functools.partial(type(ConversationsClient).get_transport_class, type(ConversationsClient))
def __init__(self, *,
credentials: ga_credentials.Credentials = None,
transport: Union[str, ConversationsTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the conversations client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ~.ConversationsTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (ClientOptions): Custom options for the client. It
won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
"""
self._client = ConversationsClient(
credentials=credentials,
transport=transport,
client_options=client_options,
client_info=client_info,
)
async def create_conversation(self,
request: gcd_conversation.CreateConversationRequest = None,
*,
parent: str = None,
conversation: gcd_conversation.Conversation = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> gcd_conversation.Conversation:
r"""Creates a new conversation. Conversations are auto-completed
after 24 hours.
Conversation Lifecycle: There are two stages during a
conversation: Automated Agent Stage and Assist Stage.
For Automated Agent Stage, there will be a dialogflow agent
responding to user queries.
For Assist Stage, there's no dialogflow agent responding to user
queries. But we will provide suggestions which are generated
from conversation.
If
[Conversation.conversation_profile][google.cloud.dialogflow.v2.Conversation.conversation_profile]
is configured for a dialogflow agent, conversation will start
from ``Automated Agent Stage``, otherwise, it will start from
``Assist Stage``. And during ``Automated Agent Stage``, once an
[Intent][google.cloud.dialogflow.v2.Intent] with
[Intent.live_agent_handoff][google.cloud.dialogflow.v2.Intent.live_agent_handoff]
is triggered, conversation will transfer to Assist Stage.
Args:
request (:class:`google.cloud.dialogflow_v2.types.CreateConversationRequest`):
The request object. The request message for
[Conversations.CreateConversation][google.cloud.dialogflow.v2.Conversations.CreateConversation].
parent (:class:`str`):
Required. Resource identifier of the project creating
the conversation. Format:
``projects/<Project ID>/locations/<Location ID>``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
conversation (:class:`google.cloud.dialogflow_v2.types.Conversation`):
Required. The conversation to create.
This corresponds to the ``conversation`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.dialogflow_v2.types.Conversation:
Represents a conversation.
A conversation is an interaction between
an agent, including live agents and
Dialogflow agents, and a support
customer. Conversations can include
phone calls and text-based chat
sessions.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, conversation])
if request is not None and has_flattened_params:
raise ValueError("If the `request` argument is set, then none of "
"the individual field arguments should be set.")
request = gcd_conversation.CreateConversationRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if conversation is not None:
request.conversation = conversation
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_conversation,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("parent", request.parent),
)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def list_conversations(self,
request: conversation.ListConversationsRequest = None,
*,
parent: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListConversationsAsyncPager:
r"""Returns the list of all conversations in the
specified project.
Args:
request (:class:`google.cloud.dialogflow_v2.types.ListConversationsRequest`):
The request object. The request message for
[Conversations.ListConversations][google.cloud.dialogflow.v2.Conversations.ListConversations].
parent (:class:`str`):
Required. The project from which to list all
                conversations. Format:
``projects/<Project ID>/locations/<Location ID>``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.dialogflow_v2.services.conversations.pagers.ListConversationsAsyncPager:
The response message for
[Conversations.ListConversations][google.cloud.dialogflow.v2.Conversations.ListConversations].
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError("If the `request` argument is set, then none of "
"the individual field arguments should be set.")
request = conversation.ListConversationsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_conversations,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("parent", request.parent),
)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# This method is paged; wrap the response in a pager, which provides
# an `__aiter__` convenience method.
response = pagers.ListConversationsAsyncPager(
method=rpc,
request=request,
response=response,
metadata=metadata,
)
# Done; return the response.
return response
async def get_conversation(self,
request: conversation.GetConversationRequest = None,
*,
name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> conversation.Conversation:
r"""Retrieves the specific conversation.
Args:
request (:class:`google.cloud.dialogflow_v2.types.GetConversationRequest`):
The request object. The request message for
[Conversations.GetConversation][google.cloud.dialogflow.v2.Conversations.GetConversation].
name (:class:`str`):
Required. The name of the conversation. Format:
``projects/<Project ID>/locations/<Location ID>/conversations/<Conversation ID>``.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.dialogflow_v2.types.Conversation:
Represents a conversation.
A conversation is an interaction between
an agent, including live agents and
Dialogflow agents, and a support
customer. Conversations can include
phone calls and text-based chat
sessions.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError("If the `request` argument is set, then none of "
"the individual field arguments should be set.")
request = conversation.GetConversationRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_conversation,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("name", request.name),
)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def complete_conversation(self,
request: conversation.CompleteConversationRequest = None,
*,
name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> conversation.Conversation:
r"""Completes the specified conversation. Finished
conversations are purged from the database after 30
days.
Args:
request (:class:`google.cloud.dialogflow_v2.types.CompleteConversationRequest`):
The request object. The request message for
[Conversations.CompleteConversation][google.cloud.dialogflow.v2.Conversations.CompleteConversation].
name (:class:`str`):
Required. Resource identifier of the conversation to
close. Format:
``projects/<Project ID>/locations/<Location ID>/conversations/<Conversation ID>``.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.dialogflow_v2.types.Conversation:
Represents a conversation.
A conversation is an interaction between
an agent, including live agents and
Dialogflow agents, and a support
customer. Conversations can include
phone calls and text-based chat
sessions.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError("If the `request` argument is set, then none of "
"the individual field arguments should be set.")
request = conversation.CompleteConversationRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.complete_conversation,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("name", request.name),
)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def list_messages(self,
request: conversation.ListMessagesRequest = None,
*,
parent: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListMessagesAsyncPager:
r"""Lists messages that belong to a given conversation. ``messages``
are ordered by ``create_time`` in descending order. To fetch
updates without duplication, send request with filter
``create_time_epoch_microseconds > [first item's create_time of previous request]``
and empty page_token.
Args:
request (:class:`google.cloud.dialogflow_v2.types.ListMessagesRequest`):
The request object. The request message for
[Conversations.ListMessages][google.cloud.dialogflow.v2.Conversations.ListMessages].
parent (:class:`str`):
Required. The name of the conversation to list messages
for. Format:
``projects/<Project ID>/locations/<Location ID>/conversations/<Conversation ID>``
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.dialogflow_v2.services.conversations.pagers.ListMessagesAsyncPager:
The response message for
[Conversations.ListMessages][google.cloud.dialogflow.v2.Conversations.ListMessages].
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError("If the `request` argument is set, then none of "
"the individual field arguments should be set.")
request = conversation.ListMessagesRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_messages,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("parent", request.parent),
)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# This method is paged; wrap the response in a pager, which provides
# an `__aiter__` convenience method.
response = pagers.ListMessagesAsyncPager(
method=rpc,
request=request,
response=response,
metadata=metadata,
)
# Done; return the response.
return response
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-dialogflow",
).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = (
"ConversationsAsyncClient",
)
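# A minimal usage sketch (an assumption, not part of the generated client):
# closing a conversation through the flattened `name` argument documented
# above. The resource name is a placeholder; credentials and event-loop
# setup are assumed to be handled by the caller.
async def _example_complete_conversation():
    client = ConversationsAsyncClient()
    # The client builds a CompleteConversationRequest from the keyword.
    return await client.complete_conversation(
        name="projects/my-project/locations/global/conversations/my-conversation",
    )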
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
(c) 2014 Brant Faircloth || http://faircloth-lab.org/
All rights reserved.
This code is distributed under a 3-clause BSD license. Please see
LICENSE.txt for more information.
Created on 21 April 2014 20:54 PDT (-0700)
"""
import os
import sys
import glob
import shutil
import argparse
import subprocess
from Bio import AlignIO
import pdb
class FullPaths(argparse.Action):
"""Expand user- and relative-paths"""
def __call__(self, parser, namespace, values, option_string=None):
setattr(namespace, self.dest, os.path.abspath(os.path.expanduser(values)))
class CreateDir(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
# get the full path
d = os.path.abspath(os.path.expanduser(values))
# check to see if directory exists
if os.path.exists(d):
answer = raw_input("[WARNING] Output directory exists, REMOVE [Y/n]? ")
if answer == "Y":
shutil.rmtree(d)
else:
print "[QUIT]"
sys.exit()
# create the new directory
os.makedirs(d)
# return the full path
setattr(namespace, self.dest, d)
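# A minimal wiring sketch (assumed, not from the original script): how these
# two argparse actions are typically attached. The option names are invented.
def _example_parser():
    parser = argparse.ArgumentParser(description="demo of FullPaths/CreateDir")
    # FullPaths normalizes '~' and relative paths to absolute ones
    parser.add_argument("--alignments", action=FullPaths,
        help="directory containing input alignments")
    # CreateDir prompts before clobbering and then creates the directory
    parser.add_argument("--output", action=CreateDir,
        help="directory to create for output")
    return parser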
class GroupError(Exception):
def __init__(self, message, group, alignment):
# Call the base class constructor with the parameters it needs
Exception.__init__(self, message)
# Now for your custom code...
self.group = group
self.alignment = alignment
def is_dir(dirname):
if not os.path.isdir(dirname):
msg = "{0} is not a directory".format(dirname)
raise argparse.ArgumentTypeError(msg)
else:
return dirname
def is_file(filename):
    if not os.path.isfile(filename):
msg = "{0} is not a file".format(filename)
raise argparse.ArgumentTypeError(msg)
else:
return filename
def which(prog):
cmd = ["which", prog]
proc = subprocess.Popen(
cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
)
stdout, stderr = proc.communicate()
    if stderr:
        raise EnvironmentError("Program {} does not appear to be installed".format(prog))
else:
return stdout.strip()
def get_alignments(alignment_dir):
alignments = []
for ftype in ('.phylip', '.phy'):
alignments.extend(glob.glob(os.path.join(alignment_dir, "*{}".format(ftype))))
return alignments
def satisfy_one_taxon_group(taxa_in_align, taxon_group):
    # isinstance() returns a bool and never raises, so test it directly
    if not isinstance(taxon_group, list):
        raise AssertionError("Taxon group is not a list.")
    group_set = set(taxon_group)
    # ensure there is at least one member in the group
    return len(taxa_in_align.intersection(group_set)) >= 1
def get_taxa_in_alignment(alignment):
aln = AlignIO.read(alignment, "phylip-relaxed")
taxa_in_align = set([taxon.id for taxon in aln])
return taxa_in_align
def satisfy_all_taxon_groups(alignment, taxon_groups):
    """given an input alignment, ensure every taxon group has a member in the file"""
    taxa_in_align = get_taxa_in_alignment(alignment)
    for group_name, taxon_group in taxon_groups.iteritems():
        # raise immediately so the error names the group that actually failed
        if not satisfy_one_taxon_group(taxa_in_align, taxon_group):
            raise GroupError(
                "Not all taxa present in Group",
                group_name,
                os.path.basename(alignment),
            )
    return True
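# A hedged driver sketch (an assumption, not in the original file): screen
# every phylip alignment in a directory, keeping those where each taxon
# group is represented.
def _example_screen_alignments(alignment_dir, taxon_groups):
    kept = []
    for alignment in get_alignments(alignment_dir):
        try:
            satisfy_all_taxon_groups(alignment, taxon_groups)
            kept.append(alignment)
        except GroupError:
            # skip alignments missing a member of some group
            pass
    return kept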
|
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls import patterns, url
from ffad import views
urlpatterns = patterns('',
url(r'^$', views.index, name='index'),
url(r'^(?P<draft_id>\d+)/$', views.draft),
url(r'^(?P<draft_id>\d+)/register$', views.register),
url(r'^(?P<draft_id>\d+)/get_manager_updates$', views.get_manager_updates),
url(r'^(?P<draft_id>\d+)/get_player_updates$', views.get_player_updates),
url(r'^(?P<draft_id>\d+)/get_team$', views.get_team),
url(r'^(?P<draft_id>\d+)/place_bid$', views.place_bid)
)
|
#!/usr/bin/env python3
'''
tests/lib/subtest.py
Utility functions for running sub-tests within a test case. Includes additional
logging to add context during sub-test execution.
'''
import logging
import unittest
from tests.lib.decorator import log_function
logger = logging.getLogger('sublime-ycmd.' + __name__)
def _is_args_kwargs(test_case):
if not isinstance(test_case, (tuple, list)):
return False
if len(test_case) != 2:
return False
if not isinstance(test_case[1], dict):
return False
return True
def map_test_function(test_instance, test_function, test_cases):
assert isinstance(test_instance, unittest.TestCase), \
'test instance must be a unittest.TestCase: %r' % (test_instance)
assert callable(test_function), \
'test function must be callable: %r' % (test_function)
assert hasattr(test_cases, '__iter__'), \
'test cases must be iterable: %r' % (test_cases)
for test_index, test_case in enumerate(test_cases, start=1):
is_args_kwargs = _is_args_kwargs(test_case)
is_kwargs = isinstance(test_case, dict)
is_args = not (is_args_kwargs or is_kwargs)
if is_args_kwargs:
test_args, test_kwargs = test_case
elif is_kwargs:
test_args = tuple()
test_kwargs = test_case
elif is_args:
test_args = test_case
test_kwargs = dict()
log_args = is_args_kwargs or is_args
log_kwargs = is_args_kwargs or is_kwargs
wrapped_test_function = log_function(
desc='[%d]' % (test_index),
include_args=log_args, include_kwargs=log_kwargs,
)(test_function)
with test_instance.subTest(num=test_index,
args=test_args, kwargs=test_kwargs):
wrapped_test_function(*test_args, **test_kwargs)
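# An illustrative driver (an assumption, not part of the test suite) showing
# the three test-case shapes map_test_function accepts side by side.
def _example_map_test_function(test_instance):
    def check(a, b=0):
        assert a >= b, '%r should be >= %r' % (a, b)
    map_test_function(test_instance, check, [
        (1,),                   # positional args only
        {'a': 2, 'b': 1},       # kwargs only
        ((3,), {'b': 2}),       # (args, kwargs) pair
    ])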
|
# This file is part of goo-dataproxy.
#
# Copyright (c) 2013-2014 by Nucleo de Computacao Cientifica, UNESP
#
# Authors:
# Beraldo Leal <beraldo AT ncc DOT unesp DOT br>
# Gabriel von. Winckler <winckler AT ncc DOT unesp DOT br>
#
# goo-dataproxy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# goo-dataproxy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with goo-dataproxy. If not, see <http://www.gnu.org/licenses/>.
#
from urlparse import urlparse
from django.conf import settings
from django.core.servers.basehttp import FileWrapper
import os
def upload(file_obj, sha1):
    if not os.path.exists(settings.STORAGE_BACKEND_LOCAL_DIR):
        os.makedirs(settings.STORAGE_BACKEND_LOCAL_DIR)  # pragma: no cover
    output = os.path.join(settings.STORAGE_BACKEND_LOCAL_DIR, sha1)
    # read the source in binary mode and copy it chunk by chunk, closing
    # both handles when done
    with open(file_obj, 'rb') as input_file, open(output, 'wb') as destination:
        while True:
            chunk = input_file.read(1024)
            if not chunk:
                break
            destination.write(chunk)
def download(sha1):
    filename = os.path.join(settings.STORAGE_BACKEND_LOCAL_DIR, sha1)
    return open(filename, 'rb')
def delete(sha1):
    filename = os.path.join(settings.STORAGE_BACKEND_LOCAL_DIR, sha1)
os.unlink(filename)
|
from __future__ import unicode_literals
import unittest
import spotifyconnect
from spotifyconnect import utils
class ErrorTest(unittest.TestCase):
def test_error_is_an_exception(self):
error = spotifyconnect.Error(0)
self.assertIsInstance(error, Exception)
def test_maybe_raise(self):
with self.assertRaises(spotifyconnect.LibError):
spotifyconnect.Error.maybe_raise(
spotifyconnect.ErrorType.WrongAPIVersion)
def test_maybe_raise_does_not_raise_if_ok(self):
spotifyconnect.Error.maybe_raise(spotifyconnect.ErrorType.Ok)
def test_maybe_raise_does_not_raise_if_error_is_ignored(self):
spotifyconnect.Error.maybe_raise(
spotifyconnect.ErrorType.WrongAPIVersion,
ignores=[spotifyconnect.ErrorType.WrongAPIVersion])
def test_maybe_raise_works_with_any_iterable(self):
spotifyconnect.Error.maybe_raise(
spotifyconnect.ErrorType.WrongAPIVersion,
ignores=(spotifyconnect.ErrorType.WrongAPIVersion,))
class LibErrorTest(unittest.TestCase):
def test_is_an_error(self):
error = spotifyconnect.LibError(0)
self.assertIsInstance(error, spotifyconnect.Error)
def test_has_error_type(self):
error = spotifyconnect.LibError(0)
self.assertEqual(error.error_type, 0)
error = spotifyconnect.LibError(1)
self.assertEqual(error.error_type, 1)
def test_is_equal_if_same_error_type(self):
self.assertEqual(
spotifyconnect.LibError(0),
spotifyconnect.LibError(0))
def test_is_not_equal_if_different_error_type(self):
self.assertNotEqual(
spotifyconnect.LibError(0),
spotifyconnect.LibError(1))
def test_error_has_useful_repr(self):
error = spotifyconnect.LibError(0)
self.assertIn('Ok', repr(error))
def test_error_has_useful_string_representation(self):
error = spotifyconnect.LibError(0)
self.assertEqual('%s' % error, 'Ok')
self.assertIsInstance('%s' % error, utils.text_type)
error = spotifyconnect.LibError(3)
self.assertEqual('%s' % error, 'WrongAPIVersion')
def test_has_error_constants(self):
self.assertEqual(
spotifyconnect.LibError.Ok,
spotifyconnect.LibError(
spotifyconnect.ErrorType.Ok))
self.assertEqual(
spotifyconnect.LibError.WrongAPIVersion,
spotifyconnect.LibError(spotifyconnect.ErrorType.WrongAPIVersion))
class ErrorTypeTest(unittest.TestCase):
def test_has_error_type_constants(self):
self.assertEqual(spotifyconnect.ErrorType.Ok, 0)
self.assertEqual(spotifyconnect.ErrorType.Failed, 1)
self.assertEqual(spotifyconnect.ErrorType.InitFailed, 2)
self.assertEqual(spotifyconnect.ErrorType.WrongAPIVersion, 3)
self.assertEqual(spotifyconnect.ErrorType.NullArgument, 4)
self.assertEqual(spotifyconnect.ErrorType.InvalidArgument, 5)
self.assertEqual(spotifyconnect.ErrorType.Uninitialized, 6)
self.assertEqual(spotifyconnect.ErrorType.AlreadyInitialized, 7)
self.assertEqual(spotifyconnect.ErrorType.LoginBadCredentials, 8)
self.assertEqual(spotifyconnect.ErrorType.NeedsPremium, 9)
self.assertEqual(spotifyconnect.ErrorType.TravelRestriction, 10)
self.assertEqual(spotifyconnect.ErrorType.ApplicationBanned, 11)
self.assertEqual(spotifyconnect.ErrorType.GeneralLoginError, 12)
self.assertEqual(spotifyconnect.ErrorType.Unsupported, 13)
self.assertEqual(spotifyconnect.ErrorType.NotActiveDevice, 14)
self.assertEqual(spotifyconnect.ErrorType.PlaybackErrorStart, 1000)
self.assertEqual(spotifyconnect.ErrorType.GeneralPlaybackError, 1001)
self.assertEqual(spotifyconnect.ErrorType.PlaybackRateLimited, 1002)
self.assertEqual(spotifyconnect.ErrorType.Unknown, 1003)
class TimeoutTest(unittest.TestCase):
def test_is_an_error(self):
error = spotifyconnect.Timeout(0.5)
self.assertIsInstance(error, spotifyconnect.Error)
def test_has_useful_repr(self):
error = spotifyconnect.Timeout(0.5)
self.assertIn('Operation did not complete in 0.500s', repr(error))
def test_has_useful_string_representation(self):
error = spotifyconnect.Timeout(0.5)
self.assertEqual('%s' % error, 'Operation did not complete in 0.500s')
self.assertIsInstance('%s' % error, utils.text_type)
|
# coding: utf-8
# This Source Code is subject to the terms of the Mozilla Public License
# version 2.0 (the "License"). You can obtain a copy of the License at
# http://mozilla.org/MPL/2.0/.
import os, sys, re, subprocess, buildtools
from getopt import getopt, GetoptError
class Command(object):
name = property(lambda self: self._name)
shortDescription = property(lambda self: self._shortDescription,
lambda self, value: self.__dict__.update({'_shortDescription': value}))
description = property(lambda self: self._description,
lambda self, value: self.__dict__.update({'_description': value}))
params = property(lambda self: self._params,
lambda self, value: self.__dict__.update({'_params': value}))
supportedTypes = property(lambda self: self._supportedTypes,
lambda self, value: self.__dict__.update({'_supportedTypes': value}))
options = property(lambda self: self._options)
def __init__(self, handler, name):
self._handler = handler
self._name = name
self._shortDescription = ''
self._description = ''
self._params = ''
self._supportedTypes = None
self._options = []
self.addOption('Show this message and exit', short='h', long='help')
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
pass
def __call__(self, baseDir, scriptName, opts, args, type):
return self._handler(baseDir, scriptName, opts, args, type)
def isSupported(self, type):
        return self._supportedTypes is None or type in self._supportedTypes
def addOption(self, description, short=None, long=None, value=None):
self._options.append((description, short, long, value))
def parseArgs(self, args):
        shortOptions = map(lambda o: o[1]+':' if o[3] is not None else o[1], filter(lambda o: o[1] is not None, self._options))
        longOptions = map(lambda o: o[2]+'=' if o[3] is not None else o[2], filter(lambda o: o[2] is not None, self._options))
return getopt(args, ''.join(shortOptions), longOptions)
commandsList = []
commands = {}
def addCommand(handler, name):
if isinstance(name, basestring):
aliases = ()
else:
name, aliases = (name[0], name[1:])
global commandsList, commands
command = Command(handler, name)
commandsList.append(command)
commands[name] = command
for alias in aliases:
commands[alias] = command
return command
def splitByLength(string, maxLen):
parts = []
currentPart = ''
for match in re.finditer(r'\s*(\S+)', string):
if len(match.group(0)) + len(currentPart) < maxLen:
currentPart += match.group(0)
else:
parts.append(currentPart)
currentPart = match.group(1)
if len(currentPart):
parts.append(currentPart)
return parts
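def _exampleSplitByLength():
    # Illustrative only (not used by buildtools): words pack greedily into
    # chunks shorter than maxLen, and a word that would overflow starts the
    # next chunk without its leading whitespace.
    assert splitByLength('create downloads for the new version', 20) == \
        ['create downloads', 'for the new version']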
def usage(scriptName, type, commandName=None):
    if commandName is None:
global commandsList
descriptions = []
for command in commandsList:
if not command.isSupported(type):
continue
commandText = ('%s %s' % (command.name, command.params)).ljust(39)
descriptionParts = splitByLength(command.shortDescription, 29)
descriptions.append(' %s %s %s' % (scriptName, commandText, descriptionParts[0]))
for part in descriptionParts[1:]:
descriptions.append(' %s %s %s' % (' ' * len(scriptName), ' ' * len(commandText), part))
print '''Usage:
%(descriptions)s
For details on a command run:
%(scriptName)s <command> --help
''' % {
'scriptName': scriptName,
'descriptions': '\n'.join(descriptions)
}
else:
global commands
command = commands[commandName]
description = '\n'.join(map(lambda s: '\n'.join(splitByLength(s, 80)), command.description.split('\n')))
options = []
for descr, short, long, value in command.options:
            if short is None:
                shortText = ''
            elif value is None:
                shortText = '-%s' % short
            else:
                shortText = '-%s %s' % (short, value)
            if long is None:
                longText = ''
            elif value is None:
                longText = '--%s' % long
            else:
                longText = '--%s=%s' % (long, value)
descrParts = splitByLength(descr, 46)
options.append(' %s %s %s' % (shortText.ljust(11), longText.ljust(19), descrParts[0]))
for part in descrParts[1:]:
options.append(' %s %s %s' % (' ' * 11, ' ' * 19, part))
print '''%(scriptName)s %(name)s %(params)s
%(description)s
Options:
%(options)s
''' % {
'scriptName': scriptName,
'name': command.name,
'params': command.params,
'description': description,
'options': '\n'.join(options)
}
def runBuild(baseDir, scriptName, opts, args, type):
locales = None
buildNum = None
multicompartment = False
releaseBuild = False
keyFile = None
limitMetadata = False
for option, value in opts:
if option in ('-l', '--locales'):
locales = value.split(',')
elif option in ('-b', '--build'):
buildNum = int(value)
elif option in ('-k', '--key'):
keyFile = value
elif option in ('-m', '--multi-compartment'):
multicompartment = True
elif option in ('-r', '--release'):
releaseBuild = True
elif option == '--babelzilla':
locales = 'all'
limitMetadata = True
outFile = args[0] if len(args) > 0 else None
if type == 'gecko':
import buildtools.packager as packager
packager.createBuild(baseDir, outFile=outFile, locales=locales, buildNum=buildNum,
releaseBuild=releaseBuild, keyFile=keyFile,
limitMetadata=limitMetadata, multicompartment=multicompartment)
elif type == 'kmeleon':
import buildtools.packagerKMeleon as packagerKMeleon
packagerKMeleon.createBuild(baseDir, outFile=outFile, locales=locales,
buildNum=buildNum, releaseBuild=releaseBuild)
def runAutoInstall(baseDir, scriptName, opts, args, type):
if len(args) == 0:
print 'Port of the Extension Auto-Installer needs to be specified'
usage(scriptName, type, 'autoinstall')
return
multicompartment = False
for option, value in opts:
if option in ('-m', '--multi-compartment'):
multicompartment = True
if ':' in args[0]:
host, port = args[0].rsplit(':', 1)
else:
host, port = ('localhost', args[0])
import buildtools.packager as packager
packager.autoInstall(baseDir, host, port, multicompartment=multicompartment)
def setupTranslations(baseDir, scriptName, opts, args, type):
if len(args) < 1:
print 'Project key is required to update translation master files.'
usage(scriptName, type, 'setuptrans')
return
key = args[0]
import buildtools.packager as packager
locales = packager.getLocales(baseDir, True)
basename = packager.readMetadata(baseDir).get('general', 'baseName')
import buildtools.localeTools as localeTools
localeTools.setupTranslations(locales, basename, key)
def updateTranslationMaster(baseDir, scriptName, opts, args, type):
if len(args) < 1:
print 'Project key is required to update translation master files.'
usage(scriptName, type, 'translate')
return
key = args[0]
import buildtools.packager as packager
defaultLocaleDir = os.path.join(packager.getLocalesDir(baseDir), packager.defaultLocale)
basename = packager.readMetadata(baseDir).get('general', 'baseName')
import buildtools.localeTools as localeTools
localeTools.updateTranslationMaster(defaultLocaleDir, packager.defaultLocale, basename, key)
def getTranslations(baseDir, scriptName, opts, args, type):
if len(args) < 1:
print 'Project key is required to update translation master files.'
usage(scriptName, type, 'translate')
return
key = args[0]
import buildtools.packager as packager
localesDir = packager.getLocalesDir(baseDir)
basename = packager.readMetadata(baseDir).get('general', 'baseName')
import buildtools.localeTools as localeTools
localeTools.getTranslations(localesDir, packager.defaultLocale, basename, key)
def showDescriptions(baseDir, scriptName, opts, args, type):
locales = None
for option, value in opts:
if option in ('-l', '--locales'):
locales = value.split(',')
import buildtools.packager as packager
    if locales is None:
locales = packager.getLocales(baseDir)
elif locales == 'all':
locales = packager.getLocales(baseDir, True)
data = packager.readLocaleMetadata(baseDir, locales)
localeCodes = data.keys()
localeCodes.sort()
for localeCode in localeCodes:
locale = data[localeCode]
print ('''%s
%s
%s
%s
%s
''' % (localeCode,
locale['name'] if 'name' in locale else 'None',
locale['description'] if 'description' in locale else 'None',
locale['description.short'] if 'description.short' in locale else 'None',
locale['description.long'] if 'description.long' in locale else 'None',
)).encode('utf-8')
def generateDocs(baseDir, scriptName, opts, args, type):
if len(args) == 0:
print 'No target directory specified for the documentation'
usage(scriptName, type, 'docs')
return
targetDir = args[0]
toolkit = None
for option, value in opts:
if option in ('-t', '--toolkit'):
toolkit = value
    if toolkit is None:
toolkit = os.path.join(baseDir, 'jsdoc-toolkit')
if not os.path.exists(toolkit):
subprocess.Popen(['hg', 'clone', 'https://hg.adblockplus.org/jsdoc-toolkit/', toolkit]).communicate()
command = [sys.executable,
os.path.join(toolkit, 'jsrun.py'),
'-t=' + os.path.join(toolkit, 'templates', 'jsdoc'),
'-d=' + targetDir,
'-a',
'-p',
'-x=js,jsm',
os.path.join(baseDir, 'modules'),
os.path.join(baseDir, 'components')]
subprocess.Popen(command).communicate()
def runReleaseAutomation(baseDir, scriptName, opts, args, type):
buildtoolsRepo = buildtools.__path__[0]
keyFile = None
downloadsRepo = os.path.join(baseDir, '..', 'downloads')
for option, value in opts:
if option in ('-k', '--key'):
keyFile = value
elif option in ('-d', '--downloads'):
downloadsRepo = value
if type == 'gecko':
if len(args) == 0:
print 'No version number specified for the release'
usage(scriptName, type, 'release')
return
version = args[0]
if re.search(r'[^\w\.]', version):
print 'Wrong version number format'
usage(scriptName, type, 'release')
return
        if keyFile is None:
print 'Warning: no key file specified, creating an unsigned release build\n'
import buildtools.releaseAutomation as releaseAutomation
releaseAutomation.run(baseDir, version, keyFile, downloadsRepo, buildtoolsRepo)
else:
import buildtools.releaseAutomationKMeleon as releaseAutomationKMeleon
releaseAutomationKMeleon.run(baseDir, downloadsRepo, buildtoolsRepo)
with addCommand(lambda baseDir, scriptName, opts, args, type: usage(scriptName, type), ('help', '-h', '--help')) as command:
command.shortDescription = 'Show this message'
with addCommand(runBuild, 'build') as command:
command.shortDescription = 'Create a build'
command.description = 'Creates an extension build with given file name. If output_file is missing a default name will be chosen.'
command.params = '[options] [output_file]'
command.addOption('Only include the given locales (if omitted: all locales not marked as incomplete)', short='l', long='locales', value='l1,l2,l3')
command.addOption('Use given build number (if omitted the build number will be retrieved from Mercurial)', short='b', long='build', value='num')
command.addOption('File containing private key and certificates required to sign the package', short='k', long='key', value='file')
command.addOption('Create a build for leak testing', short='m', long='multi-compartment')
command.addOption('Create a release build', short='r', long='release')
command.addOption('Create a build for Babelzilla', long='babelzilla')
command.supportedTypes = ('gecko', 'kmeleon')
with addCommand(runAutoInstall, 'autoinstall') as command:
command.shortDescription = 'Install extension automatically'
command.description = 'Will automatically install the extension in a browser running Extension Auto-Installer. If host parameter is omitted assumes that the browser runs on localhost.'
command.params = '[<host>:]<port>'
command.addOption('Create a build for leak testing', short='m', long='multi-compartment')
    command.supportedTypes = ('gecko',)
with addCommand(setupTranslations, 'setuptrans') as command:
command.shortDescription = 'Sets up translation languages'
command.description = 'Sets up translation languages for the project on crowdin.net.'
command.params = '[options] project-key'
    command.supportedTypes = ('gecko',)
with addCommand(updateTranslationMaster, 'translate') as command:
command.shortDescription = 'Updates translation master files'
command.description = 'Updates the translation master files in the project on crowdin.net.'
command.params = '[options] project-key'
    command.supportedTypes = ('gecko',)
with addCommand(getTranslations, 'gettranslations') as command:
command.shortDescription = 'Downloads translation updates'
command.description = 'Downloads updated translations from crowdin.net.'
command.params = '[options] project-key'
    command.supportedTypes = ('gecko',)
with addCommand(showDescriptions, 'showdesc') as command:
command.shortDescription = 'Print description strings for all locales'
command.description = 'Display description strings for all locales as specified in the corresponding meta.properties files.'
command.addOption('Only include the given locales', short='l', long='locales', value='l1,l2,l3')
command.params = '[options]'
    command.supportedTypes = ('gecko',)
with addCommand(generateDocs, 'docs') as command:
command.shortDescription = 'Generate documentation'
command.description = 'Generate documentation files and write them into the specified directory.'
command.addOption('JsDoc Toolkit location', short='t', long='toolkit', value='dir')
command.params = '[options] <directory>'
    command.supportedTypes = ('gecko',)
with addCommand(runReleaseAutomation, 'release') as command:
command.shortDescription = 'Run release automation'
command.description = 'Note: If you are not the project owner then you '\
'probably don\'t want to run this!\n\n'\
'Runs release automation: creates downloads for the new version, tags '\
'source code repository as well as downloads and buildtools repository.'
command.addOption('File containing private key and certificates required to sign the release', short='k', long='key', value='file')
command.addOption('Directory containing downloads repository (if omitted ../downloads is assumed)', short='d', long='downloads', value='dir')
command.params = '[options] <version>'
command.supportedTypes = ('gecko', 'kmeleon')
def processArgs(baseDir, args, type='gecko'):
global commands
scriptName = os.path.basename(args[0])
args = args[1:]
if len(args) == 0:
args = ['build']
print '''
No command given, assuming "build". For a list of commands run:
%s help
''' % scriptName
command = args[0]
if command in commands:
if commands[command].isSupported(type):
try:
opts, args = commands[command].parseArgs(args[1:])
except GetoptError, e:
print str(e)
usage(scriptName, type, command)
sys.exit(2)
for option, value in opts:
if option in ('-h', '--help'):
usage(scriptName, type, command)
sys.exit()
commands[command](baseDir, scriptName, opts, args, type)
else:
print 'Command %s is not supported for this application type' % command
usage(scriptName, type)
else:
print 'Command %s is unrecognized' % command
usage(scriptName, type)
|
"""
Copyright 2017 Felix Widmaier
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from distutils.core import setup
setup(
name='bull',
version='2017.1',
packages=['bull.models', 'bull.optimizers', 'bull.layers',
'bull.layers.nn', 'bull.layers.rnn', 'bull.layers.conv', 'bull.core.util',
'bull.cost'],
url='https://github.com/felixwidmaier/bull',
license='Apache License 2.0',
author='Felix Widmaier',
author_email='dev.felixwidmaier@gmx.de',
    description='Up and coming machine learning library',
    requires=['numpy', 'h5py']
)
|
import unittest
import join_circles
class Test(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def testStock(self):
self.assertEqual(18, join_circles.numCombos([1,2,3,4], 3))
self.assertEqual(180, join_circles.numCombos([1,2,3,4,5], 3))
s, num = join_circles.getProduct([1,2,3,4,5], 1)
self.assertEqual(720, s)
s, num = join_circles.getProduct([1,2,3,4,5], 2)
self.assertEqual(5850, s)
s, num = join_circles.getProduct([1,2,3,4,5], 3)
self.assertEqual(25200, s)
k=6
self.assertEqual(150, join_circles.numCombos([i+1 for i in range(k)], 2))
self.assertEqual(900, join_circles.numCombos([i+1 for i in range(k)], 3))
self.assertEqual(2700, join_circles.numCombos([i+1 for i in range(k)], 4))
s, num = join_circles.getProduct([i+1 for i in range(k)], 2)
self.assertEqual(join_circles.getSquare([i+1 for i in range(k)], 2), s)
s, num = join_circles.getProduct([i+1 for i in range(k)], 3)
self.assertEqual(join_circles.getSquare([i+1 for i in range(k)], 3), s)
s, num = join_circles.getProduct([i+1 for i in range(k)], 4)
self.assertEqual(join_circles.getSquare([i+1 for i in range(k)], 4), s)
k=7
s, num = join_circles.getProduct([i+1 for i in range(k)], 2)
self.assertEqual(join_circles.getSquare([i+1 for i in range(k)], 2), s)
s, num = join_circles.getProduct([i+1 for i in range(k)], 3)
self.assertEqual(join_circles.getSquare([i+1 for i in range(k)], 3), s)
s, num = join_circles.getProduct([i+1 for i in range(k)], 4)
self.assertEqual(join_circles.getSquare([i+1 for i in range(k)], 4), s)
s, num = join_circles.getProduct([i+1 for i in range(k)], 5)
self.assertEqual(join_circles.getSquare([i+1 for i in range(k)], 5), s)
if __name__ == "__main__":
unittest.main()
|
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details
# http://www.gnu.org/licenses/gpl-3.0.txt
from ..base import FreeSpaceAPIBase
from ...logging import make_logger # isort:skip
log = make_logger(__name__)
class FreeSpaceAPI(FreeSpaceAPIBase):
async def get_free_space(self, path):
"""Return free space in directory `path` in bytes"""
response = await self._rpc.free_space(path=path)
log.debug('Free space in %r: %r', path, response)
if path == response['path']:
return response['size-bytes']
else:
raise RuntimeError('Expected path %r, got %r' % (path, response['path']))
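# A hedged usage sketch (an assumption, not part of this module): a concrete
# client would supply `self._rpc`; callers then await the coroutine.
async def _example_get_free_space(freespace_api, path='/downloads'):
    # Returns free bytes, or raises RuntimeError on a path mismatch.
    size = await freespace_api.get_free_space(path)
    log.debug('%r has %d bytes free', path, size)
    return size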
|
import pytest
from chirc import replies
import chirc.tests.fixtures as fixtures
class TestWHOIS(object):
@pytest.mark.category("WHOIS")
def test_whois1(self, irc_session):
client1 = irc_session.connect_user("user1", "User One")
client2 = irc_session.connect_user("user2", "User Two")
client1.send_cmd("WHOIS user2")
reply = irc_session.get_reply(client1, expect_code = replies.RPL_WHOISUSER,
expect_nparams = 5, long_param_re = "User Two")
reply = irc_session.get_reply(client1, expect_code = replies.RPL_WHOISSERVER,
expect_nparams = 3)
reply = irc_session.get_reply(client1, expect_code = replies.RPL_ENDOFWHOIS,
expect_nparams = 2, long_param_re = "End of WHOIS list")
@pytest.mark.category("WHOIS")
def test_whois_nonick(self, irc_session):
client1 = irc_session.connect_user("user1", "User One")
client1.send_cmd("WHOIS user2")
reply = irc_session.get_reply(client1, expect_code = replies.ERR_NOSUCHNICK, expect_nick = "user1",
expect_nparams = 2, expect_short_params = ["user2"],
long_param_re = "No such nick/channel")
def _test_userchannels(self, irc_session, channels, nick, channelstring):
whois_channels = channelstring[0:-1].split()
for qchannel in whois_channels:
if qchannel[0] in ('@', '+'):
modchar = qchannel[0]
channel = qchannel[1:]
else:
modchar = ""
channel = qchannel
users = channels[channel]
assert modchar + nick in users, "RPL_WHOISCHANNELS: Expected {} to be in {} (for channels '{}')".format(modchar + nick, channel, channelstring)
@pytest.mark.category("UPDATE_1B")
def test_whois2(self, irc_session):
users = irc_session.connect_and_join_channels(fixtures.channels3)
users["user1"].send_cmd("WHOIS user2")
reply = irc_session.get_reply(users["user1"], expect_code = replies.RPL_WHOISUSER,
expect_nparams = 5, long_param_re = "user2")
reply = irc_session.get_reply(users["user1"], expect_code = replies.RPL_WHOISCHANNELS,
expect_nparams = 2)
self._test_userchannels(irc_session, fixtures.channels3, "user2", reply.params[2][1:])
reply = irc_session.get_reply(users["user1"], expect_code = replies.RPL_WHOISSERVER,
expect_nparams = 3)
reply = irc_session.get_reply(users["user1"], expect_code = replies.RPL_ENDOFWHOIS,
expect_nparams = 2, long_param_re = "End of WHOIS list")
@pytest.mark.category("UPDATE_1B")
def test_whois3(self, irc_session):
users = irc_session.connect_and_join_channels(fixtures.channels3, aways=["user8"], ircops=["user8"])
users["user1"].send_cmd("WHOIS user8")
reply = irc_session.get_reply(users["user1"], expect_code = replies.RPL_WHOISUSER,
expect_nparams = 5, long_param_re = "user8")
reply = irc_session.get_reply(users["user1"], expect_code = replies.RPL_WHOISCHANNELS,
expect_nparams = 2)
self._test_userchannels(irc_session, fixtures.channels3, "user8", reply.params[2][1:])
reply = irc_session.get_reply(users["user1"], expect_code = replies.RPL_WHOISSERVER,
expect_nparams = 3)
reply = irc_session.get_reply(users["user1"], expect_code = replies.RPL_AWAY, expect_nick = "user1",
expect_nparams = 2, expect_short_params = ["user8"],
long_param_re = "I'm away")
reply = irc_session.get_reply(users["user1"], expect_code = replies.RPL_WHOISOPERATOR,
expect_nparams = 2, expect_short_params = ["user8"],
long_param_re = "is an IRC operator")
reply = irc_session.get_reply(users["user1"], expect_code = replies.RPL_ENDOFWHOIS,
expect_nparams = 2, long_param_re = "End of WHOIS list")
|
from django import forms
from django.contrib.auth.models import User
from captcha.fields import CaptchaField
from models import UserProfile
class UserForm(forms.ModelForm):
password = forms.CharField(required=True, widget=forms.PasswordInput(
attrs={'class':'form-control',
'placeholder' :'Password',
}))
captcha = CaptchaField()
username = forms.CharField(required=True, widget=forms.TextInput(
attrs={'class':'form-control',
'placeholder' :'User Name',
}))
email = forms.EmailField(required=True,widget=forms.TextInput(
attrs={'class':'form-control',
'placeholder' :'name@gmail.com',
}))
class Meta:
model = User
fields = ('username', 'email', 'password')
class UserProfileForm(forms.ModelForm):
phone = forms.CharField(required=True,widget=forms.TextInput(
attrs={'class':'form-control',
'placeholder' :'(416)-111-1234',
}))
class Meta:
model = UserProfile
fields = ('phone',)
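# A minimal view-side sketch (an assumption, not part of this module): the
# two forms are usually validated together during registration. Assumes
# UserProfile has a `user` foreign key.
def _example_register(request):
    user_form = UserForm(data=request.POST)
    profile_form = UserProfileForm(data=request.POST)
    if user_form.is_valid() and profile_form.is_valid():
        user = user_form.save()
        user.set_password(user.password)  # hash the raw password
        user.save()
        profile = profile_form.save(commit=False)
        profile.user = user
        profile.save()
    return user_form, profile_form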
|
# -*- coding:utf-8 -*-
# @author xupingmao <578749341@qq.com>
# @since 2020/12/08 01:46:35
# @modified 2021/07/04 17:09:54
# @since 2018-11-22 00:46:26
import os
import re
import math
import time
import web
import xconfig
import xutils
import xauth
import xmanager
import xtables
import random
from xutils import cacheutil
from xutils.htmlutil import *
from xutils import dbutil
from xtemplate import BasePlugin
HEADER = """
<!-- Plugin header -->
<div class="card">
<div class="grid-title btn-line-height">
<span>{{plugin.title}}</span>
<div class="float-right">
            <a class="btn btn-default" href="/fs_list">Favorites</a>
            <a class="btn btn-default" href="/fs_tools">Tools</a>
</div>
</div>
</div>
{% include plugin/header/plugin_category.html %}
"""
HTML = '''
<div class="card">
{% for note in notes %}
<a class="list-link" href="{{note.url}}">
<span>{{note.title}}</span>
<div class="float-right">
{% if note.visit_cnt != None %}
<i class="fa fa-eye-o"></i>
                <span class="plugin-right-span">Popularity: {{note.visit_cnt}}</span>
{% end %}
<i class="fa fa-chevron-right"></i>
</div>
</a>
{% end %}
</div>
'''
class Main(BasePlugin):
    title = u"File Tools"
category = "dir"
rows = 0
editable = False
def handle(self, input):
user = xauth.current_name()
notes = xmanager.find_plugins("dir")
xmanager.add_visit_log(user, "/fs_tools")
self.writeheader(HEADER, plugin = self, plugin_category = "dir")
self.writetemplate(HTML, notes = notes)
xurls = (
r"/fs_tools", Main
)
|
"""
Test utils
"""
import tempfile
import PIL
import numpy as np
from clouds.util.constants import HealthStatus
def createXors(tgt):
#create test xor images
xorIn = [
((255, 255, 255, 255), HealthStatus.GOOD),
((255, 255, 0, 0), HealthStatus.CLOUDY),
((0, 0, 0, 0), HealthStatus.GOOD),
((0, 0, 255, 255), HealthStatus.CLOUDY),
]
xorImages = []
for ar, expected in xorIn:
npar = np.array(ar, dtype=np.uint8).reshape(2, 2)
image = PIL.Image.fromarray(npar)
#pybrain needs a lot of test input. We'll make 20 of each image
for i in range(20):
path = tempfile.mktemp(suffix=".png", prefix='xor_', dir=tgt)
image.save(path)
xorImages.append((path, expected))
return xorImages
class MockStream(object):
def __init__(self, inputQueue):
"""
A class used as a replacement for stream objects. As data are recieved on the inputQueue,
make them available to `readline`.
"""
self.q = inputQueue
def read(self):
return [l for l in self.readline()]
def readline(self):
"""
Block until an item appears in the queue.
"""
return self.q.get()
def close(self):
pass
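# A short usage sketch (illustrative assumption): `queue_obj` is any object
# with a blocking get(), e.g. a Queue.Queue. Items put on the queue come
# back from readline() as if read from a real stream.
def _example_mock_stream(queue_obj):
    stream = MockStream(queue_obj)
    return stream.readline()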
|
"""Testcases for the clusterstats module."""
import unittest
import json
from pprint import pprint
import httpretty
from requests import HTTPError, Timeout
import pandas as pd
from clusterstats import http
from clusterstats import stats
class ClusterStatsTest(unittest.TestCase):
def test_read_servers(self):
"""Test the _read_servers private method"""
self.assertEquals(len(http._read_servers("data/servers.txt")), 1000)
def test_transform_hostname_to_http_endpoint(self):
"""Test the _transform_hostname_to_http_endpoint"""
hosts=["server1", "server2"]
expected_out=["http://server1/status", "http://server2/status"]
self.assertEquals(http._transform_hostname_to_http_endpoint(hosts), expected_out)
@httpretty.activate
def test_http_OK(self):
"""Test Http Connectivity - Success scenario"""
url='http://myserver/status'
content=('{"Application":"Webapp2","Version":"0.0.2",'
'"Uptime":8102471691,"Request_Count":4134752620,'
'"Error_Count":2772072365,"Success_Count":1362680255}')
httpretty.register_uri(
method=httpretty.GET,
uri=url,
status=200,
body=content,
content_type="application/json"
)
result=self._connect(url)
self.assertEquals(result[0], 0)
self.assertTrue(result[1]["Application"] == "Webapp2")
@httpretty.activate
def test_http_404(self):
"""Test HTTP Error Condition"""
def exception_callback(request, uri, headers):
raise HTTPError("404 Page Not found.")
url='http://myserver/status'
httpretty.register_uri(
method=httpretty.GET,
uri=url,
status=404,
body=exception_callback,
content_type="application/json"
)
result=self._connect(url)
self.assertEquals(-1,result[0])
@httpretty.activate
def test_http_timeout(self):
"""Test Timeout Condition"""
def exception_callback(request, uri, headers):
raise Timeout("Connection Timeout.")
url='http://myserver/status'
httpretty.register_uri(
method=httpretty.GET,
uri=url,
status=504,
body=exception_callback,
content_type="application/json"
)
result=self._connect(url)
self.assertEquals(-1,result[0])
@httpretty.activate
def test_json_error(self):
"""Test ValueError if JSON not returned."""
url='http://myserver/status'
content="Hello World"
httpretty.register_uri(
method=httpretty.GET,
uri=url,
status=200,
body=content,
content_type="application/json"
)
result=self._connect(url)
self.assertEquals(-1,result[0])
@httpretty.activate
def test_http_retries(self):
"""Test HTTP Session connection retries """
def exception_callback(request, uri, headers):
raise Timeout("Connection Timeout. - 2")
url='http://myserver/status'
content=('{"Application":"Webapp2","Version":"0.0.2",'
'"Uptime":8102471691,"Request_Count":4134752620,'
'"Error_Count":2772072365,"Success_Count":1362680255}')
httpretty.register_uri(
method=httpretty.GET,
uri=url,
responses=[
httpretty.Response(body=exception_callback, status=504),
httpretty.Response(body=exception_callback, status=504),
httpretty.Response(body=content, status=200, content_type="application/json"),
])
result=self._connect(url)
self.assertEquals(result[0], 0)
self.assertTrue(result[1]["Application"] == "Webapp2")
@httpretty.activate
def test_query_status(self):
""" Test query_status """
url1='http://myserver1/status'
url2='http://myserver2/status'
url3='http://myserver3/status'
url4='http://myserver4/status'
url5='http://myserver5/status'
content=('{"Application":"Webapp2","Version":"0.0.2",'
'"Uptime":8102471691,"Request_Count":4134752620,'
'"Error_Count":2772072365,"Success_Count":1362680255}')
bad_content="Hello World..."
httpretty.register_uri(
method=httpretty.GET,
uri=url1,
status=200,
body=content,
content_type="application/json"
)
httpretty.register_uri(
method=httpretty.GET,
uri=url2,
status=200,
body=content,
content_type="application/json"
)
httpretty.register_uri(
method=httpretty.GET,
uri=url3,
status=200,
body=content,
content_type="application/json"
)
httpretty.register_uri(
method=httpretty.GET,
uri=url4,
status=200,
body=content,
content_type="application/json"
)
httpretty.register_uri(
method=httpretty.GET,
uri=url5,
status=200,
body=bad_content,
content_type="application/json"
)
results=http.query_status([url1, url2, url3, url4, url5], 3, 2, 3)
##expecting success count = 4 and failure = 1
success_list=filter(lambda x: x[0] == 0, results)
failure_list=filter(lambda x: x[0] == -1, results)
self.assertTrue(len(success_list) == 4)
self.assertTrue(len(failure_list) == 1)
def test_calc_qos(self):
"""Test Calculating QoS """
        self.assertEqual(stats.calc_qos(100, 99), 99.0)
def test_check_qos(self):
"""Test Check QoS method """
self.assertTrue(stats.check_qos(99.0, 100, 99))
self.assertFalse(stats.check_qos(99.1, 100, 99))
def test_calc_stats(self):
"""Testing Stats Calculation"""
d = [{"Application":"Webapp1","Version":"1.2.1","Uptime":9634484391,"Request_Count":7729359104,
"Error_Count":3394574268,"Success_Count":4334784836},
{"Application":"Webapp1","Version":"1.2.1","Uptime":9634484391,"Request_Count":7729359104,
"Error_Count":3394574268,"Success_Count":4334784836},
{"Application":"Database2","Version":"0.1.0","Uptime":8982039907,"Request_Count":2174448763,
"Error_Count":2001963223,"Success_Count":172485540}]
df = stats.calc_stats(d, ['Application', 'Version'], 'Success_Count', stats.OPERATOR_ADD)
        self.assertEqual(df.shape[0], 2)  # expecting two rows
@httpretty.activate
def test_success_flow(self):
"""Integration test of the http results and stats calculation."""
url1='http://myserver1/status'
url2='http://myserver2/status'
url3='http://myserver3/status'
content=('{"Application":"Webapp2","Version":"0.0.2",'
'"Uptime":8102471691,"Request_Count":4134752620,'
'"Error_Count":2772072365,"Success_Count":1362680255}')
content2=('{"Application":"Database2","Version":"0.0.2",'
'"Uptime":8102471691,"Request_Count":172485540,'
'"Error_Count":2772072365,"Success_Count":1362680255}')
httpretty.register_uri(
method=httpretty.GET,
uri=url1,
status=200,
body=content,
content_type="application/json"
)
httpretty.register_uri(
method=httpretty.GET,
uri=url2,
status=200,
body=content,
content_type="application/json"
)
httpretty.register_uri(
method=httpretty.GET,
uri=url3,
status=200,
body=content2,
content_type="application/json"
)
results=http.query_status([url1, url2, url3], 3, 2, 3)
success_list=filter(lambda x: x[0] == 0, results)
failure_list=filter(lambda x: x[0] == -1, results) # expect no failure
self.assertEquals(failure_list, [])
data = [msg for (status, msg) in results]
df = stats.calc_stats(data, [stats.FIELD_APPLICATION, stats.FIELD_VERSION],
stats.FIELD_SUCCESS_COUNT, stats.OPERATOR_ADD)
        self.assertEqual(df.shape[0], 2)
pprint(df)
def _connect(self, url):
result=http._get_server_status(url,10,3)
# print result
return result
if __name__ == '__main__':
unittest.main()
|
# Copyright 2017 Eficent Business and IT Consulting Services S.L.
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
from odoo import api, fields, models
import odoo.addons.decimal_precision as dp
class AnalyticResourcePlanLine(models.Model):
_inherit = 'analytic.resource.plan.line'
@api.multi
def _compute_quantities(self):
for line in self:
stock = line.with_context(
analytic_account_id=line.account_id.id).product_id.\
_product_available()
if stock.get(line.product_id.id, False):
line.incoming_qty = stock[line.product_id.id]['incoming_qty']
line.outgoing_qty = stock[line.product_id.id]['outgoing_qty']
line.virtual_available = \
stock[line.product_id.id]['virtual_available']
line.qty_available = stock[line.product_id.id]['qty_available']
else:
line.incoming_qty = 0.0
line.outgoing_qty = 0.0
line.virtual_available = 0.0
line.qty_available = 0.0
@api.multi
def _compute_done_quantities(self):
for line in self:
stock = line.with_context(
analytic_account_id_out=line.account_id.id).product_id.\
_product_available()
if stock.get(line.product_id.id, False):
# available in customer means done
line.outgoing_done_qty = (
stock[line.product_id.id]['qty_available'])
else:
line.outgoing_done_qty = 0.0
line.incoming_done_qty = (line.qty_available - line.outgoing_qty
- line.outgoing_done_qty)
qty_available = fields.Float(
string='Qty Available',
digits=dp.get_precision('Product Unit of Measure'),
compute='_compute_quantities',
help="Current quantity of products. "
"In a context with a single Stock Location, this includes "
"goods stored at this Location, or any of its children. "
"In a context with a single Warehouse, this includes "
"goods stored in the Stock Location of this Warehouse, "
"or any of its children. "
"In a context with a single Shop, this includes goods "
"stored in the Stock Location of the Warehouse of this Shop, "
"or any of its children. "
"Otherwise, this includes goods stored in any Stock Location "
"with 'internal' type."
)
virtual_available = fields.Float(
string='Virtually available',
compute='_compute_quantities',
digits=dp.get_precision('Product Unit of Measure'),
help="Forecast quantity (computed as Quantity On Hand "
"- Outgoing + Incoming) "
"In a context with a single Stock Location, this includes "
"goods stored in this location, or any of its children. "
"In a context with a single Warehouse, this includes "
"goods stored in the Stock Location of this Warehouse, "
"or any of its children. "
"In a context with a single Shop, this includes goods "
"stored in the Stock Location of the Warehouse of this Shop, "
"or any of its children. "
"Otherwise, this includes goods stored in any Stock Location "
"with 'internal' type."
)
incoming_qty = fields.Float(
string='Qty Incoming',
digits=dp.get_precision('Product Unit of Measure'),
compute='_compute_quantities',
help="Quantity of products that are planned to arrive. "
"In a context with a single Stock Location, this includes "
"goods arriving to this Location, or any of its children. "
"In a context with a single Warehouse, this includes "
"goods arriving to the Stock Location of this Warehouse, or "
"any of its children. "
"In a context with a single Shop, this includes goods "
"arriving to the Stock Location of the Warehouse of this "
"Shop, or any of its children. "
"Otherwise, this includes goods arriving to any Stock "
"Location with 'internal' type."
)
outgoing_qty = fields.Float(
string='Outgoing quantity',
default=lambda self: self.unit_amount,
compute='_compute_quantities',
digits=dp.get_precision('Product Unit of Measure'),
help="Quantity of products that are planned to leave. "
"In a context with a single Stock Location, this includes "
"goods leaving this Location, or any of its children. "
"In a context with a single Warehouse, this includes "
"goods leaving the Stock Location of this Warehouse, or "
"any of its children. "
"In a context with a single Shop, this includes goods "
"leaving the Stock Location of the Warehouse of this "
"Shop, or any of its children. "
"Otherwise, this includes goods leaving any Stock "
"Location with 'internal' type."
)
incoming_done_qty = fields.Float(
string='Qty Incoming Done',
digits=dp.get_precision('Product Unit of Measure'),
compute='_compute_done_quantities',
help="Quantity of products that have been produced or have "
"arrived."
)
outgoing_done_qty = fields.Float(
string='Qty Outgoing Done',
default=lambda self: self.unit_amount,
compute='_compute_done_quantities',
digits=dp.get_precision('Product Unit of Measure'),
help="Quantity of products that have been consumed or delivered."
)
|
# Django settings for metadata project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@domain.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'metadata.db', # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = ''
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = '58cc^f6dp^b)^7ih*++-jiqcd4b78jb$w$0-l4kz^yt7tls3-g'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.load_template_source',
'django.template.loaders.app_directories.load_template_source',
# 'django.template.loaders.eggs.load_template_source',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
)
ROOT_URLCONF = 'metadata.urls'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'metadata',
'metadata.testapp',
)
|
from django.utils.translation import ugettext_lazy as _
from connected_accounts.conf import settings
from connected_accounts.provider_pool import providers
from .base import OAuth2Provider, ProviderAccount
class InstagramAccount(ProviderAccount):
PROFILE_URL = 'http://instagram.com/'
def get_profile_url(self):
return self.PROFILE_URL + self.account.extra_data.get('username', '')
def get_avatar_url(self):
return self.account.extra_data.get('profile_picture')
def to_str(self):
default = super(InstagramAccount, self).to_str()
return self.account.extra_data.get('username', default)
def extract_common_fields(self):
data = self.account.extra_data
return dict(username=data.get('username'),
name=data.get('full_name'))
class InstagramProvider(OAuth2Provider):
id = 'instagram'
name = _('Instagram')
account_class = InstagramAccount
access_token_url = 'https://api.instagram.com/oauth/access_token'
authorization_url = 'https://api.instagram.com/oauth/authorize'
profile_url = 'https://api.instagram.com/v1/users/self'
consumer_key = settings.CONNECTED_ACCOUNTS_INSTAGRAM_CONSUMER_KEY
consumer_secret = settings.CONNECTED_ACCOUNTS_INSTAGRAM_CONSUMER_SECRET
scope = settings.CONNECTED_ACCOUNTS_INSTAGRAM_SCOPE
def extract_uid(self, data):
return str(data['data']['id'])
def extract_extra_data(self, data):
return data.get('data', {})
providers.register(InstagramProvider)
|
import numpy as np
def chained_updates(bounds, C_0_nnz, new_objval_at_feasible = None, new_objval_at_relaxation = None, MAX_CHAIN_COUNT = 20):
new_bounds = dict(bounds)
# update objval_min using new_value (only done once)
if new_objval_at_relaxation is not None:
if new_bounds['objval_min'] < new_objval_at_relaxation:
new_bounds['objval_min'] = new_objval_at_relaxation
# update objval_max using new_value (only done once)
if new_objval_at_feasible is not None:
if new_bounds['objval_max'] > new_objval_at_feasible:
new_bounds['objval_max'] = new_objval_at_feasible
# we have already converged
if new_bounds['objval_max'] <= new_bounds['objval_min']:
new_bounds['objval_max'] = max(new_bounds['objval_max'], new_bounds['objval_min'])
new_bounds['objval_min'] = min(new_bounds['objval_max'], new_bounds['objval_min'])
new_bounds['loss_max'] = min(new_bounds['objval_max'], new_bounds['loss_max'])
return new_bounds
# start update chain
chain_count = 0
improved_bounds = True
while improved_bounds and chain_count < MAX_CHAIN_COUNT:
improved_bounds = False
L0_penalty_min = np.sum(np.sort(C_0_nnz)[np.arange(int(new_bounds['L0_min']))])
L0_penalty_max = np.sum(-np.sort(-C_0_nnz)[np.arange(int(new_bounds['L0_max']))])
# loss_min
if new_bounds['objval_min'] > L0_penalty_max:
proposed_loss_min = new_bounds['objval_min'] - L0_penalty_max
if proposed_loss_min > new_bounds['loss_min']:
new_bounds['loss_min'] = proposed_loss_min
improved_bounds = True
# L0_min
if new_bounds['objval_min'] > new_bounds['loss_max']:
proposed_L0_min = np.ceil((new_bounds['objval_min'] - new_bounds['loss_max']) / np.min(C_0_nnz))
if proposed_L0_min > new_bounds['L0_min']:
new_bounds['L0_min'] = proposed_L0_min
improved_bounds = True
# objval_min = max(objval_min, loss_min + L0_penalty_min)
        proposed_objval_min = new_bounds['loss_min'] + L0_penalty_min
if proposed_objval_min > new_bounds['objval_min']:
new_bounds['objval_min'] = proposed_objval_min
improved_bounds = True
# loss max
if new_bounds['objval_max'] > L0_penalty_min:
proposed_loss_max = new_bounds['objval_max'] - L0_penalty_min
if proposed_loss_max < new_bounds['loss_max']:
new_bounds['loss_max'] = proposed_loss_max
improved_bounds = True
# L0_max
if new_bounds['objval_max'] > new_bounds['loss_min']:
proposed_L0_max = np.floor((new_bounds['objval_max'] - new_bounds['loss_min']) / np.min(C_0_nnz))
if proposed_L0_max < new_bounds['L0_max']:
new_bounds['L0_max'] = proposed_L0_max
improved_bounds = True
# objval_max = min(objval_max, loss_max + penalty_max)
proposed_objval_max = new_bounds['loss_max'] + L0_penalty_max
if proposed_objval_max < new_bounds['objval_max']:
new_bounds['objval_max'] = proposed_objval_max
improved_bounds = True
chain_count += 1
return new_bounds
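# A minimal usage sketch for chained_updates. The bound keys mirror the ones
# read by the function above; the cost vector and objective values are made
# up for illustration and do not come from a real fit.
def _demo_chained_updates():
    C_0_nnz = np.array([0.01, 0.01, 0.02, 0.05])
    bounds = {
        'objval_min': 0.0,
        'objval_max': 1.0,
        'loss_min': 0.0,
        'loss_max': 1.0,
        'L0_min': 0,
        'L0_max': len(C_0_nnz),
    }
    # A new feasible solution with objective 0.4 pulls objval_max and,
    # through the chain, loss_max down to 0.4.
    return chained_updates(bounds, C_0_nnz, new_objval_at_feasible=0.4)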
def chained_updates_for_lp(bounds, C_0_nnz, new_objval_at_feasible = None, new_objval_at_relaxation = None, MAX_CHAIN_COUNT = 20):
new_bounds = dict(bounds)
# update objval_min using new_value (only done once)
if new_objval_at_relaxation is not None:
if new_bounds['objval_min'] < new_objval_at_relaxation:
new_bounds['objval_min'] = new_objval_at_relaxation
# update objval_max using new_value (only done once)
if new_objval_at_feasible is not None:
if new_bounds['objval_max'] > new_objval_at_feasible:
new_bounds['objval_max'] = new_objval_at_feasible
if new_bounds['objval_max'] <= new_bounds['objval_min']:
new_bounds['objval_max'] = max(new_bounds['objval_max'], new_bounds['objval_min'])
new_bounds['objval_min'] = min(new_bounds['objval_max'], new_bounds['objval_min'])
new_bounds['loss_max'] = min(new_bounds['objval_max'], new_bounds['loss_max'])
return new_bounds
# start update chain
chain_count = 0
improved_bounds = True
C_0_min = np.min(C_0_nnz)
C_0_max = np.max(C_0_nnz)
L0_penalty_min = C_0_min * new_bounds['L0_min']
L0_penalty_max = min(C_0_max * new_bounds['L0_max'], new_bounds['objval_max'])
while improved_bounds and chain_count < MAX_CHAIN_COUNT:
improved_bounds = False
# loss_min
if new_bounds['objval_min'] > L0_penalty_max:
proposed_loss_min = new_bounds['objval_min'] - L0_penalty_max
if proposed_loss_min > new_bounds['loss_min']:
new_bounds['loss_min'] = proposed_loss_min
improved_bounds = True
# L0_min and L0_penalty_min
if new_bounds['objval_min'] > new_bounds['loss_max']:
proposed_L0_min = (new_bounds['objval_min'] - new_bounds['loss_max']) / C_0_min
if proposed_L0_min > new_bounds['L0_min']:
new_bounds['L0_min'] = proposed_L0_min
L0_penalty_min = max(L0_penalty_min, C_0_min * proposed_L0_min)
improved_bounds = True
# objval_min = max(objval_min, loss_min + L0_penalty_min)
        proposed_objval_min = new_bounds['loss_min'] + L0_penalty_min
if proposed_objval_min > new_bounds['objval_min']:
new_bounds['objval_min'] = proposed_objval_min
improved_bounds = True
# loss max
if new_bounds['objval_max'] > L0_penalty_min:
proposed_loss_max = new_bounds['objval_max'] - L0_penalty_min
if proposed_loss_max < new_bounds['loss_max']:
new_bounds['loss_max'] = proposed_loss_max
improved_bounds = True
# L0_max and L0_penalty_max
if new_bounds['objval_max'] > new_bounds['loss_min']:
proposed_L0_max = (new_bounds['objval_max'] - new_bounds['loss_min']) / C_0_min
if proposed_L0_max < new_bounds['L0_max']:
new_bounds['L0_max'] = proposed_L0_max
L0_penalty_max = min(L0_penalty_max, C_0_max * proposed_L0_max)
improved_bounds = True
# objval_max = min(objval_max, loss_max + penalty_max)
proposed_objval_max = new_bounds['loss_max'] + L0_penalty_max
if proposed_objval_max < new_bounds['objval_max']:
new_bounds['objval_max'] = proposed_objval_max
L0_penalty_max = min(L0_penalty_max, proposed_objval_max)
improved_bounds = True
chain_count += 1
return new_bounds
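# The same sketch for the LP variant, which keeps the L0 bounds fractional
# (all numbers below are illustrative only).
def _demo_chained_updates_for_lp():
    C_0_nnz = np.array([0.01, 0.01, 0.02, 0.05])
    bounds = {
        'objval_min': 0.0,
        'objval_max': 1.0,
        'loss_min': 0.0,
        'loss_max': 1.0,
        'L0_min': 0.0,
        'L0_max': float(len(C_0_nnz)),
    }
    return chained_updates_for_lp(bounds, C_0_nnz, new_objval_at_feasible=0.4)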
|
import logging
from django import template
register = template.Library()
logger = logging.getLogger(__name__)
@register.filter
def round_num(number, precision=0):
"""Rounds a number to a given precision in decimal digits (default 0 digits) and returns the
integer value.
Precision may be negative. A precision of 1 will round to the tenths
place and a precision of -1 will round to the tens place.
Returns:
Float
"""
return round(number, precision)
@register.filter
def to_int(num):
"""Converts a number to an integer."""
return int(num)
@register.filter
def divide(dividend, divisor):
"""Returns the quotient of the arguments as a float."""
try:
return 1.0 * dividend / divisor
except ZeroDivisionError:
return 0.0
@register.filter
def multiply(num1, num2):
"""Returns the product of the arguments."""
return num1 * num2
@register.filter
def minimum(num1, num2):
"""Returns smaller of two numbers."""
return min(num1, num2)
@register.filter
def maximum(num1, num2):
"""Returns smaller of two numbers."""
return max(num1, num2)
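# Example template usage; the load name `math_filters` is hypothetical and
# depends on the filename this library module is installed under:
#
#   {% load math_filters %}
#   {{ price|multiply:quantity|round_num:2 }}
#   {{ total|divide:count|to_int }}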
|
# -*- coding: utf-8 -*-
"""Return dictionary of secondary structure disorder.
Validate ss_dis data files. Download and/or regenerate if necessary.
Then return a dictionary of ss_dis data.
"""
from __future__ import (
absolute_import, division, print_function, unicode_literals)
import filecmp
import os
import re
import requests
import shutil
import stat
from collections import namedtuple
from io import open
from pdb.lib.datetime_info import now_utc
from pdb.lib.file_io import read_json, write_json
__ss_dis_pattern__ = """
^ # Anchor to start of line.
(ss_dis) # Group 1: Match ss_dis
\. # A literal dot.
( # Start Group 2.
\d{8} # The year as eight digits (YYYYMMDD)
T # A literal 'T." (Time)
\d{6} # The time as six digits (HHmmss)
Z # A literal "Z." (Zulu, for UTC time.)
) # End Group 2.
\. # A literal dot.
( # Start Group 3.
(?:txt) # The extension "txt."
| # OR
(?:json) # The extension "json."
) # End Group 3.
$ # Anchor to end of line.
"""
SS_DIS_PAT = re.compile(__ss_dis_pattern__, re.VERBOSE)
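# For reference, a name such as 'ss_dis.20170301T120000Z.txt' matches the
# pattern above, with groups ('ss_dis', '20170301T120000Z', 'txt'); the
# timestamp shown is illustrative.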
SS_File_Names = namedtuple('ss_dis_files', ['raw', 'json'])
def _find_ss_data(dir_path):
"""Return a list of ss_dis data files.
Args:
dir_path (Unicode): The directory path where ss_dis data resides.
    Returns:
        ss_dis_filepaths (list): A list of file names in
            the directory that match the regex pattern for
            ss_dis filenames.
"""
found_filepaths = [
found
for found in os.listdir(dir_path)
if os.path.isfile(
os.path.join(dir_path, found)
)
]
ss_dis_filepaths = [
ss_dis_fp
for ss_dis_fp in found_filepaths
if SS_DIS_PAT.search(ss_dis_fp)
]
return ss_dis_filepaths
def _make_backup_dir(parent_dir_path):
"""Create a backup folder in the specified directory path.
Args:
parent_dir_path (Unicode): The directory path where
the backup folder will be created.
Returns:
None
Raises:
RuntimeError: The specified path must not be a file.
"""
backup_dp = os.path.join(parent_dir_path, 'backup')
if os.path.isdir(backup_dp):
print(
"Found and using backup directory."
)
else:
if os.path.isfile(backup_dp):
raise RuntimeError(
"Can not create a backup directory in "
"specified path because there is a file "
"named \"backup\" in that directory."
"Please rename or move the file."
)
print("Creating backup directory: \n"
"\t{}\n".format(backup_dp))
os.makedirs(backup_dp)
assert os.path.isdir(backup_dp)
return None
def _handle_readonly_file(file_path):
try:
os.chmod(file_path, stat.S_IWRITE)
os.remove(file_path)
except OSError as os_err:
err_msg = (
"Unable to delete archive the old ss_dis file."
"You may wish to manually remove:\n"
"\t{}\n"
"The error details are:".format(
file_path
)
)
for arg in os_err.args:
err_msg = (
"{}\n"
"\t\"{}\"".format(err_msg, arg)
)
print(err_msg)
print("")
else:
assert not os.path.isfile(file_path)
return None
def _archive_ss_data(original_file_path):
"""Move the file at the specified path to a backup directory.
A backup directory will be created in the dirname() of the
filepath if it does not already exist.
Args:
original_file_path (Unicode): The filepath to be moved.
Returns:
None
"""
root_dp = os.path.dirname(original_file_path)
backup_dp = os.path.join(root_dp, 'backup')
if not os.path.isdir(backup_dp):
_make_backup_dir(root_dp)
assert os.path.isdir(backup_dp)
assert os.path.isfile(original_file_path)
archive_fp = os.path.join(
backup_dp, os.path.basename(original_file_path)
)
shutil.copy(original_file_path, archive_fp)
assert filecmp.cmp(original_file_path, archive_fp, shallow=0)
assert os.path.isabs(original_file_path)
try:
os.remove(original_file_path)
except OSError:
_handle_readonly_file(original_file_path)
else:
assert not os.path.isfile(original_file_path)
finally:
assert os.path.isfile(archive_fp)
return None
def _find_matching_datetime_pairs(sorted_dis_names):
"""Find a ss_dst json and text file with the same timestamp.
The ss_dis.txt file from the PDB database isn't used directly; it
is read, processed, and then turned into a dictionary.
That dictionary, with modified data, is stored as a json file
for future use.
The ss_dis text file is kept to enable regeneration of the json
file without downloading the data again. (This is also helpful
for unit testing so that servers aren't overloaded.)
    To ensure consistency, the dictionary of modified ss_dis data is never
    returned directly (which it could be after writing the json
    for the first time). Instead, the data is always read from the
    json file.
Both the json and text file are written with a timestamp to
assert that files belong to the same set of data.
Args:
sorted_dis_names (list): A list of ss_dis filenames (not path
names) that have been found in the data directory.
Returns:
match_results (dict): A dictionary of file names with the
following keys:
valid_raw_file (Unicode):
A ss_dis text file with a matching json file.
valid_json_file (Unicode):
A ss_dis json file with a matching txt (raw data) file.
files_to_archive (list):
            A list of file names with no matching pairs, or that
are older than the valid_raw_file/valid_json_file,
that should be moved to the backup directory.
"""
sorted_dis_names = list(sorted_dis_names)
match_results = {
'valid_raw_file': None,
'valid_json_file': None,
'files_to_archive': []
}
while len(sorted_dis_names) >= 2:
# For a ss_dis file, group 1 matches "ss_dis",
# group 2 matches the timestamp, and group 3 matches
# the file extension.
first_date = SS_DIS_PAT.search(sorted_dis_names[0]).group(2)
second_date = SS_DIS_PAT.search(sorted_dis_names[1]).group(2)
first_extension = SS_DIS_PAT.search(sorted_dis_names[0]).group(3)
second_extension = SS_DIS_PAT.search(sorted_dis_names[1]).group(3)
extensions = (first_extension, second_extension)
if not first_date == second_date:
match_results['files_to_archive'].append(sorted_dis_names[0])
sorted_dis_names.pop(0)
continue
if 'json' not in extensions or 'txt' not in extensions:
match_results['files_to_archive'].append(sorted_dis_names[0])
sorted_dis_names.pop(0)
continue
assert first_date == second_date
assert first_extension != second_extension
if first_extension == 'txt':
match_results['valid_raw_file'] = sorted_dis_names.pop(0)
# Reference the next file directly because we used pop().
assert SS_DIS_PAT.search(sorted_dis_names[0]).group(3) == 'json'
match_results['valid_json_file'] = sorted_dis_names.pop(0)
elif first_extension == 'json':
match_results['valid_json_file'] = sorted_dis_names.pop(0)
# Reference the next file directly because we used pop().
assert SS_DIS_PAT.search(sorted_dis_names[0]).group(3) == 'txt'
match_results['valid_raw_file'] = sorted_dis_names.pop(0)
else:
raise RuntimeError("Unhandled case.")
# Return only the most recent files as valid.
break
# Add any remaining (older) files to the list for archiving.
if sorted_dis_names:
match_results['files_to_archive'].extend(sorted_dis_names)
# If two matching files haven't been found, return the most
# recent raw txt file, if one exists.
if not match_results['valid_raw_file']:
assert not match_results['valid_json_file']
match_results['files_to_archive'].sort(reverse=True)
for archive_file in match_results['files_to_archive']:
            if SS_DIS_PAT.search(archive_file).group(3) == 'txt':
match_results['valid_raw_file'] = archive_file
# Remove this file from the archive list
# because it will now be used.
match_results['files_to_archive'].remove(archive_file)
break
return match_results
def _new_filenames():
"""Create text and json filenames with matching timestamps.
Returns:
new_filenames (SS_Names): A named 2-tuple where raw is the
filename of the new text file and json is the name
of the matching (datetime) json file.
"""
timestamp = now_utc()
rfn = "{}.{}.{}".format(
'ss_dis',
timestamp,
'txt'
)
yfn = "{}.{}.{}".format(
'ss_dis',
timestamp,
'json'
)
new_filenames = SS_File_Names(raw=rfn, json=yfn)
return new_filenames
def _download_ss_data(
raw_file_path,
url='http://www.rcsb.org/pdb/files/ss_dis.txt'):
"""Download ss_dis from the PDB servers.
Args:
        raw_file_path (Unicode): The timestamped path name for the data
to be downloaded.
Kwargs:
url (Unicode): The current URL for ss_dis on the PDB servers.
"""
ss_request = requests.get(url, stream=True)
with open(raw_file_path, 'w', encoding='utf-8') as raw_fh:
for chunk in ss_request.iter_content(
chunk_size=None,
decode_unicode=True):
if chunk: # Filter keep-alive chunks.
raw_fh.write(chunk)
ss_request.close()
return None
def _generate_ss_dict(ss_raw_data_filepath):
"""Read ss_raw_data_filepath.txt into a dictionary and return.
Process a ss_dis text file and return the modified
data as a dictionary.
Args:
ss_raw_data_filepath (Unicode): The file path of the ss_dis file.
Returns:
pdb_dict (dict): A PDB dictionary in the following form:
pdb_dict[pdb_chain] = {
'sequence': '',
'secstr': '',
'disorder': ''
}
"""
pdb_dict = {}
with open(ss_raw_data_filepath, 'r', encoding='utf-8') as raw_fh:
ss_data = raw_fh.readlines()
for line in ss_data:
if line[0] == '>':
header_info = line.split(':')
pdb = header_info[0][1:].upper()
chain = header_info[1]
outer_key = ''.join([
pdb,
'_',
chain
])
pdb_dict[outer_key] = {
'sequence': '',
'secstr': '',
'disorder': ''}
# iterate through a second time and fill in
seqstr = ''
header_info = ss_data[0].split(':')
pdb = header_info[0][1:].upper()
chain = header_info[1]
ltype = header_info[2].rstrip()
len_ss_dis = len(ss_data)
for i in range(1, len_ss_dis):
line = ss_data[i]
if line[0] == '>':
outer_key = ''.join([
pdb.upper(),
'_',
chain
])
pdb_dict[outer_key][ltype] = seqstr
header_info = line.split(':')
pdb = header_info[0][1:].upper()
chain = header_info[1]
ltype = header_info[2].rstrip()
seqstr = ''
else:
seqstr += line.rstrip('\n')
if i == len_ss_dis - 1:
outer_key = ''.join([
pdb,
'_',
chain
])
pdb_dict[outer_key][ltype] = seqstr
return pdb_dict
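# Sketch of the raw ss_dis layout the parser above expects; the PDB id and
# residue strings below are illustrative:
#
#   >101M:A:sequence
#   MVLSEGEWQLVLHVWAKVE...
#   >101M:A:secstr
#      HHHHHHHHHHHHHHGGG...
#   >101M:A:disorder
#   ---------------------...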
def _find_existing_files(ss_dir_path):
"""Find any and all ss_dis file in the specified path.
Both the json and text file are written with a timestamp to
assert that files belong to the same set of data. Find the
most current matching json/txt pair.
Args:
ss_dir_path (Unicode): The directory path where ss_dis
data resides.
Returns:
validation_results (dict): A dictionary of file paths
with the following keys:
valid_raw_file (Unicode): Path to the
validated ss_dis.txt file.
valid_json_file (Unicode): Path to the
validated ss_dis.json file.
            files_to_archive (list): A list of file
paths to be moved to the backup directory.
"""
validation_results = {
'valid_raw_file': None,
'valid_json_file': None,
'files_to_archive': []
}
dis_file_paths = _find_ss_data(ss_dir_path)
if not dis_file_paths:
pass
# Use an existing raw data file, but archive if it's a json file.
elif len(dis_file_paths) == 1:
this_file = dis_file_paths[0]
this_extension = SS_DIS_PAT.search(this_file).group(3)
if this_extension == 'txt':
validation_results['valid_raw_file'] = this_file
else:
assert this_extension == 'json'
validation_results['files_to_archive'].append(this_file)
# Find the most recent matching (raw/json) pair and archive the rest.
elif len(dis_file_paths) > 1:
assert len(dis_file_paths) > 1
dis_file_names = [
os.path.basename(dis_file_path)
for dis_file_path in dis_file_paths
]
dis_file_names.sort(reverse=True)
found = _find_matching_datetime_pairs(dis_file_names)
# Add values from the dictionary by key name, instead of copying
# the dictionary, to allow for possible future changes.
if found['files_to_archive']:
validation_results['files_to_archive'] = found['files_to_archive']
if found['valid_raw_file']:
validation_results['valid_raw_file'] = found['valid_raw_file']
if found['valid_json_file']:
validation_results['valid_json_file'] = found['valid_json_file']
else:
raise SyntaxError("Unhandled case.")
return validation_results
def fetch_ss_dis(dir_path):
"""Return a processed dictionary for ss_dis data.
Args:
dir_path (Unicode): The dir path where ss_dis files are located.
Returns:
ss_dis_data (dict): A dictionary of processed ss_dis data.
"""
working_path = os.path.abspath(dir_path)
ss_dis_files = _find_existing_files(working_path)
if ss_dis_files['files_to_archive']:
for name_to_archive in ss_dis_files['files_to_archive']:
path_to_archive = os.path.join(working_path, name_to_archive)
_archive_ss_data(
path_to_archive
)
if ss_dis_files['valid_raw_file']:
valid_raw_fp = os.path.join(
working_path, ss_dis_files['valid_raw_file']
)
else:
valid_raw_fp = None
if ss_dis_files['valid_json_file']:
valid_json_fp = os.path.join(
working_path, ss_dis_files['valid_json_file']
)
else:
valid_json_fp = None
    # If a valid pair exists, use the json file to return a dictionary.
if valid_raw_fp and valid_json_fp:
assert os.path.isfile(valid_raw_fp)
assert os.path.isfile(valid_json_fp)
current_json_path = valid_json_fp
# Generate a companion json file if a single raw file is found.
elif valid_raw_fp:
valid_raw_fn = os.path.basename(valid_raw_fp)
assert not valid_json_fp
this_timestamp = SS_DIS_PAT.search(valid_raw_fn).group(2)
companion_json = "{}.{}.{}".format(
'ss_dis',
this_timestamp,
'json'
)
companion_json_path = os.path.join(working_path, companion_json)
ss_dict = _generate_ss_dict(valid_raw_fp)
write_json(ss_dict, companion_json_path)
current_json_path = companion_json_path
# Download new data and generate json file.
elif not (valid_raw_fp or valid_json_fp):
new_names = _new_filenames()
new_raw_path = os.path.join(working_path, new_names.raw)
new_json_path = os.path.join(working_path, new_names.json)
_download_ss_data(new_raw_path)
ss_dict = _generate_ss_dict(new_raw_path)
write_json(ss_dict, new_json_path)
current_json_path = new_json_path
    elif valid_json_fp and not valid_raw_fp:
raise RuntimeError("Should not have a JSON file without a TXT file.")
else:
raise RuntimeError("Unhandled case.")
# Always return the ss_dis dictionary by reading the json
# file to ensure consistency of future runs.
ss_dis_data = read_json(current_json_path)
return ss_dis_data
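# Typical entry point; the directory path and chain key are illustrative:
#
#   ss_dis_data = fetch_ss_dis('/data/pdb')
#   entry = ss_dis_data['101M_A']      # keys look like '<PDB>_<chain>'
#   entry['sequence'], entry['secstr'], entry['disorder']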
|
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import logging
import os
from collections import defaultdict
from builtins import object
import inspect
from typing import Any
from typing import Dict
from typing import List
from typing import Optional
from typing import Set
from typing import Text
from typing import Tuple
from typing import Type
from rasa_nlu.config import RasaNLUConfig
def load_component(component_clz, context, config):
# type: (Type[Component], Dict[Text, Any], Dict[Text, Any]) -> Optional[Component]
"""Calls a components load method to init it based on a previously persisted model."""
if component_clz is not None:
load_args = fill_args(component_clz.load_args(), context, config)
return component_clz.load(*load_args)
else:
return None
def create_component(component_clz, config):
# type: (Type[Component], Dict[Text, Any]) -> Optional[Component]
"""Calls a components load method to init it based on a previously persisted model."""
if component_clz is not None:
create_args = fill_args(component_clz.create_args(), context={}, config=config)
return component_clz.create(*create_args)
else:
return None
def fill_args(arguments, context, config):
# type: (List[Text], Dict[Text, Any], Dict[Text, Any]) -> List[Any]
"""Given a list of arguments, tries to look up these argument names in the config / context to fill the arguments"""
filled = []
for arg in arguments:
if arg in context:
filled.append(context[arg])
elif arg in config:
filled.append(config[arg])
else:
raise MissingArgumentError("Couldn't fill argument '{}' :(".format(arg))
return filled
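# A minimal sketch of how fill_args resolves names; the context and config
# dictionaries below are made up for illustration.
def _demo_fill_args():
    context = {'training_data': 'examples.json'}
    config = {'language': 'en'}
    # -> ['examples.json', 'en']; unknown names raise MissingArgumentError.
    return fill_args(['training_data', 'language'], context, config)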
def _read_dev_requirements(file_name):
"""Reads the dev requirements and groups the pinned versions into sections indicated by comments in the file.
The dev requirements should be grouped by preceeding comments. The comment should start with `#` followed by
the name of the requirement, e.g. `# sklearn`. All following lines till the next line starting with `#` will be
required to be installed if the name `sklearn` is requested to be available."""
with open(file_name) as f:
req_lines = f.readlines()
requirements = defaultdict(list)
current_name = None
for req_line in req_lines:
if req_line.startswith("#"):
current_name = req_line[1:].strip(' \n')
elif current_name is not None:
requirements[current_name].append(req_line.strip(' \n'))
return requirements
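# Sketch of the file layout this parser expects; the package names and
# pinned versions below are illustrative:
#
#   # spacy
#   spacy==1.5.0
#   # sklearn
#   scikit-learn==0.18.1
#   scipy==0.18.1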
def find_unavailable_packages(package_names):
# type: (List[Text]) -> Set[Text]
"""Tries to import all the package names and returns the packages where it failed."""
import importlib
failed_imports = set()
for package in package_names:
try:
importlib.import_module(package)
except ImportError:
failed_imports.add(package)
return failed_imports
def validate_requirements(component_names, dev_requirements_file="dev-requirements.txt"):
    # type: (List[Text], Text) -> None
    """Ensures that all required python packages are installed to instantiate and use the passed components."""
from rasa_nlu import registry
# Validate that all required packages are installed
failed_imports = set()
for component_name in component_names:
component_class = registry.get_component_class(component_name)
failed_imports.update(find_unavailable_packages(component_class.required_packages()))
if failed_imports: # pragma: no cover
# if available, use the development file to figure out the correct version numbers for each requirement
if os.path.exists(dev_requirements_file):
all_requirements = _read_dev_requirements(dev_requirements_file)
missing_requirements = [r for i in failed_imports for r in all_requirements[i]]
raise Exception("Not all required packages are installed. To use this pipeline, run\n\t" +
"> pip install {}".format(" ".join(missing_requirements)))
else:
raise Exception("Not all required packages are installed. Please install {}".format(
" ".join(failed_imports)))
def validate_arguments(pipeline, config, allow_empty_pipeline=False):
# type: (List[Component], RasaNLUConfig, bool) -> None
"""Validates a pipeline before it is run. Ensures, that all arguments are present to train the pipeline."""
# Ensure the pipeline is not empty
if not allow_empty_pipeline and len(pipeline) == 0:
raise ValueError("Can not train an empty pipeline. " +
"Make sure to specify a proper pipeline in the configuration using the `pipeline` key." +
"The `backend` configuration key is NOT supported anymore.")
# Validate the init phase
context = {}
for component in pipeline:
try:
fill_args(component.pipeline_init_args(), context, config.as_dict())
updates = component.context_provides.get("pipeline_init", [])
for u in updates:
context[u] = None
except MissingArgumentError as e: # pragma: no cover
raise Exception("Failed to validate component '{}'. {}".format(component.name, e))
after_init_context = context.copy()
context["training_data"] = None # Prepare context for testing the training phase
for component in pipeline:
try:
fill_args(component.train_args(), context, config.as_dict())
updates = component.context_provides.get("train", [])
for u in updates:
context[u] = None
except MissingArgumentError as e: # pragma: no cover
raise Exception("Failed to validate at component '{}'. {}".format(component.name, e))
    # Reset the context to test the processing phase
context = {"entities": [], "text": None}
context.update(after_init_context)
for component in pipeline:
try:
fill_args(component.process_args(), context, config.as_dict())
updates = component.context_provides.get("process", [])
for u in updates:
context[u] = None
except MissingArgumentError as e: # pragma: no cover
raise Exception("Failed to validate at component '{}'. {}".format(component.name, e))
class MissingArgumentError(ValueError):
"""Raised when a function is called and not all parameters can be filled from the context / config.
Attributes:
message -- explanation of which parameter is missing
"""
def __init__(self, message):
# type: (Text) -> None
super(MissingArgumentError, self).__init__(message)
self.message = message
def __str__(self):
return self.message
class Component(object):
"""A component is a message processing unit in a pipeline.
Components are collected sequentially in a pipeline. Each component is called one after another. This holds for
initialization, training, persisting and loading the components. If a component comes first in a pipeline, its
methods will be called first.
E.g. to process an incoming message, the `process` method of each component will be called. During the processing
(as well as the training, persisting and initialization) components can pass information to other components.
    The information is passed to other components by providing attributes to the so-called pipeline context. The
pipeline context contains all the information of the previous components a component can use to do its own
processing. For example, a featurizer component can provide features that are used by another component down
the pipeline to do intent classification."""
# Name of the component to be used when integrating it in a pipeline. E.g. `[ComponentA, ComponentB]`
# will be a proper pipeline definition where `ComponentA` is the name of the first component of the pipeline.
name = ""
# Defines what attributes the pipeline component will provide when called. The different keys indicate the
# different functions (`pipeline_init`, `train`, `process`) that are able to update the pipelines context.
# (mostly used to check if the pipeline is valid)
context_provides = {
"pipeline_init": [],
"train": [],
"process": [],
}
# Defines which of the attributes the component provides should be added to the final output json at the end of the
    # pipeline. Every attribute in `output_provides` should be part of the above `context_provides['process']`,
    # as it wouldn't make much sense to keep an attribute in the output that is not generated. Every other attribute provided
# in the context during the process step will be removed from the output json.
output_provides = []
@classmethod
def required_packages(cls):
# type: () -> List[Text]
"""Specify which python packages need to be installed to use this component, e.g. `["spacy", "numpy"]`.
This list of requirements allows us to fail early during training if a required package is not installed."""
return []
@classmethod
def load(cls, *args):
# type: (*Any) -> Component
"""Load this component from file.
        After a component has been trained, it will be persisted by calling `persist`. When the pipeline gets loaded again,
this component needs to be able to restore itself. Components can rely on any context attributes that are
created by `pipeline_init` calls to components previous to this one."""
return cls(*args)
@classmethod
def create(cls, *args):
# type: (*Any) -> Component
"""Creates this component (e.g. before a training is started).
Method can access all configuration parameters."""
return cls(*args)
def pipeline_init(self, *args):
# type: (*Any) -> Optional[Dict[Text, Any]]
"""Initialize this component for a new pipeline
This function will be called before the training is started and before the first message is processed using
the interpreter. The component gets the opportunity to add information to the context that is passed through
the pipeline during training and message parsing. Most components do not need to implement this method.
It's mostly used to initialize framework environments like MITIE and spacy
(e.g. loading word vectors for the pipeline)."""
pass
def train(self, *args):
# type: (*Any) -> Optional[Dict[Text, Any]]
"""Train this component.
        This is the component's chance to train itself on the provided training data. The component can rely on
        any context attribute that gets created by a call to `pipeline_init` of ANY component and
on any context attributes created by a call to `train` of components previous to this one."""
pass
def process(self, *args):
# type: (*Any) -> Optional[Dict[Text, Any]]
"""Process an incomming message.
This is the components chance to process an incommng message. The component can rely on
any context attribute to be present, that gets created by a call to `pipeline_init` of ANY component and
on any context attributes created by a call to `process` of components previous to this one."""
pass
def persist(self, model_dir):
# type: (Text) -> Optional[Dict[Text, Any]]
"""Persist this component to disk for future loading."""
pass
@classmethod
def cache_key(cls, model_metadata):
# type: (Metadata) -> Optional[Text]
"""This key is used to cache components.
If a component is unique to a model it should return None. Otherwise, an instantiation of the
component will be reused for all models where the metadata creates the same key."""
from rasa_nlu.model import Metadata
return None
def pipeline_init_args(self):
# type: () -> List[Text]
return [arg for arg in inspect.getargspec(self.pipeline_init).args if arg not in ["self"]]
@classmethod
def create_args(cls):
# type: () -> List[Text]
return [arg for arg in inspect.getargspec(cls.create).args if arg not in ["cls"]]
def train_args(self):
# type: () -> List[Text]
return [arg for arg in inspect.getargspec(self.train).args if arg not in ["self"]]
def process_args(self):
# type: () -> List[Text]
return [arg for arg in inspect.getargspec(self.process).args if arg not in ["self"]]
@classmethod
def load_args(cls):
# type: () -> List[Text]
return [arg for arg in inspect.getargspec(cls.load).args if arg not in ["cls"]]
def __eq__(self, other):
return self.__dict__ == other.__dict__
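# A toy subclass sketch showing how `context_provides` and `process` line
# up; the component and attribute names are illustrative only.
class _EchoComponent(Component):
    name = "echo"
    context_provides = {
        "pipeline_init": [],
        "train": [],
        "process": ["echoed_text"],
    }
    output_provides = ["echoed_text"]

    def process(self, text):
        # type: (Text) -> Dict[Text, Any]
        # `text` is filled from the pipeline context by fill_args().
        return {"echoed_text": text}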
class ComponentBuilder(object):
"""Creates trainers and interpreters based on configurations. Caches components for reuse."""
def __init__(self, use_cache=True):
self.use_cache = use_cache
        # Reuse nlp and featurizers where possible to save memory: every
        # component that implements a cache key will be cached.
self.component_cache = {}
def __get_cached_component(self, component_name, metadata):
# type: (Text, Metadata) -> Tuple[Optional[Component], Optional[Text]]
"""Load a component from the cache, if it exists. Returns the component, if found, and the cache key."""
from rasa_nlu import registry
from rasa_nlu.model import Metadata
component_class = registry.get_component_class(component_name)
cache_key = component_class.cache_key(metadata)
if cache_key is not None and self.use_cache and cache_key in self.component_cache:
return self.component_cache[cache_key], cache_key
else:
return None, cache_key
def __add_to_cache(self, component, cache_key):
# type: (Component, Text) -> None
"""Add a component to the cache."""
if cache_key is not None and self.use_cache:
self.component_cache[cache_key] = component
logging.info("Added '{}' to component cache. Key '{}'.".format(component.name, cache_key))
def load_component(self, component_name, context, model_config, meta):
# type: (Text, Dict[Text, Any], Dict[Text, Any], Metadata) -> Component
"""Tries to retrieve a component from the cache, calls `load` to create a new component."""
from rasa_nlu import registry
from rasa_nlu.model import Metadata
try:
component, cache_key = self.__get_cached_component(component_name, meta)
if component is None:
component = registry.load_component_by_name(component_name, context, model_config)
self.__add_to_cache(component, cache_key)
return component
except MissingArgumentError as e: # pragma: no cover
raise Exception("Failed to load component '{}'. {}".format(component_name, e))
def create_component(self, component_name, config):
# type: (Text, RasaNLUConfig) -> Component
"""Tries to retrieve a component from the cache, calls `create` to create a new component."""
from rasa_nlu import registry
from rasa_nlu.model import Metadata
try:
component, cache_key = self.__get_cached_component(component_name, Metadata(config.as_dict(), None))
if component is None:
component = registry.create_component_by_name(component_name, config.as_dict())
self.__add_to_cache(component, cache_key)
return component
except MissingArgumentError as e: # pragma: no cover
raise Exception("Failed to create component '{}'. {}".format(component_name, e))
|
# dnf configuration classes.
#
# Copyright (C) 2016-2017 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
from __future__ import absolute_import
from __future__ import unicode_literals
from dnf.yum import misc
from dnf.i18n import ucd, _
from dnf.pycomp import basestring
import fnmatch
import dnf.conf.substitutions
import dnf.const
import dnf.exceptions
import dnf.pycomp
import dnf.util
import hawkey
import logging
import os
import libdnf.conf
PRIO_EMPTY = libdnf.conf.Option.Priority_EMPTY
PRIO_DEFAULT = libdnf.conf.Option.Priority_DEFAULT
PRIO_MAINCONFIG = libdnf.conf.Option.Priority_MAINCONFIG
PRIO_AUTOMATICCONFIG = libdnf.conf.Option.Priority_AUTOMATICCONFIG
PRIO_REPOCONFIG = libdnf.conf.Option.Priority_REPOCONFIG
PRIO_PLUGINDEFAULT = libdnf.conf.Option.Priority_PLUGINDEFAULT
PRIO_PLUGINCONFIG = libdnf.conf.Option.Priority_PLUGINCONFIG
PRIO_COMMANDLINE = libdnf.conf.Option.Priority_COMMANDLINE
PRIO_RUNTIME = libdnf.conf.Option.Priority_RUNTIME
logger = logging.getLogger('dnf')
class BaseConfig(object):
"""Base class for storing configuration definitions.
Subclass when creating your own definitions.
"""
def __init__(self, config=None, section=None, parser=None):
self.__dict__["_config"] = config
self._section = section
def __getattr__(self, name):
if "_config" not in self.__dict__:
raise AttributeError("'{}' object has no attribute '{}'".format(self.__class__, name))
option = getattr(self._config, name)
if option is None:
return None
try:
value = option().getValue()
except Exception as ex:
return None
if isinstance(value, str):
return ucd(value)
return value
def __setattr__(self, name, value):
option = getattr(self._config, name, None)
if option is None:
# unknown config option, store to BaseConfig only
return super(BaseConfig, self).__setattr__(name, value)
self._set_value(name, value, PRIO_RUNTIME)
def __str__(self):
out = []
out.append('[%s]' % self._section)
if self._config:
for optBind in self._config.optBinds():
try:
value = optBind.second.getValueString()
except RuntimeError:
value = ""
out.append('%s: %s' % (optBind.first, value))
return '\n'.join(out)
def _has_option(self, name):
method = getattr(self._config, name, None)
return method is not None
def _get_value(self, name):
method = getattr(self._config, name, None)
if method is None:
return None
return method().getValue()
def _get_priority(self, name):
method = getattr(self._config, name, None)
if method is None:
return None
return method().getPriority()
def _set_value(self, name, value, priority=PRIO_RUNTIME):
"""Set option's value if priority is equal or higher
than curent priority."""
method = getattr(self._config, name, None)
if method is None:
raise Exception("Option \"" + name + "\" does not exists")
option = method()
if value is None:
try:
option.set(priority, value)
except Exception:
pass
else:
try:
if isinstance(value, list) or isinstance(value, tuple):
option.set(priority, libdnf.conf.VectorString(value))
elif (isinstance(option, libdnf.conf.OptionBool)
or isinstance(option, libdnf.conf.OptionChildBool)
) and isinstance(value, int):
option.set(priority, bool(value))
else:
option.set(priority, value)
except RuntimeError as e:
raise dnf.exceptions.ConfigError(_("Error parsing '%s': %s")
% (value, str(e)),
raw_error=str(e))
def _populate(self, parser, section, filename, priority=PRIO_DEFAULT):
"""Set option values from an INI file section."""
if parser.hasSection(section):
for name in parser.options(section):
value = parser.getSubstitutedValue(section, name)
if not value or value == 'None':
value = ''
if hasattr(self._config, name):
try:
self._config.optBinds().at(name).newString(priority, value)
except RuntimeError as e:
logger.debug(_('Unknown configuration value: %s=%s in %s; %s'),
ucd(name), ucd(value), ucd(filename), str(e))
else:
if name == 'arch' and hasattr(self, name):
setattr(self, name, value)
else:
logger.debug(
_('Unknown configuration option: %s = %s in %s'),
ucd(name), ucd(value), ucd(filename))
def dump(self):
# :api
"""Return a string representing the values of all the
configuration options.
"""
output = ['[%s]' % self._section]
if self._config:
for optBind in self._config.optBinds():
# if not opt._is_runtimeonly():
try:
output.append('%s = %s' % (optBind.first, optBind.second.getValueString()))
except RuntimeError:
pass
return '\n'.join(output) + '\n'
@staticmethod
def write_raw_configfile(filename, section_id, substitutions, modify):
# :api
"""
filename - name of config file (.conf or .repo)
section_id - id of modified section (e.g. main, fedora, updates)
substitutions - instance of base.conf.substitutions
modify - dict of modified options
"""
parser = libdnf.conf.ConfigParser()
parser.read(filename)
        # Because repo ids can contain $variables, we need to map both ways
        # to figure out which one is which.
if not parser.hasSection(section_id):
for sect in parser.getData():
if libdnf.conf.ConfigParser.substitute(sect, substitutions) == section_id:
section_id = sect
for name, value in modify.items():
if isinstance(value, list):
value = ' '.join(value)
parser.setValue(section_id, name, value)
parser.write(filename, False)
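# Illustrative call (the path and option values are made up): rewrite the
# [main] section of a config file with a modified 'exclude' list.
#
#   BaseConfig.write_raw_configfile(
#       '/etc/dnf/dnf.conf', 'main',
#       dnf.conf.substitutions.Substitutions(), {'exclude': ['kernel*']})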
class MainConf(BaseConfig):
# :api
"""Configuration option definitions for dnf.conf's [main] section."""
def __init__(self, section='main', parser=None):
# pylint: disable=R0915
config = libdnf.conf.ConfigMain()
super(MainConf, self).__init__(config, section, parser)
self._set_value('pluginpath', [dnf.const.PLUGINPATH], PRIO_DEFAULT)
self._set_value('pluginconfpath', [dnf.const.PLUGINCONFPATH], PRIO_DEFAULT)
self.substitutions = dnf.conf.substitutions.Substitutions()
self.arch = hawkey.detect_arch()
self._config.system_cachedir().set(PRIO_DEFAULT, dnf.const.SYSTEM_CACHEDIR)
        # set up different cache and log directories for non-privileged users
if dnf.util.am_i_root():
cachedir = dnf.const.SYSTEM_CACHEDIR
logdir = '/var/log'
else:
try:
cachedir = logdir = misc.getCacheDir()
except (IOError, OSError) as e:
msg = _('Could not set cachedir: {}').format(ucd(e))
raise dnf.exceptions.Error(msg)
self._config.cachedir().set(PRIO_DEFAULT, cachedir)
self._config.logdir().set(PRIO_DEFAULT, logdir)
@property
def get_reposdir(self):
# :api
"""Returns the value of reposdir"""
myrepodir = None
# put repo file into first reposdir which exists or create it
for rdir in self._get_value('reposdir'):
if os.path.exists(rdir):
myrepodir = rdir
break
if not myrepodir:
myrepodir = self._get_value('reposdir')[0]
dnf.util.ensure_dir(myrepodir)
return myrepodir
def _search_inside_installroot(self, optname):
prio = self._get_priority(optname)
        # don't modify paths specified on the command line
if prio >= PRIO_COMMANDLINE:
return
val = self._get_value(optname)
        # If it exists inside the installroot, use it (i.e. adjust the
        # configuration); for lists, any component counts.
if not isinstance(val, str):
if any(os.path.exists(os.path.join(self._get_value('installroot'),
p.lstrip('/'))) for p in val):
self._set_value(
optname,
libdnf.conf.VectorString([self._prepend_installroot_path(p) for p in val]),
prio
)
elif os.path.exists(os.path.join(self._get_value('installroot'), val.lstrip('/'))):
self._set_value(optname, self._prepend_installroot_path(val), prio)
def prepend_installroot(self, optname):
# :api
prio = self._get_priority(optname)
new_path = self._prepend_installroot_path(self._get_value(optname))
self._set_value(optname, new_path, prio)
def _prepend_installroot_path(self, path):
root_path = os.path.join(self._get_value('installroot'), path.lstrip('/'))
return libdnf.conf.ConfigParser.substitute(root_path, self.substitutions)
def _configure_from_options(self, opts):
"""Configure parts of CLI from the opts """
config_args = ['plugins', 'version', 'config_file_path',
'debuglevel', 'errorlevel', 'installroot',
'best', 'assumeyes', 'assumeno', 'clean_requirements_on_remove', 'gpgcheck',
'showdupesfromrepos', 'plugins', 'ip_resolve',
'rpmverbosity', 'disable_excludes', 'color',
'downloadonly', 'exclude', 'excludepkgs', 'skip_broken',
'tsflags', 'arch', 'basearch', 'ignorearch', 'cacheonly', 'comment']
for name in config_args:
value = getattr(opts, name, None)
if value is not None and value != []:
if self._has_option(name):
appendValue = False
if self._config:
try:
appendValue = self._config.optBinds().at(name).getAddValue()
except RuntimeError:
# fails if option with "name" does not exist in _config (libdnf)
pass
if appendValue:
add_priority = dnf.conf.PRIO_COMMANDLINE
if add_priority < self._get_priority(name):
add_priority = self._get_priority(name)
for item in value:
if item:
self._set_value(name, self._get_value(name) + [item], add_priority)
else:
self._set_value(name, [], dnf.conf.PRIO_COMMANDLINE)
else:
self._set_value(name, value, dnf.conf.PRIO_COMMANDLINE)
elif hasattr(self, name):
setattr(self, name, value)
else:
logger.warning(_('Unknown configuration option: %s = %s'),
ucd(name), ucd(value))
if getattr(opts, 'gpgcheck', None) is False:
self._set_value("localpkg_gpgcheck", False, dnf.conf.PRIO_COMMANDLINE)
if hasattr(opts, 'main_setopts'):
# now set all the non-first-start opts from main from our setopts
# pylint: disable=W0212
for name, values in opts.main_setopts.items():
for val in values:
if hasattr(self._config, name):
try:
# values in main_setopts are strings, try to parse it using newString()
self._config.optBinds().at(name).newString(PRIO_COMMANDLINE, val)
except RuntimeError as e:
raise dnf.exceptions.ConfigError(
_("Error parsing --setopt with key '%s', value '%s': %s")
% (name, val, str(e)), raw_error=str(e))
else:
# if config option with "name" doesn't exist in _config, it could be defined
# only in Python layer
if hasattr(self, name):
setattr(self, name, val)
else:
msg = _("Main config did not have a %s attr. before setopt")
logger.warning(msg, name)
def exclude_pkgs(self, pkgs):
# :api
name = "excludepkgs"
if pkgs is not None and pkgs != []:
if self._has_option(name):
self._set_value(name, pkgs, dnf.conf.PRIO_COMMANDLINE)
else:
logger.warning(_('Unknown configuration option: %s = %s'),
ucd(name), ucd(pkgs))
def _adjust_conf_options(self):
"""Adjust conf options interactions"""
skip_broken_val = self._get_value('skip_broken')
if skip_broken_val:
self._set_value('strict', not skip_broken_val, self._get_priority('skip_broken'))
@property
def releasever(self):
# :api
return self.substitutions.get('releasever')
@releasever.setter
def releasever(self, val):
# :api
if val is None:
self.substitutions.pop('releasever', None)
return
self.substitutions['releasever'] = str(val)
@property
def arch(self):
# :api
return self.substitutions.get('arch')
@arch.setter
def arch(self, val):
# :api
if val is None:
self.substitutions.pop('arch', None)
return
if val not in dnf.rpm._BASEARCH_MAP.keys():
msg = _('Incorrect or unknown "{}": {}')
raise dnf.exceptions.Error(msg.format("arch", val))
self.substitutions['arch'] = val
self.basearch = dnf.rpm.basearch(val)
@property
def basearch(self):
# :api
return self.substitutions.get('basearch')
@basearch.setter
def basearch(self, val):
# :api
if val is None:
self.substitutions.pop('basearch', None)
return
if val not in dnf.rpm._BASEARCH_MAP.values():
msg = _('Incorrect or unknown "{}": {}')
raise dnf.exceptions.Error(msg.format("basearch", val))
self.substitutions['basearch'] = val
def read(self, filename=None, priority=PRIO_DEFAULT):
# :api
if filename is None:
filename = self._get_value('config_file_path')
parser = libdnf.conf.ConfigParser()
try:
parser.read(filename)
except RuntimeError as e:
raise dnf.exceptions.ConfigError(_('Parsing file "%s" failed: %s') % (filename, e))
except IOError as e:
logger.warning(e)
self._populate(parser, self._section, filename, priority)
# update to where we read the file from
self._set_value('config_file_path', filename, priority)
@property
def verbose(self):
return self._get_value('debuglevel') >= dnf.const.VERBOSE_LEVEL
class RepoConf(BaseConfig):
"""Option definitions for repository INI file sections."""
def __init__(self, parent, section=None, parser=None):
super(RepoConf, self).__init__(libdnf.conf.ConfigRepo(
parent._config if parent else libdnf.conf.ConfigMain()), section, parser)
self._masterConfig = parent._config if parent else libdnf.conf.ConfigMain()
def _configure_from_options(self, opts):
"""Configure repos from the opts. """
if getattr(opts, 'gpgcheck', None) is False:
for optname in ['gpgcheck', 'repo_gpgcheck']:
self._set_value(optname, False, dnf.conf.PRIO_COMMANDLINE)
repo_setopts = getattr(opts, 'repo_setopts', {})
for repoid, setopts in repo_setopts.items():
if not fnmatch.fnmatch(self._section, repoid):
continue
for name, values in setopts.items():
for val in values:
if hasattr(self._config, name):
try:
# values in repo_setopts are strings, try to parse it using newString()
self._config.optBinds().at(name).newString(PRIO_COMMANDLINE, val)
except RuntimeError as e:
raise dnf.exceptions.ConfigError(
_("Error parsing --setopt with key '%s.%s', value '%s': %s")
% (self._section, name, val, str(e)), raw_error=str(e))
else:
msg = _("Repo %s did not have a %s attr. before setopt")
logger.warning(msg, self._section, name)
|
import logging
from django.utils.translation import ugettext
from django.utils.translation import ugettext_lazy as _
from xlivesettings import values
from xlivesettings.models import SettingNotSet
from xlivesettings.utils import is_string_like
log = logging.getLogger(__name__)
_NOTSET = object()
class ConfigurationSettings(object):
"""A singleton manager for ConfigurationSettings"""
class __impl(object):
def __init__(self):
self.settings = values.SortedDotDict()
self.prereg = {}
def __getitem__(self, key):
"""Get an element either by ConfigurationGroup object or by its key"""
key = self._resolve_key(key)
return self.settings.get(key)
def __getattr__(self, key):
"""Get an element either by ConfigurationGroup object or by its key"""
            try:
                return self[key]
            except Exception:
                raise AttributeError(key)
def __iter__(self):
for v in self.groups():
yield v
def __len__(self):
return len(self.settings)
def __contains__(self, key):
            try:
                key = self._resolve_key(key)
                return key in self.settings
            except Exception:
                return False
def _resolve_key(self, raw):
if is_string_like(raw):
key = raw
elif isinstance(raw, values.ConfigurationGroup):
key = raw.key
else:
group = self.groups()[raw]
key = group.key
return key
def get_config(self, group, key):
try:
if isinstance(group, values.ConfigurationGroup):
group = group.key
cg = self.settings.get(group, None)
if not cg:
raise SettingNotSet(_("%s config group does not exist") % group)
else:
return cg[key]
except KeyError:
raise SettingNotSet("%s.%s" % (group, key))
def groups(self):
"""Return ordered list"""
return self.settings.values()
def has_config(self, group, key):
if isinstance(group, values.ConfigurationGroup):
group = group.key
cfg = self.settings.get(group, None)
if cfg and key in cfg:
return True
else:
return False
def preregister_choice(self, group, key, choice):
"""Setup a choice for a group/key which hasn't been instantiated yet."""
k = (group, key)
            if k in self.prereg:
self.prereg[k].append(choice)
else:
self.prereg[k] = [choice]
def register(self, value):
g = value.group
if not isinstance(g, values.ConfigurationGroup):
raise ValueError(_("value.group should be an instance of ConfigurationGroup"))
groupkey = g.key
valuekey = value.key
k = (groupkey, valuekey)
            if k in self.prereg:
for choice in self.prereg[k]:
value.add_choice(choice)
            if groupkey not in self.settings:
self.settings[groupkey] = g
self.settings[groupkey][valuekey] = value
return value
__instance = None
def __init__(self):
if ConfigurationSettings.__instance is None:
ConfigurationSettings.__instance = ConfigurationSettings.__impl()
#ConfigurationSettings.__instance.load_app_configurations()
self.__dict__['_ConfigurationSettings__instance'] = ConfigurationSettings.__instance
def __getattr__(self, attr):
""" Delegate access to implementation """
return getattr(self.__instance, attr)
def __getitem__(self, key):
return self.__instance[key]
def __len__(self):
return len(self.__instance)
def __setattr__(self, attr, value):
""" Delegate access to implementation """
return setattr(self.__instance, attr, value)
def __unicode__(self):
return _("Configuration settings: ") + unicode(self.groups())
def config_exists(group, key):
"""Test to see if a setting has been registered"""
return ConfigurationSettings().has_config(group, key)
def config_get(group, key):
"""Get a configuration setting"""
try:
return ConfigurationSettings().get_config(group, key)
except SettingNotSet:
log.debug('SettingNotSet: %s.%s', group, key)
raise
def config_get_localized(group, key, lang_code):
"""Get a localized configuration setting"""
return dict(config_get(group, key).value)[lang_code]
def config_get_group(group):
return ConfigurationSettings()[group]
def config_collect_values(group, groupkey, key, unique=True, skip_missing=True):
"""Look up (group, groupkey) from config, then take the values returned and
use them as groups for a second-stage lookup.
For example:
config_collect_values(PAYMENT, MODULES, CREDITCHOICES)
Stage 1: ['PAYMENT_GOOGLE', 'PAYMENT_AUTHORIZENET']
Stage 2: config_value('PAYMENT_GOOGLE', 'CREDITCHOICES')
+ config_value('PAYMENT_AUTHORIZENET', 'CREDITCHOICES')
Stage 3: (if unique is true) remove dupes
"""
groups = config_value(group, groupkey)
ret = []
for g in groups:
try:
ret.append(config_value(g, key))
        except KeyError:
if not skip_missing:
raise SettingNotSet(_("No config %(grp)s.%(key)s") % {'grp': g, 'key': key})
if unique:
out = []
for x in ret:
            if x not in out:
out.append(x)
ret = out
return ret
def config_register(value):
"""Register a value or values.
Parameters:
-A Value
"""
return ConfigurationSettings().register(value)
def config_register_list(*args):
for value in args:
config_register(value)
def config_value(group, key, default=_NOTSET):
"""Get a value from the configuration system"""
try:
return config_get(group, key).value
except SettingNotSet:
if default != _NOTSET:
return default
raise
def config_value_safe(group, key, default_value):
"""Get a config value with a default fallback, safe for use during SyncDB."""
raw = default_value
try:
raw = config_value(group, key)
except SettingNotSet:
pass
    except ImportError:
        log.warning("Error getting %s.%s, OK if you are in SyncDB.", group, key)
return raw
def config_choice_values(group, key, skip_missing=True, translate=False):
"""Get pairs of key, label from the setting."""
try:
cfg = config_get(group, key)
choices = cfg.choice_values
except SettingNotSet:
if skip_missing:
return []
else:
raise SettingNotSet('%s.%s' % (group, key))
if translate:
choices = [(k, ugettext(v)) for k, v in choices]
return choices
def config_add_choice(group, key, choice):
"""Add a choice to a value"""
if config_exists(group, key):
cfg = config_get(group, key)
cfg.add_choice(choice)
else:
ConfigurationSettings().preregister_choice(group, key, choice)
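# Typical read path; the group and key names below are illustrative:
#
#   shipping_modules = config_value('SHIPPING', 'MODULES', default=[])
#   if config_exists('PAYMENT', 'CREDITCHOICES'):
#       choices = config_choice_values('PAYMENT', 'CREDITCHOICES')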
|
# Copyright (C) 2015 - 2016 Google Inc.
# 2016 ycmd contributors
#
# This file is part of ycmd.
#
# ycmd is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ycmd is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ycmd. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from builtins import * # noqa
import json
import logging
import os
import re
import subprocess
import itertools
import threading
from tempfile import NamedTemporaryFile
from ycmd import responses
from ycmd import utils
from ycmd.completers.completer import Completer
from ycmd.completers.completer_utils import GetFileContents
BINARY_NOT_FOUND_MESSAGE = ( 'TSServer not found. '
'TypeScript 1.5 or higher is required.' )
SERVER_NOT_RUNNING_MESSAGE = 'TSServer is not running.'
MAX_DETAILED_COMPLETIONS = 100
RESPONSE_TIMEOUT_SECONDS = 10
PATH_TO_TSSERVER = utils.FindExecutable( 'tsserver' )
LOGFILE_FORMAT = 'tsserver_'
_logger = logging.getLogger( __name__ )
class DeferredResponse( object ):
"""
A deferred that resolves to a response from TSServer.
"""
def __init__( self, timeout = RESPONSE_TIMEOUT_SECONDS ):
self._event = threading.Event()
self._message = None
self._timeout = timeout
def resolve( self, message ):
self._message = message
self._event.set()
def result( self ):
self._event.wait( timeout = self._timeout )
    if not self._event.is_set():
raise RuntimeError( 'Response Timeout' )
message = self._message
if not message[ 'success' ]:
raise RuntimeError( message[ 'message' ] )
if 'body' in message:
return self._message[ 'body' ]
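# A minimal sketch of the DeferredResponse handshake: one thread blocks in
# result() while another resolves it (the payload below is made up).
def _DemoDeferredResponse():
  deferred = DeferredResponse( timeout = 1 )
  resolver = threading.Thread(
    target = deferred.resolve,
    args = ( { 'success': True, 'body': 'pong' }, ) )
  resolver.start()
  return deferred.result()  # 'pong'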
def ShouldEnableTypescriptCompleter():
if not PATH_TO_TSSERVER:
_logger.error( BINARY_NOT_FOUND_MESSAGE )
return False
_logger.info( 'Using TSServer located at {0}'.format( PATH_TO_TSSERVER ) )
return True
class TypeScriptCompleter( Completer ):
"""
Completer for TypeScript.
It uses TSServer which is bundled with TypeScript 1.5
See the protocol here:
https://github.com/Microsoft/TypeScript/blob/2cb0dfd99dc2896958b75e44303d8a7a32e5dc33/src/server/protocol.d.ts
"""
def __init__( self, user_options ):
super( TypeScriptCompleter, self ).__init__( user_options )
self._logfile = None
self._tsserver_handle = None
# Used to prevent threads from concurrently writing to
# the tsserver process' stdin
self._write_lock = threading.Lock()
# Each request sent to tsserver must have a sequence id.
# Responses contain the id sent in the corresponding request.
self._sequenceid = itertools.count()
# Used to prevent threads from concurrently accessing the sequence counter
self._sequenceid_lock = threading.Lock()
self._server_lock = threading.RLock()
# Used to read response only if TSServer is running.
self._tsserver_is_running = threading.Event()
# Start a thread to read response from TSServer.
self._thread = threading.Thread( target = self._ReaderLoop, args = () )
self._thread.daemon = True
self._thread.start()
self._StartServer()
# Used to map sequence id's to their corresponding DeferredResponse
# objects. The reader loop uses this to hand out responses.
self._pending = {}
# Used to prevent threads from concurrently reading and writing to
# the pending response dictionary
self._pending_lock = threading.Lock()
_logger.info( 'Enabling typescript completion' )
def _StartServer( self ):
with self._server_lock:
if self._ServerIsRunning():
return
self._logfile = utils.CreateLogfile( LOGFILE_FORMAT )
tsserver_log = '-file {path} -level {level}'.format( path = self._logfile,
level = _LogLevel() )
# TSServer gets the configuration for the log file through the
# environment variable 'TSS_LOG'. This seems to be undocumented but
# looking at the source code it seems like this is the way:
# https://github.com/Microsoft/TypeScript/blob/8a93b489454fdcbdf544edef05f73a913449be1d/src/server/server.ts#L136
environ = os.environ.copy()
utils.SetEnviron( environ, 'TSS_LOG', tsserver_log )
_logger.info( 'TSServer log file: {0}'.format( self._logfile ) )
# We need to redirect the error stream to the output one on Windows.
self._tsserver_handle = utils.SafePopen( PATH_TO_TSSERVER,
stdin = subprocess.PIPE,
stdout = subprocess.PIPE,
stderr = subprocess.STDOUT,
env = environ )
self._tsserver_is_running.set()
def _ReaderLoop( self ):
"""
Read responses from TSServer and use them to resolve
the DeferredResponse instances.
"""
while True:
self._tsserver_is_running.wait()
try:
message = self._ReadMessage()
except RuntimeError:
_logger.exception( SERVER_NOT_RUNNING_MESSAGE )
self._tsserver_is_running.clear()
continue
# We ignore events for now since we don't have a use for them.
msgtype = message[ 'type' ]
if msgtype == 'event':
eventname = message[ 'event' ]
_logger.info( 'Received {0} event from tsserver'.format( eventname ) )
continue
if msgtype != 'response':
_logger.error( 'Unsupported message type {0}'.format( msgtype ) )
continue
seq = message[ 'request_seq' ]
with self._pending_lock:
if seq in self._pending:
self._pending[ seq ].resolve( message )
del self._pending[ seq ]
def _ReadMessage( self ):
"""Read a response message from TSServer."""
# The headers are pretty similar to HTTP.
# At the time of writing, 'Content-Length' is the only supplied header.
headers = {}
while True:
headerline = self._tsserver_handle.stdout.readline().strip()
if not headerline:
break
key, value = utils.ToUnicode( headerline ).split( ':', 1 )
headers[ key.strip() ] = value.strip()
# The response message is a JSON object which comes back on one line.
# Since this might change in the future, we use the 'Content-Length'
# header.
if 'Content-Length' not in headers:
raise RuntimeError( "Missing 'Content-Length' header" )
contentlength = int( headers[ 'Content-Length' ] )
# TSServer adds a newline at the end of the response message and counts it
# as one character (\n) towards the content length. However, newlines are
# two characters on Windows (\r\n), so we need to take care of that. See
# issue https://github.com/Microsoft/TypeScript/issues/3403
content = self._tsserver_handle.stdout.read( contentlength )
if utils.OnWindows() and content.endswith( b'\r' ):
content += self._tsserver_handle.stdout.read( 1 )
return json.loads( utils.ToUnicode( content ) )
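  # A raw message as parsed above looks like this on the wire (illustrative
  # payload; 'Content-Length' counts the body bytes, including the trailing
  # newline):
  #
  #   Content-Length: <n>
  #
  #   {"seq":0,"type":"response","command":"open","request_seq":1,"success":true}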
def _BuildRequest( self, command, arguments = None ):
"""Build TSServer request object."""
with self._sequenceid_lock:
seq = next( self._sequenceid )
request = {
'seq': seq,
'type': 'request',
'command': command
}
if arguments:
request[ 'arguments' ] = arguments
return request
def _WriteRequest( self, request ):
"""Write a request to TSServer stdin."""
serialized_request = utils.ToBytes( json.dumps( request ) + '\n' )
with self._write_lock:
try:
self._tsserver_handle.stdin.write( serialized_request )
self._tsserver_handle.stdin.flush()
# IOError is an alias of OSError in Python 3.
except ( AttributeError, IOError ):
_logger.exception( SERVER_NOT_RUNNING_MESSAGE )
raise RuntimeError( SERVER_NOT_RUNNING_MESSAGE )
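  # The serialized request is a single JSON line, e.g. (illustrative file
  # path and values):
  #
  #   {"seq": 0, "type": "request", "command": "open",
  #    "arguments": {"file": "/tmp/foo.ts"}}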
def _SendCommand( self, command, arguments = None ):
"""
Send a request message to TSServer but don't wait for the response.
This function is to be used when we don't care about the response
to the message that is sent.
"""
request = self._BuildRequest( command, arguments )
self._WriteRequest( request )
def _SendRequest( self, command, arguments = None ):
"""
Send a request message to TSServer and wait
for the response.
"""
request = self._BuildRequest( command, arguments )
deferred = DeferredResponse()
with self._pending_lock:
seq = request[ 'seq' ]
self._pending[ seq ] = deferred
self._WriteRequest( request )
return deferred.result()
def _Reload( self, request_data ):
"""
    Synchronize TSServer's view of the file to
    the contents of the unsaved buffer.
"""
filename = request_data[ 'filepath' ]
contents = request_data[ 'file_data' ][ filename ][ 'contents' ]
tmpfile = NamedTemporaryFile( delete = False )
tmpfile.write( utils.ToBytes( contents ) )
tmpfile.close()
self._SendRequest( 'reload', {
'file': filename,
'tmpfile': tmpfile.name
} )
utils.RemoveIfExists( tmpfile.name )
def _ServerIsRunning( self ):
with self._server_lock:
return utils.ProcessIsRunning( self._tsserver_handle )
def ServerIsHealthy( self ):
return self._ServerIsRunning()
def SupportedFiletypes( self ):
return [ 'typescript' ]
def ComputeCandidatesInner( self, request_data ):
self._Reload( request_data )
entries = self._SendRequest( 'completions', {
'file': request_data[ 'filepath' ],
'line': request_data[ 'line_num' ],
'offset': request_data[ 'start_codepoint' ]
} )
# A less detailed version of the completion data is returned
# if there are too many entries. This improves responsiveness.
if len( entries ) > MAX_DETAILED_COMPLETIONS:
      return [ _ConvertCompletionData( e ) for e in entries ]
names = []
namelength = 0
for e in entries:
name = e[ 'name' ]
namelength = max( namelength, len( name ) )
names.append( name )
detailed_entries = self._SendRequest( 'completionEntryDetails', {
'file': request_data[ 'filepath' ],
'line': request_data[ 'line_num' ],
'offset': request_data[ 'start_codepoint' ],
'entryNames': names
} )
return [ _ConvertDetailedCompletionData( e, namelength )
for e in detailed_entries ]
def GetSubcommandsMap( self ):
return {
'RestartServer' : ( lambda self, request_data, args:
self._RestartServer( request_data ) ),
'StopServer' : ( lambda self, request_data, args:
self._StopServer() ),
'GoToDefinition' : ( lambda self, request_data, args:
self._GoToDefinition( request_data ) ),
'GoToReferences' : ( lambda self, request_data, args:
self._GoToReferences( request_data ) ),
'GoToType' : ( lambda self, request_data, args:
self._GoToType( request_data ) ),
'GetType' : ( lambda self, request_data, args:
self._GetType( request_data ) ),
'GetDoc' : ( lambda self, request_data, args:
self._GetDoc( request_data ) ),
'RefactorRename' : ( lambda self, request_data, args:
self._RefactorRename( request_data, args ) ),
}
def OnBufferVisit( self, request_data ):
filename = request_data[ 'filepath' ]
self._SendCommand( 'open', { 'file': filename } )
def OnBufferUnload( self, request_data ):
filename = request_data[ 'filepath' ]
self._SendCommand( 'close', { 'file': filename } )
def OnFileReadyToParse( self, request_data ):
self._Reload( request_data )
def _GoToDefinition( self, request_data ):
self._Reload( request_data )
try:
filespans = self._SendRequest( 'definition', {
'file': request_data[ 'filepath' ],
'line': request_data[ 'line_num' ],
'offset': request_data[ 'column_codepoint' ]
} )
span = filespans[ 0 ]
return responses.BuildGoToResponseFromLocation(
_BuildLocation( utils.SplitLines( GetFileContents( request_data,
span[ 'file' ] ) ),
span[ 'file' ],
span[ 'start' ][ 'line' ],
span[ 'start' ][ 'offset' ] ) )
except RuntimeError:
raise RuntimeError( 'Could not find definition' )
def _GoToReferences( self, request_data ):
self._Reload( request_data )
response = self._SendRequest( 'references', {
'file': request_data[ 'filepath' ],
'line': request_data[ 'line_num' ],
'offset': request_data[ 'column_codepoint' ]
} )
return [
responses.BuildGoToResponseFromLocation(
_BuildLocation( utils.SplitLines( GetFileContents( request_data,
ref[ 'file' ] ) ),
ref[ 'file' ],
ref[ 'start' ][ 'line' ],
ref[ 'start' ][ 'offset' ] ),
ref[ 'lineText' ] )
for ref in response[ 'refs' ]
]
def _GoToType( self, request_data ):
self._Reload( request_data )
try:
filespans = self._SendRequest( 'typeDefinition', {
'file': request_data[ 'filepath' ],
'line': request_data[ 'line_num' ],
'offset': request_data[ 'column_num' ]
} )
span = filespans[ 0 ]
return responses.BuildGoToResponse(
filepath = span[ 'file' ],
line_num = span[ 'start' ][ 'line' ],
column_num = span[ 'start' ][ 'offset' ]
)
except RuntimeError:
raise RuntimeError( 'Could not find type definition' )
def _GetType( self, request_data ):
self._Reload( request_data )
info = self._SendRequest( 'quickinfo', {
'file': request_data[ 'filepath' ],
'line': request_data[ 'line_num' ],
'offset': request_data[ 'column_codepoint' ]
} )
return responses.BuildDisplayMessageResponse( info[ 'displayString' ] )
def _GetDoc( self, request_data ):
self._Reload( request_data )
info = self._SendRequest( 'quickinfo', {
'file': request_data[ 'filepath' ],
'line': request_data[ 'line_num' ],
'offset': request_data[ 'column_codepoint' ]
} )
message = '{0}\n\n{1}'.format( info[ 'displayString' ],
info[ 'documentation' ] )
return responses.BuildDetailedInfoResponse( message )
def _RefactorRename( self, request_data, args ):
if len( args ) != 1:
raise ValueError( 'Please specify a new name to rename it to.\n'
'Usage: RefactorRename <new name>' )
self._Reload( request_data )
response = self._SendRequest( 'rename', {
'file': request_data[ 'filepath' ],
'line': request_data[ 'line_num' ],
'offset': request_data[ 'column_codepoint' ],
'findInComments': False,
'findInStrings': False,
} )
if not response[ 'info' ][ 'canRename' ]:
raise RuntimeError( 'Value cannot be renamed: {0}'.format(
response[ 'info' ][ 'localizedErrorMessage' ] ) )
# The format of the response is:
#
# body {
# info {
# ...
# triggerSpan: {
# length: original_length
# }
# }
#
# locs [ {
# file: file_path
# locs: [
# start: {
# line: line_num
# offset: offset
# }
# end {
# line: line_num
# offset: offset
# }
# ] }
# ]
# }
#
new_name = args[ 0 ]
location = responses.Location( request_data[ 'line_num' ],
request_data[ 'column_num' ],
request_data[ 'filepath' ] )
chunks = []
for file_replacement in response[ 'locs' ]:
chunks.extend( _BuildFixItChunksForFile( request_data,
new_name,
file_replacement ) )
return responses.BuildFixItResponse( [
responses.FixIt( location, chunks )
] )
def _RestartServer( self, request_data ):
with self._server_lock:
self._StopServer()
self._StartServer()
      # A restarted TSServer loses all information about the files we were
      # working on, so a completion request issued right after the restart
      # would time out. Notify the server that we're working on the current
      # buffer again.
self.OnBufferVisit( request_data )
def _StopServer( self ):
with self._server_lock:
if self._ServerIsRunning():
_logger.info( 'Stopping TSServer with PID {0}'.format(
self._tsserver_handle.pid ) )
self._SendCommand( 'exit' )
try:
utils.WaitUntilProcessIsTerminated( self._tsserver_handle,
timeout = 5 )
_logger.info( 'TSServer stopped' )
except RuntimeError:
_logger.exception( 'Error while stopping TSServer' )
self._CleanUp()
def _CleanUp( self ):
utils.CloseStandardStreams( self._tsserver_handle )
self._tsserver_handle = None
if not self.user_options[ 'server_keep_logfiles' ]:
utils.RemoveIfExists( self._logfile )
self._logfile = None
def Shutdown( self ):
self._StopServer()
def DebugInfo( self, request_data ):
with self._server_lock:
if self._ServerIsRunning():
return ( 'TypeScript completer debug information:\n'
' TSServer running\n'
' TSServer process ID: {0}\n'
' TSServer executable: {1}\n'
' TSServer logfile: {2}'.format( self._tsserver_handle.pid,
PATH_TO_TSSERVER,
self._logfile ) )
if self._logfile:
return ( 'TypeScript completer debug information:\n'
' TSServer no longer running\n'
' TSServer executable: {0}\n'
' TSServer logfile: {1}'.format( PATH_TO_TSSERVER,
self._logfile ) )
return ( 'TypeScript completer debug information:\n'
' TSServer is not running\n'
' TSServer executable: {0}'.format( PATH_TO_TSSERVER ) )
def _LogLevel():
return 'verbose' if _logger.isEnabledFor( logging.DEBUG ) else 'normal'
def _ConvertCompletionData( completion_data ):
return responses.BuildCompletionData(
insertion_text = completion_data[ 'name' ],
menu_text = completion_data[ 'name' ],
kind = completion_data[ 'kind' ],
extra_data = completion_data[ 'kind' ]
)
def _ConvertDetailedCompletionData( completion_data, padding = 0 ):
name = completion_data[ 'name' ]
display_parts = completion_data[ 'displayParts' ]
signature = ''.join( [ p[ 'text' ] for p in display_parts ] )
  # Strip newlines and indentation from the signature.
  signature = re.sub( r'\s+', ' ', signature )
menu_text = '{0} {1}'.format( name.ljust( padding ), signature )
return responses.BuildCompletionData(
insertion_text = name,
menu_text = menu_text,
kind = completion_data[ 'kind' ]
)
def _BuildFixItChunkForRange( new_name,
file_contents,
file_name,
source_range ):
""" returns list FixItChunk for a tsserver source range """
return responses.FixItChunk(
new_name,
responses.Range(
start = _BuildLocation( file_contents,
file_name,
source_range[ 'start' ][ 'line' ],
source_range[ 'start' ][ 'offset' ] ),
end = _BuildLocation( file_contents,
file_name,
source_range[ 'end' ][ 'line' ],
source_range[ 'end' ][ 'offset' ] ) ) )
def _BuildFixItChunksForFile( request_data, new_name, file_replacement ):
""" returns a list of FixItChunk for each replacement range for the
supplied file"""
  # On Windows, tsserver annoyingly returns file paths as C:/blah/blah,
  # whereas all other paths in Python are of the C:\\blah\\blah form. We use
  # normpath to have Python do the conversion for us.
file_path = os.path.normpath( file_replacement[ 'file' ] )
file_contents = utils.SplitLines( GetFileContents( request_data, file_path ) )
return [ _BuildFixItChunkForRange( new_name, file_contents, file_path, r )
for r in file_replacement[ 'locs' ] ]
def _BuildLocation( file_contents, filename, line, offset ):
return responses.Location(
line = line,
# tsserver returns codepoint offsets, but we need byte offsets, so we must
# convert
column = utils.CodepointOffsetToByteOffset( file_contents[ line - 1 ],
offset ),
filename = filename )
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2016, Abdoul Bah (@helldorado) <bahabdoul at gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: proxmox_kvm
short_description: Management of Qemu(KVM) Virtual Machines in a Proxmox VE cluster.
description:
  - Allows you to create/delete/stop Qemu(KVM) Virtual Machines in a Proxmox VE cluster.
version_added: "2.3"
author: "Abdoul Bah (@helldorado) <bahabdoul at gmail.com>"
options:
acpi:
description:
- Specify if ACPI should be enabled/disabled.
type: bool
default: 'yes'
agent:
description:
- Specify if the QEMU Guest Agent should be enabled/disabled.
type: bool
args:
description:
- Pass arbitrary arguments to kvm.
- This option is for experts only!
default: "-serial unix:/var/run/qemu-server/VMID.serial,server,nowait"
api_host:
description:
- Specify the target host of the Proxmox VE cluster.
required: true
api_user:
description:
- Specify the user to authenticate with.
required: true
api_password:
description:
- Specify the password to authenticate with.
- You can use C(PROXMOX_PASSWORD) environment variable.
autostart:
description:
- Specify if the VM should be automatically restarted after crash (currently ignored in PVE API).
type: bool
default: 'no'
balloon:
description:
- Specify the amount of RAM for the VM in MB.
- Using zero disables the balloon driver.
default: 0
bios:
description:
- Specify the BIOS implementation.
choices: ['seabios', 'ovmf']
boot:
description:
- Specify the boot order -> boot on floppy C(a), hard disk C(c), CD-ROM C(d), or network C(n).
      - You can combine these to set the boot order.
default: cnd
bootdisk:
description:
- Enable booting from specified disk. C((ide|sata|scsi|virtio)\d+)
clone:
description:
      - Name of VM to be cloned. If C(vmid) is set, C(clone) can take an arbitrary value but is still required to initiate the clone.
cores:
description:
- Specify number of cores per socket.
default: 1
cpu:
description:
- Specify emulated CPU type.
default: kvm64
cpulimit:
description:
      - Specify if CPU usage will be limited. Value 0 indicates no CPU limit.
      - If the computer has 2 CPUs, it has a total of '2' CPU time.
cpuunits:
description:
- Specify CPU weight for a VM.
      - You can disable the fair-scheduler configuration by setting this to 0.
default: 1000
delete:
description:
- Specify a list of settings you want to delete.
description:
description:
- Specify the description for the VM. Only used on the configuration web interface.
- This is saved as comment inside the configuration file.
digest:
description:
      - Prevent changes if the current configuration file has a different SHA1 digest.
- This can be used to prevent concurrent modifications.
force:
description:
      - Allow forced stop of the VM.
- Can be used only with states C(stopped), C(restarted).
type: bool
format:
description:
- Target drive's backing file's data format.
- Used only with clone
default: qcow2
choices: [ "cloop", "cow", "qcow", "qcow2", "qed", "raw", "vmdk" ]
freeze:
description:
- Specify if PVE should freeze CPU at startup (use 'c' monitor command to start execution).
type: bool
full:
description:
      - Create a full copy of all disks. This is always done when you clone a normal VM.
- For VM templates, we try to create a linked clone by default.
- Used only with clone
type: bool
default: 'yes'
hostpci:
description:
      - Specify a hash/dictionary of host PCI devices to map into the guest. C(hostpci='{"key":"value", "key":"value"}').
- Keys allowed are - C(hostpci[n]) where 0 ≤ n ≤ N.
- Values allowed are - C("host="HOSTPCIID[;HOSTPCIID2...]",pcie="1|0",rombar="1|0",x-vga="1|0"").
- The C(host) parameter is Host PCI device pass through. HOSTPCIID syntax is C(bus:dev.func) (hexadecimal numbers).
- C(pcie=boolean) I(default=0) Choose the PCI-express bus (needs the q35 machine model).
- C(rombar=boolean) I(default=1) Specify whether or not the device's ROM will be visible in the guest's memory map.
- C(x-vga=boolean) I(default=0) Enable vfio-vga device support.
- /!\ This option allows direct access to host hardware. So it is no longer possible to migrate such machines - use with special care.
hotplug:
description:
- Selectively enable hotplug features.
- This is a comma separated list of hotplug features C('network', 'disk', 'cpu', 'memory' and 'usb').
- Value 0 disables hotplug completely and value 1 is an alias for the default C('network,disk,usb').
hugepages:
description:
- Enable/disable hugepages memory.
choices: ['any', '2', '1024']
ide:
description:
- A hash/dictionary of volume used as IDE hard disk or CD-ROM. C(ide='{"key":"value", "key":"value"}').
- Keys allowed are - C(ide[n]) where 0 ≤ n ≤ 3.
- Values allowed are - C("storage:size,format=value").
- C(storage) is the storage identifier where to create the disk.
- C(size) is the size of the disk in GB.
- C(format) is the drive's backing file's data format. C(qcow2|raw|subvol).
keyboard:
description:
- Sets the keyboard layout for VNC server.
kvm:
description:
- Enable/disable KVM hardware virtualization.
type: bool
default: 'yes'
localtime:
description:
- Sets the real time clock to local time.
- This is enabled by default if ostype indicates a Microsoft OS.
type: bool
lock:
description:
- Lock/unlock the VM.
choices: ['migrate', 'backup', 'snapshot', 'rollback']
machine:
description:
- Specifies the Qemu machine type.
- type => C((pc|pc(-i440fx)?-\d+\.\d+(\.pxe)?|q35|pc-q35-\d+\.\d+(\.pxe)?))
memory:
description:
- Memory size in MB for instance.
default: 512
migrate_downtime:
description:
- Sets maximum tolerated downtime (in seconds) for migrations.
migrate_speed:
description:
- Sets maximum speed (in MB/s) for migrations.
- A value of 0 is no limit.
name:
description:
- Specifies the VM name. Only used on the configuration web interface.
- Required only for C(state=present).
net:
description:
- A hash/dictionary of network interfaces for the VM. C(net='{"key":"value", "key":"value"}').
- Keys allowed are - C(net[n]) where 0 ≤ n ≤ N.
- Values allowed are - C("model="XX:XX:XX:XX:XX:XX",bridge="value",rate="value",tag="value",firewall="1|0",trunks="vlanid"").
- Model is one of C(e1000 e1000-82540em e1000-82544gc e1000-82545em i82551 i82557b i82559er ne2k_isa ne2k_pci pcnet rtl8139 virtio vmxnet3).
      - C(XX:XX:XX:XX:XX:XX) should be a unique MAC address. This is automatically generated if not specified.
- The C(bridge) parameter can be used to automatically add the interface to a bridge device. The Proxmox VE standard bridge is called 'vmbr0'.
      - Option C(rate) is used to limit traffic bandwidth from and to this interface. It is specified as a floating point number; the unit is 'Megabytes per second'.
- If you specify no bridge, we create a kvm 'user' (NATed) network device, which provides DHCP and DNS services.
newid:
description:
- VMID for the clone. Used only with clone.
- If newid is not set, the next available VM ID will be fetched from ProxmoxAPI.
node:
description:
- Proxmox VE node, where the new VM will be created.
- Only required for C(state=present).
- For other states, it will be autodiscovered.
numa:
description:
- A hash/dictionaries of NUMA topology. C(numa='{"key":"value", "key":"value"}').
- Keys allowed are - C(numa[n]) where 0 ≤ n ≤ N.
- Values allowed are - C("cpu="<id[-id];...>",hostnodes="<id[-id];...>",memory="number",policy="(bind|interleave|preferred)"").
- C(cpus) CPUs accessing this NUMA node.
- C(hostnodes) Host NUMA nodes to use.
- C(memory) Amount of memory this NUMA node provides.
- C(policy) NUMA allocation policy.
onboot:
description:
- Specifies whether a VM will be started during system bootup.
type: bool
default: 'yes'
ostype:
description:
- Specifies guest operating system. This is used to enable special optimization/features for specific operating systems.
      - C(l26) is the Linux 2.6/3.X kernel.
choices: ['other', 'wxp', 'w2k', 'w2k3', 'w2k8', 'wvista', 'win7', 'win8', 'l24', 'l26', 'solaris']
default: l26
parallel:
description:
- A hash/dictionary of map host parallel devices. C(parallel='{"key":"value", "key":"value"}').
- Keys allowed are - (parallel[n]) where 0 ≤ n ≤ 2.
- Values allowed are - C("/dev/parport\d+|/dev/usb/lp\d+").
pool:
description:
- Add the new VM to the specified pool.
protection:
description:
- Enable/disable the protection flag of the VM. This will enable/disable the remove VM and remove disk operations.
type: bool
reboot:
description:
      - Allow reboot. If set to C(yes), the VM exits on reboot.
type: bool
revert:
description:
- Revert a pending change.
sata:
description:
- A hash/dictionary of volume used as sata hard disk or CD-ROM. C(sata='{"key":"value", "key":"value"}').
- Keys allowed are - C(sata[n]) where 0 ≤ n ≤ 5.
- Values allowed are - C("storage:size,format=value").
- C(storage) is the storage identifier where to create the disk.
- C(size) is the size of the disk in GB.
- C(format) is the drive's backing file's data format. C(qcow2|raw|subvol).
scsi:
description:
- A hash/dictionary of volume used as SCSI hard disk or CD-ROM. C(scsi='{"key":"value", "key":"value"}').
      - Keys allowed are - C(scsi[n]) where 0 ≤ n ≤ 13.
- Values allowed are - C("storage:size,format=value").
- C(storage) is the storage identifier where to create the disk.
- C(size) is the size of the disk in GB.
- C(format) is the drive's backing file's data format. C(qcow2|raw|subvol).
scsihw:
description:
- Specifies the SCSI controller model.
choices: ['lsi', 'lsi53c810', 'virtio-scsi-pci', 'virtio-scsi-single', 'megasas', 'pvscsi']
serial:
description:
- A hash/dictionary of serial device to create inside the VM. C('{"key":"value", "key":"value"}').
- Keys allowed are - serial[n](str; required) where 0 ≤ n ≤ 3.
- Values allowed are - C((/dev/.+|socket)).
- /!\ If you pass through a host serial device, it is no longer possible to migrate such machines - use with special care.
shares:
description:
      - Sets the amount of memory shares for auto-ballooning. (0 - 50000).
- The larger the number is, the more memory this VM gets.
- The number is relative to weights of all other running VMs.
      - Using 0 disables auto-ballooning, which means no limit.
skiplock:
description:
- Ignore locks
- Only root is allowed to use this option.
smbios:
description:
- Specifies SMBIOS type 1 fields.
snapname:
description:
- The name of the snapshot. Used only with clone.
sockets:
description:
- Sets the number of CPU sockets. (1 - N).
default: 1
startdate:
description:
- Sets the initial date of the real time clock.
- Valid format for date are C('now') or C('2016-09-25T16:01:21') or C('2016-09-25').
startup:
description:
- Startup and shutdown behavior. C([[order=]\d+] [,up=\d+] [,down=\d+]).
- Order is a non-negative number defining the general startup order.
      - Shutdown is done in reverse order.
state:
description:
- Indicates desired state of the instance.
- If C(current), the current state of the VM will be fetched. You can access it with C(results.status)
choices: ['present', 'started', 'absent', 'stopped', 'restarted','current']
default: present
storage:
description:
- Target storage for full clone.
tablet:
description:
- Enables/disables the USB tablet device.
type: bool
default: 'no'
target:
description:
- Target node. Only allowed if the original VM is on shared storage.
- Used only with clone
tdf:
description:
- Enables/disables time drift fix.
type: bool
template:
description:
- Enables/disables the template.
type: bool
default: 'no'
timeout:
description:
- Timeout for operations.
default: 30
update:
description:
      - If C(yes), the VM will be updated with the new values.
      - Because of how the API operates and for safety reasons, updating the following parameters is disabled -
      - C(net, virtio, ide, sata, scsi). For example, updating C(net) would change the MAC address and updating C(virtio) would always create a new disk.
type: bool
default: 'no'
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be used on personally controlled sites using self-signed certificates.
type: bool
default: 'no'
vcpus:
description:
- Sets number of hotplugged vcpus.
vga:
description:
- Select VGA type. If you want to use high resolution modes (>= 1280x1024x16) then you should use option 'std' or 'vmware'.
choices: ['std', 'cirrus', 'vmware', 'qxl', 'serial0', 'serial1', 'serial2', 'serial3', 'qxl2', 'qxl3', 'qxl4']
default: std
virtio:
description:
- A hash/dictionary of volume used as VIRTIO hard disk. C(virtio='{"key":"value", "key":"value"}').
      - Keys allowed are - C(virtio[n]) where 0 ≤ n ≤ 15.
- Values allowed are - C("storage:size,format=value").
- C(storage) is the storage identifier where to create the disk.
- C(size) is the size of the disk in GB.
- C(format) is the drive's backing file's data format. C(qcow2|raw|subvol).
vmid:
description:
      - Specifies the VM ID. You may use the I(name) parameter instead.
- If vmid is not set, the next available VM ID will be fetched from ProxmoxAPI.
watchdog:
description:
- Creates a virtual hardware watchdog device.
requirements: [ "proxmoxer", "requests" ]
'''
EXAMPLES = '''
# Create new VM with minimal options
- proxmox_kvm:
api_user : root@pam
api_password: secret
api_host : helldorado
name : spynal
node : sabrewulf
# Create new VM with minimal options and given vmid
- proxmox_kvm:
api_user : root@pam
api_password: secret
api_host : helldorado
name : spynal
node : sabrewulf
vmid : 100
# Create new VM with two network interface options.
- proxmox_kvm:
api_user : root@pam
api_password: secret
api_host : helldorado
name : spynal
node : sabrewulf
net : '{"net0":"virtio,bridge=vmbr1,rate=200", "net1":"e1000,bridge=vmbr2,"}'
# Create new VM with one network interface, three virtio hard disks, 4 cores, and 2 vcpus.
- proxmox_kvm:
api_user : root@pam
api_password: secret
api_host : helldorado
name : spynal
node : sabrewulf
net : '{"net0":"virtio,bridge=vmbr1,rate=200"}'
virtio : '{"virtio0":"VMs_LVM:10", "virtio1":"VMs:2,format=qcow2", "virtio2":"VMs:5,format=raw"}'
cores : 4
vcpus : 2
# Clone VM with only source VM name
- proxmox_kvm:
api_user : root@pam
api_password: secret
api_host : helldorado
clone : spynal # The VM source
name : zavala # The target VM name
node : sabrewulf
storage : VMs
format : qcow2
    timeout : 500 # Note: The task can take a while. Adapt the timeout if needed.
# Clone VM with source vmid and target newid and raw format
- proxmox_kvm:
api_user : root@pam
api_password: secret
api_host : helldorado
clone : arbitrary_name
vmid : 108
newid : 152
name : zavala # The target VM name
node : sabrewulf
storage : LVM_STO
format : raw
    timeout : 300 # Note: The task can take a while. Adapt the timeout if needed.
# Create new VM and lock it for snapshot.
- proxmox_kvm:
api_user : root@pam
api_password: secret
api_host : helldorado
name : spynal
node : sabrewulf
lock : snapshot
# Create new VM and set protection to disable the remove VM and remove disk operations
- proxmox_kvm:
api_user : root@pam
api_password: secret
api_host : helldorado
name : spynal
node : sabrewulf
protection : yes
# Start VM
- proxmox_kvm:
api_user : root@pam
api_password: secret
api_host : helldorado
name : spynal
node : sabrewulf
state : started
# Stop VM
- proxmox_kvm:
api_user : root@pam
api_password: secret
api_host : helldorado
name : spynal
node : sabrewulf
state : stopped
# Stop VM with force
- proxmox_kvm:
api_user : root@pam
api_password: secret
api_host : helldorado
name : spynal
node : sabrewulf
state : stopped
force : yes
# Restart VM
- proxmox_kvm:
api_user : root@pam
api_password: secret
api_host : helldorado
name : spynal
node : sabrewulf
state : restarted
# Remove VM
- proxmox_kvm:
api_user : root@pam
api_password: secret
api_host : helldorado
name : spynal
node : sabrewulf
state : absent
# Get VM current state
- proxmox_kvm:
api_user : root@pam
api_password: secret
api_host : helldorado
name : spynal
node : sabrewulf
state : current
# Update VM configuration
- proxmox_kvm:
api_user : root@pam
api_password: secret
api_host : helldorado
name : spynal
node : sabrewulf
cores : 8
memory : 16384
update : yes
# Delete QEMU parameters
- proxmox_kvm:
api_user : root@pam
api_password: secret
api_host : helldorado
name : spynal
node : sabrewulf
delete : 'args,template,cpulimit'
# Revert a pending change
- proxmox_kvm:
api_user : root@pam
api_password: secret
api_host : helldorado
name : spynal
node : sabrewulf
revert : 'template,cpulimit'
'''
RETURN = '''
devices:
description: The list of devices created or used.
returned: success
type: dict
sample: '
{
"ide0": "VMS_LVM:vm-115-disk-1",
"ide1": "VMs:115/vm-115-disk-3.raw",
"virtio0": "VMS_LVM:vm-115-disk-2",
"virtio1": "VMs:115/vm-115-disk-1.qcow2",
"virtio2": "VMs:115/vm-115-disk-2.raw"
}'
mac:
    description: List of MAC addresses created and the net[n] they are attached to. Useful when you want to use provisioning systems like Foreman via PXE.
returned: success
type: dict
sample: '
{
"net0": "3E:6E:97:D2:31:9F",
"net1": "B6:A1:FC:EF:78:A4"
}'
vmid:
description: The VM vmid.
returned: success
type: int
sample: 115
status:
description:
- The current virtual machine status.
- Returned only when C(state=current)
returned: success
type: dict
sample: '{
"changed": false,
"msg": "VM kropta with vmid = 110 is running",
"status": "running"
}'
'''
import os
import re
import time
import traceback
from distutils.version import LooseVersion
try:
from proxmoxer import ProxmoxAPI
HAS_PROXMOXER = True
except ImportError:
HAS_PROXMOXER = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
VZ_TYPE = 'qemu'
def get_nextvmid(module, proxmox):
try:
vmid = proxmox.cluster.nextid.get()
return vmid
except Exception as e:
module.fail_json(msg="Unable to get next vmid. Failed with exception: %s" % to_native(e),
exception=traceback.format_exc())
def get_vmid(proxmox, name):
return [vm['vmid'] for vm in proxmox.cluster.resources.get(type='vm') if vm.get('name') == name]
def get_vm(proxmox, vmid):
return [vm for vm in proxmox.cluster.resources.get(type='vm') if vm['vmid'] == int(vmid)]
def node_check(proxmox, node):
return [True for nd in proxmox.nodes.get() if nd['node'] == node]
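# Illustrative shape of the data the helpers above filter:
# proxmox.cluster.resources.get(type='vm') yields dicts such as
#   {'vmid': 100, 'name': 'spynal', 'node': 'sabrewulf', ...}
# and proxmox.nodes.get() yields dicts such as {'node': 'sabrewulf', ...}.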
def get_vminfo(module, proxmox, node, vmid, **kwargs):
global results
results = {}
mac = {}
devices = {}
try:
vm = proxmox.nodes(node).qemu(vmid).config.get()
except Exception as e:
module.fail_json(msg='Getting information for VM with vmid = %s failed with exception: %s' % (vmid, e))
    # Sanitize kwargs. Remove undefined args and ensure True and False are converted to int.
kwargs = dict((k, v) for k, v in kwargs.items() if v is not None)
# Convert all dict in kwargs to elements. For hostpci[n], ide[n], net[n], numa[n], parallel[n], sata[n], scsi[n], serial[n], virtio[n]
for k in list(kwargs.keys()):
if isinstance(kwargs[k], dict):
kwargs.update(kwargs[k])
del kwargs[k]
# Split information by type
for k, v in kwargs.items():
if re.match(r'net[0-9]', k) is not None:
interface = k
k = vm[k]
k = re.search('=(.*?),', k).group(1)
mac[interface] = k
if (re.match(r'virtio[0-9]', k) is not None or
re.match(r'ide[0-9]', k) is not None or
re.match(r'scsi[0-9]', k) is not None or
re.match(r'sata[0-9]', k) is not None):
device = k
k = vm[k]
k = re.search('(.*?),', k).group(1)
devices[device] = k
results['mac'] = mac
results['devices'] = devices
results['vmid'] = int(vmid)
def settings(module, proxmox, vmid, node, name, timeout, **kwargs):
proxmox_node = proxmox.nodes(node)
    # Sanitize kwargs. Remove undefined args and ensure True and False are converted to int.
kwargs = dict((k, v) for k, v in kwargs.items() if v is not None)
if getattr(proxmox_node, VZ_TYPE)(vmid).config.set(**kwargs) is None:
return True
else:
return False
def create_vm(module, proxmox, vmid, newid, node, name, memory, cpu, cores, sockets, timeout, update, **kwargs):
# Available only in PVE 4
only_v4 = ['force', 'protection', 'skiplock']
    # Valid clone parameters
valid_clone_params = ['format', 'full', 'pool', 'snapname', 'storage', 'target']
clone_params = {}
# Default args for vm. Note: -args option is for experts only. It allows you to pass arbitrary arguments to kvm.
vm_args = "-serial unix:/var/run/qemu-server/{0}.serial,server,nowait".format(vmid)
proxmox_node = proxmox.nodes(node)
    # Sanitize kwargs. Remove undefined args and ensure True and False are converted to int.
kwargs = dict((k, v) for k, v in kwargs.items() if v is not None)
kwargs.update(dict([k, int(v)] for k, v in kwargs.items() if isinstance(v, bool)))
# The features work only on PVE 4
if PVE_MAJOR_VERSION < 4:
for p in only_v4:
if p in kwargs:
del kwargs[p]
# If update, don't update disk (virtio, ide, sata, scsi) and network interface
if update:
if 'virtio' in kwargs:
del kwargs['virtio']
if 'sata' in kwargs:
del kwargs['sata']
if 'scsi' in kwargs:
del kwargs['scsi']
if 'ide' in kwargs:
del kwargs['ide']
if 'net' in kwargs:
del kwargs['net']
# Convert all dict in kwargs to elements. For hostpci[n], ide[n], net[n], numa[n], parallel[n], sata[n], scsi[n], serial[n], virtio[n]
for k in list(kwargs.keys()):
if isinstance(kwargs[k], dict):
kwargs.update(kwargs[k])
del kwargs[k]
    # Rename numa_enabled to numa, according to the API documentation.
if 'numa_enabled' in kwargs:
kwargs['numa'] = kwargs['numa_enabled']
del kwargs['numa_enabled']
# -args and skiplock require root@pam user
if module.params['api_user'] == "root@pam" and module.params['args'] is None:
if not update:
kwargs['args'] = vm_args
elif module.params['api_user'] == "root@pam" and module.params['args'] is not None:
kwargs['args'] = module.params['args']
elif module.params['api_user'] != "root@pam" and module.params['args'] is not None:
        module.fail_json(msg='args parameter requires the root@pam user.')
if module.params['api_user'] != "root@pam" and module.params['skiplock'] is not None:
        module.fail_json(msg='skiplock parameter requires the root@pam user.')
if update:
if getattr(proxmox_node, VZ_TYPE)(vmid).config.set(name=name, memory=memory, cpu=cpu, cores=cores, sockets=sockets, **kwargs) is None:
return True
else:
return False
elif module.params['clone'] is not None:
for param in valid_clone_params:
if module.params[param] is not None:
clone_params[param] = module.params[param]
clone_params.update(dict([k, int(v)] for k, v in clone_params.items() if isinstance(v, bool)))
taskid = proxmox_node.qemu(vmid).clone.post(newid=newid, name=name, **clone_params)
else:
taskid = getattr(proxmox_node, VZ_TYPE).create(vmid=vmid, name=name, memory=memory, cpu=cpu, cores=cores, sockets=sockets, **kwargs)
while timeout:
if (proxmox_node.tasks(taskid).status.get()['status'] == 'stopped' and
proxmox_node.tasks(taskid).status.get()['exitstatus'] == 'OK'):
return True
timeout = timeout - 1
if timeout == 0:
            module.fail_json(msg='Reached timeout while waiting for VM creation. Last line in task before timeout: %s' %
                             proxmox_node.tasks(taskid).log.get()[:1])
time.sleep(1)
return False
def start_vm(module, proxmox, vm, vmid, timeout):
taskid = getattr(proxmox.nodes(vm[0]['node']), VZ_TYPE)(vmid).status.start.post()
while timeout:
if (proxmox.nodes(vm[0]['node']).tasks(taskid).status.get()['status'] == 'stopped' and
proxmox.nodes(vm[0]['node']).tasks(taskid).status.get()['exitstatus'] == 'OK'):
return True
timeout -= 1
if timeout == 0:
            module.fail_json(msg='Reached timeout while waiting for VM start. Last line in task before timeout: %s'
                             % proxmox.nodes(vm[0]['node']).tasks(taskid).log.get()[:1])
time.sleep(1)
return False
def stop_vm(module, proxmox, vm, vmid, timeout, force):
if force:
taskid = getattr(proxmox.nodes(vm[0]['node']), VZ_TYPE)(vmid).status.shutdown.post(forceStop=1)
else:
taskid = getattr(proxmox.nodes(vm[0]['node']), VZ_TYPE)(vmid).status.shutdown.post()
while timeout:
if (proxmox.nodes(vm[0]['node']).tasks(taskid).status.get()['status'] == 'stopped' and
proxmox.nodes(vm[0]['node']).tasks(taskid).status.get()['exitstatus'] == 'OK'):
return True
timeout -= 1
if timeout == 0:
            module.fail_json(msg='Reached timeout while waiting for VM stop. Last line in task before timeout: %s'
                             % proxmox.nodes(vm[0]['node']).tasks(taskid).log.get()[:1])
time.sleep(1)
return False
def proxmox_version(proxmox):
apireturn = proxmox.version.get()
return LooseVersion(apireturn['version'])
def main():
module = AnsibleModule(
argument_spec=dict(
acpi=dict(type='bool', default='yes'),
agent=dict(type='bool'),
args=dict(type='str', default=None),
api_host=dict(required=True),
api_user=dict(required=True),
api_password=dict(no_log=True),
autostart=dict(type='bool', default='no'),
balloon=dict(type='int', default=0),
bios=dict(choices=['seabios', 'ovmf']),
boot=dict(type='str', default='cnd'),
bootdisk=dict(type='str'),
clone=dict(type='str', default=None),
cores=dict(type='int', default=1),
cpu=dict(type='str', default='kvm64'),
cpulimit=dict(type='int'),
cpuunits=dict(type='int', default=1000),
delete=dict(type='str', default=None),
description=dict(type='str'),
digest=dict(type='str'),
force=dict(type='bool', default=None),
format=dict(type='str', default='qcow2', choices=['cloop', 'cow', 'qcow', 'qcow2', 'qed', 'raw', 'vmdk']),
freeze=dict(type='bool'),
full=dict(type='bool', default='yes'),
hostpci=dict(type='dict'),
hotplug=dict(type='str'),
hugepages=dict(choices=['any', '2', '1024']),
ide=dict(type='dict', default=None),
keyboard=dict(type='str'),
kvm=dict(type='bool', default='yes'),
localtime=dict(type='bool'),
lock=dict(choices=['migrate', 'backup', 'snapshot', 'rollback']),
machine=dict(type='str'),
memory=dict(type='int', default=512),
migrate_downtime=dict(type='int'),
migrate_speed=dict(type='int'),
name=dict(type='str'),
net=dict(type='dict'),
newid=dict(type='int', default=None),
node=dict(),
numa=dict(type='dict'),
numa_enabled=dict(type='bool'),
onboot=dict(type='bool', default='yes'),
ostype=dict(default='l26', choices=['other', 'wxp', 'w2k', 'w2k3', 'w2k8', 'wvista', 'win7', 'win8', 'l24', 'l26', 'solaris']),
parallel=dict(type='dict'),
pool=dict(type='str'),
protection=dict(type='bool'),
reboot=dict(type='bool'),
revert=dict(type='str', default=None),
sata=dict(type='dict'),
scsi=dict(type='dict'),
scsihw=dict(choices=['lsi', 'lsi53c810', 'virtio-scsi-pci', 'virtio-scsi-single', 'megasas', 'pvscsi']),
serial=dict(type='dict'),
shares=dict(type='int'),
skiplock=dict(type='bool'),
smbios=dict(type='str'),
snapname=dict(type='str'),
sockets=dict(type='int', default=1),
startdate=dict(type='str'),
startup=dict(),
state=dict(default='present', choices=['present', 'absent', 'stopped', 'started', 'restarted', 'current']),
storage=dict(type='str'),
tablet=dict(type='bool', default='no'),
target=dict(type='str'),
tdf=dict(type='bool'),
template=dict(type='bool', default='no'),
timeout=dict(type='int', default=30),
update=dict(type='bool', default='no'),
validate_certs=dict(type='bool', default='no'),
vcpus=dict(type='int', default=None),
vga=dict(default='std', choices=['std', 'cirrus', 'vmware', 'qxl', 'serial0', 'serial1', 'serial2', 'serial3', 'qxl2', 'qxl3', 'qxl4']),
virtio=dict(type='dict', default=None),
vmid=dict(type='int', default=None),
watchdog=dict(),
),
mutually_exclusive=[('delete', 'revert'), ('delete', 'update'), ('revert', 'update'), ('clone', 'update'), ('clone', 'delete'), ('clone', 'revert')],
required_one_of=[('name', 'vmid',)],
required_if=[('state', 'present', ['node'])]
)
if not HAS_PROXMOXER:
module.fail_json(msg='proxmoxer required for this module')
api_user = module.params['api_user']
api_host = module.params['api_host']
api_password = module.params['api_password']
clone = module.params['clone']
cpu = module.params['cpu']
cores = module.params['cores']
delete = module.params['delete']
memory = module.params['memory']
name = module.params['name']
newid = module.params['newid']
node = module.params['node']
revert = module.params['revert']
sockets = module.params['sockets']
state = module.params['state']
timeout = module.params['timeout']
update = bool(module.params['update'])
vmid = module.params['vmid']
validate_certs = module.params['validate_certs']
# If password not set get it from PROXMOX_PASSWORD env
if not api_password:
try:
api_password = os.environ['PROXMOX_PASSWORD']
except KeyError as e:
            module.fail_json(msg='You should set the api_password parameter or use the PROXMOX_PASSWORD environment variable')
try:
proxmox = ProxmoxAPI(api_host, user=api_user, password=api_password, verify_ssl=validate_certs)
global VZ_TYPE
global PVE_MAJOR_VERSION
PVE_MAJOR_VERSION = 3 if proxmox_version(proxmox) < LooseVersion('4.0') else 4
except Exception as e:
module.fail_json(msg='authorization on proxmox cluster failed with exception: %s' % e)
# If vmid not set get the Next VM id from ProxmoxAPI
# If vm name is set get the VM id from ProxmoxAPI
if not vmid:
if state == 'present' and (not update and not clone) and (not delete and not revert):
try:
vmid = get_nextvmid(module, proxmox)
except Exception as e:
module.fail_json(msg="Can't get the next vmid for VM {0} automatically. Ensure your cluster state is good".format(name))
else:
try:
if not clone:
vmid = get_vmid(proxmox, name)[0]
else:
vmid = get_vmid(proxmox, clone)[0]
except Exception as e:
if not clone:
module.fail_json(msg="VM {0} does not exist in cluster.".format(name))
else:
module.fail_json(msg="VM {0} does not exist in cluster.".format(clone))
if clone is not None:
if get_vmid(proxmox, name):
module.exit_json(changed=False, msg="VM with name <%s> already exists" % name)
if vmid is not None:
vm = get_vm(proxmox, vmid)
if not vm:
module.fail_json(msg='VM with vmid = %s does not exist in cluster' % vmid)
if not newid:
try:
newid = get_nextvmid(module, proxmox)
except Exception as e:
module.fail_json(msg="Can't get the next vmid for VM {0} automatically. Ensure your cluster state is good".format(name))
else:
vm = get_vm(proxmox, newid)
if vm:
module.exit_json(changed=False, msg="vmid %s with VM name %s already exists" % (newid, name))
if delete is not None:
try:
settings(module, proxmox, vmid, node, name, timeout, delete=delete)
module.exit_json(changed=True, msg="Settings has deleted on VM {0} with vmid {1}".format(name, vmid))
except Exception as e:
module.fail_json(msg='Unable to delete settings on VM {0} with vmid {1}: '.format(name, vmid) + str(e))
elif revert is not None:
try:
settings(module, proxmox, vmid, node, name, timeout, revert=revert)
module.exit_json(changed=True, msg="Settings has reverted on VM {0} with vmid {1}".format(name, vmid))
except Exception as e:
            module.fail_json(msg='Unable to revert settings on VM {0} with vmid {1}: Maybe it is not a pending task... '.format(name, vmid) + str(e))
if state == 'present':
try:
if get_vm(proxmox, vmid) and not (update or clone):
module.exit_json(changed=False, msg="VM with vmid <%s> already exists" % vmid)
elif get_vmid(proxmox, name) and not (update or clone):
module.exit_json(changed=False, msg="VM with name <%s> already exists" % name)
            # The original check `not (node, name)` tested a non-empty tuple,
            # which is always truthy, so it could never fire.
            elif not (node and name):
                module.fail_json(msg='node and name are mandatory for creating/updating a VM')
elif not node_check(proxmox, node):
module.fail_json(msg="node '%s' does not exist in cluster" % node)
create_vm(module, proxmox, vmid, newid, node, name, memory, cpu, cores, sockets, timeout, update,
acpi=module.params['acpi'],
agent=module.params['agent'],
autostart=module.params['autostart'],
balloon=module.params['balloon'],
bios=module.params['bios'],
boot=module.params['boot'],
bootdisk=module.params['bootdisk'],
cpulimit=module.params['cpulimit'],
cpuunits=module.params['cpuunits'],
description=module.params['description'],
digest=module.params['digest'],
force=module.params['force'],
freeze=module.params['freeze'],
hostpci=module.params['hostpci'],
hotplug=module.params['hotplug'],
hugepages=module.params['hugepages'],
ide=module.params['ide'],
keyboard=module.params['keyboard'],
kvm=module.params['kvm'],
localtime=module.params['localtime'],
lock=module.params['lock'],
machine=module.params['machine'],
migrate_downtime=module.params['migrate_downtime'],
migrate_speed=module.params['migrate_speed'],
net=module.params['net'],
numa=module.params['numa'],
numa_enabled=module.params['numa_enabled'],
onboot=module.params['onboot'],
ostype=module.params['ostype'],
parallel=module.params['parallel'],
pool=module.params['pool'],
protection=module.params['protection'],
reboot=module.params['reboot'],
sata=module.params['sata'],
scsi=module.params['scsi'],
scsihw=module.params['scsihw'],
serial=module.params['serial'],
shares=module.params['shares'],
skiplock=module.params['skiplock'],
smbios1=module.params['smbios'],
snapname=module.params['snapname'],
startdate=module.params['startdate'],
startup=module.params['startup'],
tablet=module.params['tablet'],
target=module.params['target'],
tdf=module.params['tdf'],
template=module.params['template'],
vcpus=module.params['vcpus'],
vga=module.params['vga'],
virtio=module.params['virtio'],
watchdog=module.params['watchdog'])
if not clone:
get_vminfo(module, proxmox, node, vmid,
ide=module.params['ide'],
net=module.params['net'],
sata=module.params['sata'],
scsi=module.params['scsi'],
virtio=module.params['virtio'])
if update:
module.exit_json(changed=True, msg="VM %s with vmid %s updated" % (name, vmid))
elif clone is not None:
module.exit_json(changed=True, msg="VM %s with newid %s cloned from vm with vmid %s" % (name, newid, vmid))
else:
module.exit_json(changed=True, msg="VM %s with vmid %s deployed" % (name, vmid), **results)
except Exception as e:
if update:
module.fail_json(msg="Unable to update vm {0} with vmid {1}=".format(name, vmid) + str(e))
elif clone is not None:
module.fail_json(msg="Unable to clone vm {0} from vmid {1}=".format(name, vmid) + str(e))
else:
module.fail_json(msg="creation of %s VM %s with vmid %s failed with exception=%s" % (VZ_TYPE, name, vmid, e))
elif state == 'started':
try:
vm = get_vm(proxmox, vmid)
if not vm:
module.fail_json(msg='VM with vmid <%s> does not exist in cluster' % vmid)
if getattr(proxmox.nodes(vm[0]['node']), VZ_TYPE)(vmid).status.current.get()['status'] == 'running':
module.exit_json(changed=False, msg="VM %s is already running" % vmid)
if start_vm(module, proxmox, vm, vmid, timeout):
module.exit_json(changed=True, msg="VM %s started" % vmid)
except Exception as e:
module.fail_json(msg="starting of VM %s failed with exception: %s" % (vmid, e))
elif state == 'stopped':
try:
vm = get_vm(proxmox, vmid)
if not vm:
module.fail_json(msg='VM with vmid = %s does not exist in cluster' % vmid)
if getattr(proxmox.nodes(vm[0]['node']), VZ_TYPE)(vmid).status.current.get()['status'] == 'stopped':
module.exit_json(changed=False, msg="VM %s is already stopped" % vmid)
if stop_vm(module, proxmox, vm, vmid, timeout, force=module.params['force']):
module.exit_json(changed=True, msg="VM %s is shutting down" % vmid)
except Exception as e:
module.fail_json(msg="stopping of VM %s failed with exception: %s" % (vmid, e))
elif state == 'restarted':
try:
vm = get_vm(proxmox, vmid)
if not vm:
module.fail_json(msg='VM with vmid = %s does not exist in cluster' % vmid)
if getattr(proxmox.nodes(vm[0]['node']), VZ_TYPE)(vmid).status.current.get()['status'] == 'stopped':
module.exit_json(changed=False, msg="VM %s is not running" % vmid)
if stop_vm(module, proxmox, vm, vmid, timeout, force=module.params['force']) and start_vm(module, proxmox, vm, vmid, timeout):
module.exit_json(changed=True, msg="VM %s is restarted" % vmid)
except Exception as e:
module.fail_json(msg="restarting of VM %s failed with exception: %s" % (vmid, e))
elif state == 'absent':
try:
vm = get_vm(proxmox, vmid)
if not vm:
module.exit_json(changed=False, msg="VM %s does not exist" % vmid)
if getattr(proxmox.nodes(vm[0]['node']), VZ_TYPE)(vmid).status.current.get()['status'] == 'running':
module.exit_json(changed=False, msg="VM %s is running. Stop it before deletion." % vmid)
taskid = getattr(proxmox.nodes(vm[0]['node']), VZ_TYPE).delete(vmid)
while timeout:
if (proxmox.nodes(vm[0]['node']).tasks(taskid).status.get()['status'] == 'stopped' and
proxmox.nodes(vm[0]['node']).tasks(taskid).status.get()['exitstatus'] == 'OK'):
module.exit_json(changed=True, msg="VM %s removed" % vmid)
timeout -= 1
if timeout == 0:
                    module.fail_json(msg='Reached timeout while waiting for VM removal. Last line in task before timeout: %s'
                                     % proxmox.nodes(vm[0]['node']).tasks(taskid).log.get()[:1])
time.sleep(1)
except Exception as e:
module.fail_json(msg="deletion of VM %s failed with exception: %s" % (vmid, e))
elif state == 'current':
status = {}
try:
vm = get_vm(proxmox, vmid)
if not vm:
module.fail_json(msg='VM with vmid = %s does not exist in cluster' % vmid)
current = getattr(proxmox.nodes(vm[0]['node']), VZ_TYPE)(vmid).status.current.get()['status']
status['status'] = current
if status:
module.exit_json(changed=False, msg="VM %s with vmid = %s is %s" % (name, vmid, current), **status)
except Exception as e:
module.fail_json(msg="Unable to get vm {0} with vmid = {1} status: ".format(name, vmid) + str(e))
if __name__ == '__main__':
main()
|
""" Test Codecs (used by test_charmapcodec)
Written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright 2000 Guido van Rossum.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_map)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_map)
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
### Decoding Map
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
0x78: "abc", # 1-n decoding mapping
b"abc": 0x0078,# 1-n encoding mapping
0x01: None, # decoding mapping to <undefined>
0x79: "", # decoding mapping to <remove character>
})
### Encoding Map
encoding_map = {}
for k,v in decoding_map.items():
encoding_map[v] = k
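# Illustrative round trip through the maps above (the 0x78 entry maps the
# byte 'x' to the string "abc" and back):
#   codecs.charmap_decode(b'x', 'strict', decoding_map)  -> ('abc', 1)
#   codecs.charmap_encode('x', 'strict', encoding_map)   -> (b'abc', 1)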
|
# -*- coding:utf-8 -*-
# Note the class declaration syntax:
class Thing(object):
    # self is required here, otherwise an error is raised
def test(self,hi):
print hi
a = Thing()  # instantiate an object of the class
a.test("hahaha")
print "---------------------------------"
test_things = "Apple Orange Crows Telephone Light Suger"
print "There is not 10 things in that list,let's fix it."
stuff = test_things.split(' ')
more_stuff = ["Mon","Tues","Wed","Thris","Fir","Sat","Sun","MOON"]
while len(stuff)!=10:
    # Note: pop() removes from the end, so the last element comes out first
next = more_stuff.pop()
print "Adding ", next
    # append() adds an element to the end of the list
stuff.append(next)
print "There are %d elements in list " %len(stuff)
print "Here we go: ",stuff
# Note: list indices start at 0!
print stuff[1]
# Note: -1 is the last element; negative indices count backwards from the end
print "stuff[-1] == ",stuff[-1]
print "stuff[-2] == ",stuff[-2]
print stuff.pop()
# Note: join() does not modify the actual elements of the list
print ' '.join(stuff)
# stuff[3:5] is a slice, similar to range()
print '#'.join(stuff[3:5])
print stuff
|
import math
import game_puck_board as board
from utils import *
racket_speed = 50
velocity_x = 0.0
velocity_z = 0.0
initial_pox_x = 0.0
initial_pox_z = 0.0
pos_x = 0.0
pos_z = 0.0
target_pos_x = 0.0
target_pos_z = 0.0
prev_pos_x = 0.0
prev_pos_z = 0.0
width = 2.0
length = 0.5
def setPosition(x,z):
global pos_x, pos_z
pos_x = x
pos_z = z
def reset():
    global pos_x, pos_z, velocity_x, velocity_z, prev_pos_x, prev_pos_z
    pos_x = initial_pox_x
    pos_z = initial_pox_z
    prev_pos_x = pos_x
    prev_pos_z = pos_z
    # The original declared velocity_x/velocity_z global here but never
    # assigned them (and left prev_pos_* as locals, which had no effect);
    # zero the velocity on reset so a stale value does not leak through.
    velocity_x = 0.0
    velocity_z = 0.0
def setMouse(x,y):
global target_pos_x, target_pos_z
x = Clamp(x, 0, 1.0)
y = Clamp(y, 0, 0.5)
target_pos_x = RangeAdjust(x, 0.0, 1.0, board.board_width * -0.5 + (width * 0.5), board.board_width * 0.5 - (width * 0.5))
target_pos_z = RangeAdjust(y, 0.0, 0.5, board.board_length * 0.5 - (length * 0.5), board.board_length * 0.35 - (length * 0.5))
def update(dt):
global pos_x, pos_z, velocity_x, velocity_z, prev_pos_x, prev_pos_z
prev_pos_x = pos_x
prev_pos_z = pos_z
pos_x += (target_pos_x - pos_x) * dt * racket_speed
pos_z += (target_pos_z - pos_z) * dt * racket_speed
velocity_x = pos_x - prev_pos_x
velocity_z = pos_z - prev_pos_z
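# A note on the smoothing in update() (illustrative arithmetic): with
# dt = 0.016 s (~60 fps) and racket_speed = 50, each frame moves the racket
# by dt * racket_speed = 0.8, i.e. 80% of the remaining distance to the
# target, giving an exponential ease toward the mouse position.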
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""libcloud driver for the Linode(R) API
This driver implements all libcloud functionality for the Linode API.
Since the API is a bit more fine-grained, create_node abstracts a significant
amount of work (and may take a while to run).
Linode home page http://www.linode.com/
Linode API documentation http://www.linode.com/api/
Alternate bindings for reference http://github.com/tjfontaine/linode-python
Linode(R) is a registered trademark of Linode, LLC.
"""
import os
import re
try:
import simplejson as json
except ImportError:
import json
import itertools
import binascii
from datetime import datetime
from copy import copy
from libcloud.utils.py3 import PY3, httplib
from libcloud.utils.networking import is_private_subnet
from libcloud.common.linode import (API_ROOT, LinodeException,
LinodeConnection, LinodeConnectionV4,
LinodeDisk, LinodeIPAddress,
LinodeExceptionV4,
LINODE_PLAN_IDS, LINODE_DISK_FILESYSTEMS,
LINODE_DISK_FILESYSTEMS_V4,
DEFAULT_API_VERSION)
from libcloud.compute.types import Provider, NodeState, StorageVolumeState
from libcloud.compute.base import NodeDriver, NodeSize, Node, NodeLocation
from libcloud.compute.base import NodeAuthPassword, NodeAuthSSHKey
from libcloud.compute.base import NodeImage, StorageVolume
class LinodeNodeDriver(NodeDriver):
name = 'Linode'
website = 'http://www.linode.com/'
type = Provider.LINODE
def __new__(cls, key, secret=None, secure=True, host=None, port=None,
api_version=DEFAULT_API_VERSION, region=None, **kwargs):
if cls is LinodeNodeDriver:
if api_version == '3.0':
cls = LinodeNodeDriverV3
elif api_version == '4.0':
cls = LinodeNodeDriverV4
else:
raise NotImplementedError(
'No Linode driver found for API version: %s' %
(api_version))
return super(LinodeNodeDriver, cls).__new__(cls)
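# Illustrative sketch (an assumption, not upstream code): the api_version
# keyword routes __new__ to the matching subclass; 'my-api-key' is a
# placeholder credential.
#
#     driver_v3 = LinodeNodeDriver('my-api-key', api_version='3.0')
#     driver_v4 = LinodeNodeDriver('my-api-key', api_version='4.0')
#     # any other version string raises NotImplementedError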
class LinodeNodeDriverV3(LinodeNodeDriver):
"""libcloud driver for the Linode API
Rough mapping of which is which:
- list_nodes linode.list
- reboot_node linode.reboot
- destroy_node linode.delete
- create_node linode.create, linode.update,
linode.disk.createfromdistribution,
linode.disk.create, linode.config.create,
linode.ip.addprivate, linode.boot
- list_sizes avail.linodeplans
- list_images avail.distributions
- list_locations avail.datacenters
- list_volumes linode.disk.list
- destroy_volume linode.disk.delete
For more information on the Linode API, be sure to read the reference:
http://www.linode.com/api/
"""
connectionCls = LinodeConnection
_linode_plan_ids = LINODE_PLAN_IDS
_linode_disk_filesystems = LINODE_DISK_FILESYSTEMS
features = {'create_node': ['ssh_key', 'password']}
def __init__(self, key, secret=None, secure=True, host=None, port=None,
api_version=None, region=None, **kwargs):
"""Instantiate the driver with the given API key
:param key: the API key to use (required)
:type key: ``str``
:rtype: ``None``
"""
self.datacenter = None
NodeDriver.__init__(self, key)
# Converts Linode's state from DB to a NodeState constant.
LINODE_STATES = {
(-2): NodeState.UNKNOWN, # Boot Failed
(-1): NodeState.PENDING, # Being Created
0: NodeState.PENDING, # Brand New
1: NodeState.RUNNING, # Running
2: NodeState.STOPPED, # Powered Off
3: NodeState.REBOOTING, # Shutting Down
4: NodeState.UNKNOWN # Reserved
}
def list_nodes(self):
"""
List all Linodes that the API key can access
This call will return all Linodes that the API key in use has access
to.
If a node is in this list, rebooting will work; however, creation and
destruction are a separate grant.
:return: List of node objects that the API key can access
:rtype: ``list`` of :class:`Node`
"""
params = {"api_action": "linode.list"}
data = self.connection.request(API_ROOT, params=params).objects[0]
return self._to_nodes(data)
def start_node(self, node):
"""
Boot the given Linode
"""
params = {"api_action": "linode.boot", "LinodeID": node.id}
self.connection.request(API_ROOT, params=params)
return True
def stop_node(self, node):
"""
Shutdown the given Linode
"""
params = {"api_action": "linode.shutdown", "LinodeID": node.id}
self.connection.request(API_ROOT, params=params)
return True
def reboot_node(self, node):
"""
Reboot the given Linode
Will issue a shutdown job followed by a boot job, using the last booted
configuration. In most cases, this will be the only configuration.
:param node: the Linode to reboot
:type node: :class:`Node`
:rtype: ``bool``
"""
params = {"api_action": "linode.reboot", "LinodeID": node.id}
self.connection.request(API_ROOT, params=params)
return True
def destroy_node(self, node):
"""Destroy the given Linode
Will remove the Linode from the account and issue a prorated credit. A
grant for removing Linodes from the account is required, otherwise this
method will fail.
In most cases, all disk images must be removed from a Linode before the
Linode can be removed; however, this call explicitly skips those
safeguards. There is no going back from this method.
:param node: the Linode to destroy
:type node: :class:`Node`
:rtype: ``bool``
"""
params = {"api_action": "linode.delete", "LinodeID": node.id,
"skipChecks": True}
self.connection.request(API_ROOT, params=params)
return True
def create_node(self, name, image, size, auth, location=None, ex_swap=None,
ex_rsize=None, ex_kernel=None, ex_payment=None,
ex_comment=None, ex_private=False, lconfig=None,
lroot=None, lswap=None):
"""Create a new Linode, deploy a Linux distribution, and boot
This call abstracts much of the functionality of provisioning a Linode
and getting it booted. A global grant to add Linodes to the account is
required, as this call will result in a billing charge.
Note that there is a safety valve of 5 Linodes per hour, in order to
prevent a runaway script from ruining your day.
:keyword name: the name to assign the Linode (mandatory)
:type name: ``str``
:keyword image: which distribution to deploy on the Linode (mandatory)
:type image: :class:`NodeImage`
:keyword size: the plan size to create (mandatory)
:type size: :class:`NodeSize`
:keyword auth: an SSH key or root password (mandatory)
:type auth: :class:`NodeAuthSSHKey` or :class:`NodeAuthPassword`
:keyword location: which datacenter to create the Linode in
:type location: :class:`NodeLocation`
:keyword ex_swap: size of the swap partition in MB (128)
:type ex_swap: ``int``
:keyword ex_rsize: size of the root partition in MB (plan size - swap).
:type ex_rsize: ``int``
:keyword ex_kernel: a kernel ID from avail.kernels (Latest 2.6 Stable).
:type ex_kernel: ``str``
:keyword ex_payment: one of 1, 12, or 24; subscription length (1)
:type ex_payment: ``int``
:keyword ex_comment: a small comment for the configuration (libcloud)
:type ex_comment: ``str``
:keyword ex_private: whether or not to request a private IP (False)
:type ex_private: ``bool``
:keyword lconfig: what to call the configuration (generated)
:type lconfig: ``str``
:keyword lroot: what to call the root image (generated)
:type lroot: ``str``
:keyword lswap: what to call the swap space (generated)
:type lswap: ``str``
:return: Node representing the newly-created Linode
:rtype: :class:`Node`
"""
auth = self._get_and_check_auth(auth)
# Pick a location (resolves LIBCLOUD-41 in JIRA)
if location:
chosen = location.id
elif self.datacenter:
chosen = self.datacenter
else:
raise LinodeException(0xFB, "Need to select a datacenter first")
# Step 0: Parameter validation before we purchase
# We're especially careful here so we don't fail after purchase, rather
# than getting halfway through the process and having the API fail.
# Plan ID
plans = self.list_sizes()
if size.id not in [p.id for p in plans]:
raise LinodeException(0xFB, "Invalid plan ID -- avail.plans")
# Payment schedule
payment = "1" if not ex_payment else str(ex_payment)
if payment not in ["1", "12", "24"]:
raise LinodeException(0xFB, "Invalid subscription (1, 12, 24)")
ssh = None
root = None
# SSH key and/or root password
if isinstance(auth, NodeAuthSSHKey):
ssh = auth.pubkey # pylint: disable=no-member
elif isinstance(auth, NodeAuthPassword):
root = auth.password
if not ssh and not root:
raise LinodeException(0xFB, "Need SSH key or root password")
if root is not None and len(root) < 6:
raise LinodeException(0xFB, "Root password is too short")
# Swap size
try:
swap = 128 if not ex_swap else int(ex_swap)
except Exception:
raise LinodeException(0xFB, "Need an integer swap size")
# Root partition size
imagesize = (size.disk - swap) if not ex_rsize else\
int(ex_rsize)
if (imagesize + swap) > size.disk:
raise LinodeException(0xFB, "Total disk images are too big")
# Distribution ID
distros = self.list_images()
if image.id not in [d.id for d in distros]:
raise LinodeException(0xFB,
"Invalid distro -- avail.distributions")
# Kernel
if ex_kernel:
kernel = ex_kernel
else:
if image.extra['64bit']:
# For a list of available kernel ids, see
# https://www.linode.com/kernels/
kernel = 138
else:
kernel = 137
params = {"api_action": "avail.kernels"}
kernels = self.connection.request(API_ROOT, params=params).objects[0]
if kernel not in [z["KERNELID"] for z in kernels]:
raise LinodeException(0xFB, "Invalid kernel -- avail.kernels")
# Comments
comments = "Created by Apache libcloud <https://www.libcloud.org>" if\
not ex_comment else ex_comment
# Step 1: linode.create
params = {
"api_action": "linode.create",
"DatacenterID": chosen,
"PlanID": size.id,
"PaymentTerm": payment
}
data = self.connection.request(API_ROOT, params=params).objects[0]
linode = {"id": data["LinodeID"]}
# Step 1b. linode.update to rename the Linode
params = {
"api_action": "linode.update",
"LinodeID": linode["id"],
"Label": name
}
self.connection.request(API_ROOT, params=params)
# Step 1c. linode.ip.addprivate if it was requested
if ex_private:
params = {
"api_action": "linode.ip.addprivate",
"LinodeID": linode["id"]
}
self.connection.request(API_ROOT, params=params)
# Step 1d. Labels
# use the linode id as the name can be up to 63 chars and the labels
# are limited to 48 chars
label = {
"lconfig": "[%s] Configuration Profile" % linode["id"],
"lroot": "[%s] %s Disk Image" % (linode["id"], image.name),
"lswap": "[%s] Swap Space" % linode["id"]
}
if lconfig:
label['lconfig'] = lconfig
if lroot:
label['lroot'] = lroot
if lswap:
label['lswap'] = lswap
# Step 2: linode.disk.createfromdistribution
if not root:
root = binascii.b2a_base64(os.urandom(8)).decode('ascii').strip()
params = {
"api_action": "linode.disk.createfromdistribution",
"LinodeID": linode["id"],
"DistributionID": image.id,
"Label": label["lroot"],
"Size": imagesize,
"rootPass": root,
}
if ssh:
params["rootSSHKey"] = ssh
data = self.connection.request(API_ROOT, params=params).objects[0]
linode["rootimage"] = data["DiskID"]
# Step 3: linode.disk.create for swap
params = {
"api_action": "linode.disk.create",
"LinodeID": linode["id"],
"Label": label["lswap"],
"Type": "swap",
"Size": swap
}
data = self.connection.request(API_ROOT, params=params).objects[0]
linode["swapimage"] = data["DiskID"]
# Step 4: linode.config.create for main profile
disks = "%s,%s,,,,,,," % (linode["rootimage"], linode["swapimage"])
params = {
"api_action": "linode.config.create",
"LinodeID": linode["id"],
"KernelID": kernel,
"Label": label["lconfig"],
"Comments": comments,
"DiskList": disks
}
if ex_private:
params['helper_network'] = True
params['helper_distro'] = True
data = self.connection.request(API_ROOT, params=params).objects[0]
linode["config"] = data["ConfigID"]
# Step 5: linode.boot
params = {
"api_action": "linode.boot",
"LinodeID": linode["id"],
"ConfigID": linode["config"]
}
self.connection.request(API_ROOT, params=params)
# Make a node out of it and hand it back
params = {"api_action": "linode.list", "LinodeID": linode["id"]}
data = self.connection.request(API_ROOT, params=params).objects[0]
nodes = self._to_nodes(data)
if len(nodes) == 1:
node = nodes[0]
if getattr(auth, "generated", False):
node.extra['password'] = auth.password
return node
return None
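# Illustrative sketch (an assumption, not upstream code): a minimal V3
# provisioning call. The [0] lookups assume non-empty availability lists and
# the password is a placeholder.
#
#     driver = LinodeNodeDriver('my-api-key', api_version='3.0')
#     node = driver.create_node(name='www1',
#                               image=driver.list_images()[0],
#                               size=driver.list_sizes()[0],
#                               auth=NodeAuthPassword('s3cr3t-pass'),
#                               location=driver.list_locations()[0])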
def ex_resize_node(self, node, size):
"""Resizes a Linode from one plan to another
Immediately shuts the Linode down, charges/credits the account,
and issue a migration to another host server.
Requires a size (numeric), which is the desired PlanID available from
avail.LinodePlans()
After resize is complete the node needs to be booted
"""
params = {"api_action": "linode.resize", "LinodeID": node.id,
"PlanID": size}
self.connection.request(API_ROOT, params=params)
return True
def ex_start_node(self, node):
# NOTE: This method is here for backward compatibility reasons after
# this method was promoted to be part of the standard compute API in
# Libcloud v2.7.0
return self.start_node(node=node)
def ex_stop_node(self, node):
# NOTE: This method is here for backward compatibility reasons after
# this method was promoted to be part of the standard compute API in
# Libcloud v2.7.0
return self.stop_node(node=node)
def ex_rename_node(self, node, name):
"""Renames a node"""
params = {
"api_action": "linode.update",
"LinodeID": node.id,
"Label": name
}
self.connection.request(API_ROOT, params=params)
return True
def list_sizes(self, location=None):
"""
List available Linode plans
Gets the sizes that can be used for creating a Linode. Since available
Linode plans vary per-location, this method can also be passed a
location to filter the availability.
:keyword location: the facility to retrieve plans in
:type location: :class:`NodeLocation`
:rtype: ``list`` of :class:`NodeSize`
"""
params = {"api_action": "avail.linodeplans"}
data = self.connection.request(API_ROOT, params=params).objects[0]
sizes = []
for obj in data:
n = NodeSize(id=obj["PLANID"], name=obj["LABEL"], ram=obj["RAM"],
disk=(obj["DISK"] * 1024), bandwidth=obj["XFER"],
price=obj["PRICE"], driver=self.connection.driver)
sizes.append(n)
return sizes
def list_images(self):
"""
List available Linux distributions
Retrieve all Linux distributions that can be deployed to a Linode.
:rtype: ``list`` of :class:`NodeImage`
"""
params = {"api_action": "avail.distributions"}
data = self.connection.request(API_ROOT, params=params).objects[0]
distros = []
for obj in data:
i = NodeImage(id=obj["DISTRIBUTIONID"],
name=obj["LABEL"],
driver=self.connection.driver,
extra={'pvops': obj['REQUIRESPVOPSKERNEL'],
'64bit': obj['IS64BIT']})
distros.append(i)
return distros
def list_locations(self):
"""
List available facilities for deployment
Retrieve all facilities that a Linode can be deployed in.
:rtype: ``list`` of :class:`NodeLocation`
"""
params = {"api_action": "avail.datacenters"}
data = self.connection.request(API_ROOT, params=params).objects[0]
nl = []
for dc in data:
country = None
if "USA" in dc["LOCATION"]:
country = "US"
elif "UK" in dc["LOCATION"]:
country = "GB"
elif "JP" in dc["LOCATION"]:
country = "JP"
else:
country = "??"
nl.append(NodeLocation(dc["DATACENTERID"],
dc["LOCATION"],
country,
self))
return nl
def linode_set_datacenter(self, dc):
"""
Set the default datacenter for Linode creation
Since Linodes must be created in a facility, this function sets the
default that :class:`create_node` will use. If a location keyword is
not passed to :class:`create_node`, this method must have already been
used.
:keyword dc: the datacenter to create Linodes in unless specified
:type dc: :class:`NodeLocation`
:rtype: ``bool``
"""
did = dc.id
params = {"api_action": "avail.datacenters"}
data = self.connection.request(API_ROOT, params=params).objects[0]
for datacenter in data:
if did == datacenter["DATACENTERID"]:
self.datacenter = did
return
dcs = ", ".join([d["DATACENTERID"] for d in data])
self.datacenter = None
raise LinodeException(0xFD, "Invalid datacenter (use one of %s)" % dcs)
def destroy_volume(self, volume):
"""
Destroys a disk volume for the Linode. The Linode id is to be provided as
extra["LINODEID"] within :class:`StorageVolume`. It can be retrieved
by :meth:`libcloud.compute.drivers.linode.LinodeNodeDriver\
.ex_list_volumes`.
:param volume: Volume to be destroyed
:type volume: :class:`StorageVolume`
:rtype: ``bool``
"""
if not isinstance(volume, StorageVolume):
raise LinodeException(0xFD, "Invalid volume instance")
if volume.extra["LINODEID"] is None:
raise LinodeException(0xFD, "Missing LinodeID")
params = {
"api_action": "linode.disk.delete",
"LinodeID": volume.extra["LINODEID"],
"DiskID": volume.id,
}
self.connection.request(API_ROOT, params=params)
return True
def ex_create_volume(self, size, name, node, fs_type):
"""
Create disk for the Linode.
:keyword size: Size of volume in megabytes (required)
:type size: ``int``
:keyword name: Name of the volume to be created
:type name: ``str``
:keyword node: Node to attach volume to.
:type node: :class:`Node`
:keyword fs_type: The formatted type of this disk. Valid types are:
ext3, ext4, swap, raw
:type fs_type: ``str``
:return: StorageVolume representing the newly-created volume
:rtype: :class:`StorageVolume`
"""
# check node
if not isinstance(node, Node):
raise LinodeException(0xFD, "Invalid node instance")
# check space available
total_space = node.extra['TOTALHD']
existing_volumes = self.ex_list_volumes(node)
used_space = 0
for volume in existing_volumes:
used_space = used_space + volume.size
available_space = total_space - used_space
if available_space < size:
raise LinodeException(0xFD, "Volume size too big. Available space\
%d" % available_space)
# check filesystem type
if fs_type not in self._linode_disk_filesystems:
raise LinodeException(0xFD, "Not valid filesystem type")
params = {
"api_action": "linode.disk.create",
"LinodeID": node.id,
"Label": name,
"Type": fs_type,
"Size": size
}
data = self.connection.request(API_ROOT, params=params).objects[0]
volume = data["DiskID"]
# Make a volume out of it and hand it back
params = {
"api_action": "linode.disk.list",
"LinodeID": node.id,
"DiskID": volume
}
data = self.connection.request(API_ROOT, params=params).objects[0]
return self._to_volumes(data)[0]
def ex_list_volumes(self, node, disk_id=None):
"""
List existing disk volumes for the given Linode.
:keyword node: Node to list disk volumes for. (required)
:type node: :class:`Node`
:keyword disk_id: Id for specific disk volume. (optional)
:type disk_id: ``int``
:rtype: ``list`` of :class:`StorageVolume`
"""
if not isinstance(node, Node):
raise LinodeException(0xFD, "Invalid node instance")
params = {
"api_action": "linode.disk.list",
"LinodeID": node.id
}
# Add param if disk_id was specified
if disk_id is not None:
params["DiskID"] = disk_id
data = self.connection.request(API_ROOT, params=params).objects[0]
return self._to_volumes(data)
def _to_volumes(self, objs):
"""
Convert returned JSON volumes into StorageVolume instances
:keyword objs: ``list`` of JSON dictionaries representing the
StorageVolumes
:type objs: ``list``
:return: ``list`` of :class:`StorageVolume`s
"""
volumes = {}
for o in objs:
vid = o["DISKID"]
volumes[vid] = vol = StorageVolume(id=vid, name=o["LABEL"],
size=int(o["SIZE"]),
driver=self.connection.driver)
vol.extra = copy(o)
return list(volumes.values())
def _to_nodes(self, objs):
"""Convert returned JSON Linodes into Node instances
:keyword objs: ``list`` of JSON dictionaries representing the Linodes
:type objs: ``list``
:return: ``list`` of :class:`Node`s"""
# Get the IP addresses for the Linodes
nodes = {}
batch = []
for o in objs:
lid = o["LINODEID"]
nodes[lid] = n = Node(id=lid, name=o["LABEL"], public_ips=[],
private_ips=[],
state=self.LINODE_STATES[o["STATUS"]],
driver=self.connection.driver)
n.extra = copy(o)
n.extra["PLANID"] = self._linode_plan_ids.get(o.get("TOTALRAM"))
batch.append({"api_action": "linode.ip.list", "LinodeID": lid})
# Avoid batch limitation
ip_answers = []
args = [iter(batch)] * 25
if PY3:
izip_longest = itertools.zip_longest # pylint: disable=no-member
else:
izip_longest = getattr(itertools, 'izip_longest', _izip_longest)
for twenty_five in izip_longest(*args):
twenty_five = [q for q in twenty_five if q]
params = {"api_action": "batch",
"api_requestArray": json.dumps(twenty_five)}
req = self.connection.request(API_ROOT, params=params)
if not req.success() or len(req.objects) == 0:
return None
ip_answers.extend(req.objects)
# Add the returned IPs to the nodes and return them
for ip_list in ip_answers:
for ip in ip_list:
lid = ip["LINODEID"]
which = nodes[lid].public_ips if ip["ISPUBLIC"] == 1 else\
nodes[lid].private_ips
which.append(ip["IPADDRESS"])
return list(nodes.values())
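# Note on the batching above (hedged sketch): [iter(batch)] * 25 shares one
# iterator across 25 slots, so zip_longest yields chunks of at most 25
# requests, matching the API's batch limit. For example:
#
#     list(itertools.zip_longest(*[iter(range(7))] * 3))
#     # -> [(0, 1, 2), (3, 4, 5), (6, None, None)]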
class LinodeNodeDriverV4(LinodeNodeDriver):
connectionCls = LinodeConnectionV4
_linode_disk_filesystems = LINODE_DISK_FILESYSTEMS_V4
LINODE_STATES = {
'running': NodeState.RUNNING,
'stopped': NodeState.STOPPED,
'provisioning': NodeState.STARTING,
'offline': NodeState.STOPPED,
'booting': NodeState.STARTING,
'rebooting': NodeState.REBOOTING,
'shutting_down': NodeState.STOPPING,
'deleting': NodeState.PENDING,
'migrating': NodeState.MIGRATING,
'rebuilding': NodeState.UPDATING,
'cloning': NodeState.MIGRATING,
'restoring': NodeState.PENDING,
'resizing': NodeState.RECONFIGURING
}
LINODE_DISK_STATES = {
'ready': StorageVolumeState.AVAILABLE,
'not ready': StorageVolumeState.CREATING,
'deleting': StorageVolumeState.DELETING
}
LINODE_VOLUME_STATES = {
'creating': StorageVolumeState.CREATING,
'active': StorageVolumeState.AVAILABLE,
'resizing': StorageVolumeState.UPDATING,
'contact_support': StorageVolumeState.UNKNOWN
}
def list_nodes(self):
"""
Returns a list of Linodes the API key in use has access
to view.
:return: List of node objects
:rtype: ``list`` of :class:`Node`
"""
data = self._paginated_request('/v4/linode/instances', 'data')
return [self._to_node(obj) for obj in data]
def list_sizes(self):
"""
Returns a list of Linode Types
:rtype: ``list`` of :class:`NodeSize`
"""
data = self._paginated_request('/v4/linode/types', 'data')
return [self._to_size(obj) for obj in data]
def list_images(self):
"""
Returns a list of images
:rtype: ``list`` of :class:`NodeImage`
"""
data = self._paginated_request('/v4/images', 'data')
return [self._to_image(obj) for obj in data]
def list_locations(self):
"""
Lists the Regions available for Linode services
:rtype: ``list`` of :class:`NodeLocation`
"""
data = self._paginated_request('/v4/regions', 'data')
return [self._to_location(obj) for obj in data]
def start_node(self, node):
"""Boots a node the API Key has permission to modify
:param node: the node to start
:type node: :class:`Node`
:rtype: ``bool``
"""
if not isinstance(node, Node):
raise LinodeExceptionV4("Invalid node instance")
response = self.connection.request('/v4/linode/instances/%s/boot'
% node.id,
method='POST')
return response.status == httplib.OK
def ex_start_node(self, node):
# NOTE: This method is here for backward compatibility reasons after
# this method was promoted to be part of the standard compute API in
# Libcloud v2.7.0
return self.start_node(node=node)
def stop_node(self, node):
"""Shuts down a a node the API Key has permission to modify.
:param node: the Linode to destroy
:type node: :class:`Node`
:rtype: ``bool``
"""
if not isinstance(node, Node):
raise LinodeExceptionV4("Invalid node instance")
response = self.connection.request('/v4/linode/instances/%s/shutdown'
% node.id,
method='POST')
return response.status == httplib.OK
def ex_stop_node(self, node):
# NOTE: This method is here for backward compatibility reasons after
# this method was promoted to be part of the standard compute API in
# Libcloud v2.7.0
return self.stop_node(node=node)
def destroy_node(self, node):
"""Deletes a node the API Key has permission to `read_write`
:param node: the Linode to destroy
:type node: :class:`Node`
:rtype: ``bool``
"""
if not isinstance(node, Node):
raise LinodeExceptionV4("Invalid node instance")
response = self.connection.request('/v4/linode/instances/%s'
% node.id,
method='DELETE')
return response.status == httplib.OK
def reboot_node(self, node):
"""Reboots a node the API Key has permission to modify.
:param node: the Linode to reboot
:type node: :class:`Node`
:rtype: ``bool``
"""
if not isinstance(node, Node):
raise LinodeExceptionV4("Invalid node instance")
response = self.connection.request('/v4/linode/instances/%s/reboot'
% node.id,
method='POST')
return response.status == httplib.OK
def create_node(self, location, size, image=None,
name=None, root_pass=None, ex_authorized_keys=None,
ex_authorized_users=None, ex_tags=None,
ex_backups_enabled=False, ex_private_ip=False):
"""Creates a Linode Instance.
In order for this request to complete successfully,
the user must have the `add_linodes` grant as this call
will incur a charge.
:param location: which region to create the node in
:type location: :class:`NodeLocation`
:param size: the plan size to create
:type size: :class:`NodeSize`
:keyword image: which distribution to deploy on the node
:type image: :class:`NodeImage`
:keyword name: the name to assign to node.\
Must start with an alpha character.\
May only consist of alphanumeric characters,\
dashes (-), underscores (_) or periods (.).\
Cannot have two dashes (--), underscores (__) or periods (..) in a row.
:type name: ``str``
:keyword root_pass: the root password (required if image is provided)
:type root_pass: ``str``
:keyword ex_authorized_keys: a list of public SSH keys
:type ex_authorized_keys: ``list`` of ``str``
:keyword ex_authorized_users: a list of usernames.\
If the usernames have associated SSH keys,\
the keys will be appended to the root users `authorized_keys`
:type ex_authorized_users: ``list`` of ``str``
:keyword ex_tags: list of tags for the node
:type ex_tags: ``list`` of ``str``
:keyword ex_backups_enabled: whether to be enrolled \
in the Linode Backup service (False)
:type ex_backups_enabled: ``bool``
:keyword ex_private_ip: whether or not to request a private IP
:type ex_private_ip: ``bool``
:return: Node representing the newly-created node
:rtype: :class:`Node`
"""
if not isinstance(location, NodeLocation):
raise LinodeExceptionV4("Invalid location instance")
if not isinstance(size, NodeSize):
raise LinodeExceptionV4("Invalid size instance")
attr = {'region': location.id,
'type': size.id,
'private_ip': ex_private_ip,
'backups_enabled': ex_backups_enabled,
}
if image is not None:
if root_pass is None:
raise LinodeExceptionV4("root password required "
"when providing an image")
attr['image'] = image.id
attr['root_pass'] = root_pass
if name is not None:
valid_name = r'^[a-zA-Z]((?!--|__|\.\.)[a-zA-Z0-9-_.])+$'
if not re.match(valid_name, name):
raise LinodeExceptionV4("Invalid name")
attr['label'] = name
if ex_authorized_keys is not None:
attr['authorized_keys'] = list(ex_authorized_keys)
if ex_authorized_users is not None:
attr['authorized_users'] = list(ex_authorized_users)
if ex_tags is not None:
attr['tags'] = list(ex_tags)
response = self.connection.request('/v4/linode/instances',
data=json.dumps(attr),
method='POST').object
return self._to_node(response)
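# Illustrative sketch (an assumption, not upstream code): creating a V4 node
# from the first available region/type/image; the root password is a
# placeholder.
#
#     driver = LinodeNodeDriver('my-api-key', api_version='4.0')
#     node = driver.create_node(location=driver.list_locations()[0],
#                               size=driver.list_sizes()[0],
#                               image=driver.list_images()[0],
#                               name='web-1',
#                               root_pass='s3cr3t-pass')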
def ex_get_node(self, node_id):
"""
Return a Node object based on a node ID.
:keyword node_id: Node's ID
:type node_id: ``str``
:return: The requested node
:rtype: :class:`Node`
"""
response = self.connection.request('/v4/linode/instances/%s'
% node_id).object
return self._to_node(response)
def ex_list_disks(self, node):
"""
List disks associated with the node.
:param node: Node to list disks. (required)
:type node: :class:`Node`
:rtype: ``list`` of :class:`LinodeDisk`
"""
if not isinstance(node, Node):
raise LinodeExceptionV4("Invalid node instance")
data = self._paginated_request('/v4/linode/instances/%s/disks'
% node.id, 'data')
return [self._to_disk(obj) for obj in data]
def ex_create_disk(self, size, name, node, fs_type,
image=None, ex_root_pass=None, ex_authorized_keys=None,
ex_authorized_users=None, ex_read_only=False):
"""
Adds a new disk to node
:param size: Size of disk in megabytes (required)
:type size: ``int``
:param name: Name of the disk to be created (required)
:type name: ``str``
:param node: Node to attach disk to (required)
:type node: :class:`Node`
:param fs_type: The formatted type of this disk. Valid types are:
ext3, ext4, swap, raw, initrd
:type fs_type: ``str``
:keyword image: Image to deploy the volume from
:type image: :class:`NodeImage`
:keyword ex_root_pass: root password, required \
if an image is provided
:type ex_root_pass: ``str``
:keyword ex_authorized_keys: a list of SSH keys
:type ex_authorized_keys: ``list`` of ``str``
:keyword ex_authorized_users: a list of usernames \
that will have their SSH keys,\
if any, automatically appended \
to the root user's ~/.ssh/authorized_keys file.
:type ex_authorized_users: ``list`` of ``str``
:keyword ex_read_only: if true, this disk is read-only
:type ex_read_only: ``bool``
:return: LinodeDisk representing the newly-created disk
:rtype: :class:`LinodeDisk`
"""
attr = {'label': str(name),
'size': int(size),
'filesystem': fs_type,
'read_only': ex_read_only}
if not isinstance(node, Node):
raise LinodeExceptionV4("Invalid node instance")
if fs_type not in self._linode_disk_filesystems:
raise LinodeExceptionV4("Not valid filesystem type")
if image is not None:
if not isinstance(image, NodeImage):
raise LinodeExceptionV4("Invalid image instance")
# when an image is set, root pass must be set as well
if ex_root_pass is None:
raise LinodeExceptionV4("root_pass is required when "
"deploying an image")
attr['image'] = image.id
attr['root_pass'] = ex_root_pass
if ex_authorized_keys is not None:
attr['authorized_keys'] = list(ex_authorized_keys)
if ex_authorized_users is not None:
attr['authorized_users'] = list(ex_authorized_users)
response = self.connection.request('/v4/linode/instances/%s/disks'
% node.id,
data=json.dumps(attr),
method='POST').object
return self._to_disk(response)
def ex_destroy_disk(self, node, disk):
"""
Destroys disk for the given node.
:param node: The Node the disk is attached to. (required)
:type node: :class:`Node`
:param disk: LinodeDisk to be destroyed (required)
:type disk: :class:`LinodeDisk`
:rtype: ``bool``
"""
if not isinstance(node, Node):
raise LinodeExceptionV4("Invalid node instance")
if not isinstance(disk, LinodeDisk):
raise LinodeExceptionV4("Invalid disk instance")
if node.state != self.LINODE_STATES['stopped']:
raise LinodeExceptionV4("Node needs to be stopped"
" before disk is destroyed")
response = self.connection.request('/v4/linode/instances/%s/disks/%s'
% (node.id, disk.id),
method='DELETE')
return response.status == httplib.OK
def list_volumes(self):
"""Get all volumes of the account
:rtype: ``list`` of :class:`StorageVolume`
"""
data = self._paginated_request('/v4/volumes', 'data')
return [self._to_volume(obj) for obj in data]
def create_volume(self, name, size, location=None, node=None, tags=None):
"""Creates a volume and optionally attaches it to a node.
:param name: The name to be given to the volume (required).\
Must start with an alpha character. \
May only consist of alphanumeric characters,\
dashes (-), underscores (_)\
Cannot have two dashes (--), underscores (__) in a row.
:type name: `str`
:param size: Size in gigabytes (required)
:type size: `int`
:keyword location: Location to create the volume in.\
Required if node is not given.
:type location: :class:`NodeLocation`
:keyword node: Node to attach the volume to
:type node: :class:`Node`
:keyword tags: tags to apply to volume
:type tags: `list` of `str`
:rtype: :class:`StorageVolume`
"""
valid_name = '^[a-zA-Z]((?!--|__)[a-zA-Z0-9-_])+$'
if not re.match(valid_name, name):
raise LinodeExceptionV4("Invalid name")
attr = {
'label': name,
'size': int(size),
}
if node is not None:
if not isinstance(node, Node):
raise LinodeExceptionV4("Invalid node instance")
attr['linode_id'] = int(node.id)
else:
# location is only required if a node is not given
if location:
if not isinstance(location, NodeLocation):
raise LinodeExceptionV4("Invalid location instance")
attr['region'] = location.id
else:
raise LinodeExceptionV4("Region must be provided "
"when node is not")
if tags is not None:
attr['tags'] = list(tags)
response = self.connection.request('/v4/volumes',
data=json.dumps(attr),
method='POST').object
return self._to_volume(response)
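# Illustrative sketch (an assumption, not upstream code): a volume needs
# either a node to attach to or a region to live in. some_node below is a
# hypothetical, previously fetched Node.
#
#     vol_a = driver.create_volume('data-1', size=20, node=some_node)
#     vol_b = driver.create_volume('data-2', size=20,
#                                  location=driver.list_locations()[0])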
def attach_volume(self, node, volume, persist_across_boots=True):
"""Attaches a volume to a node.
Volume and node must be located in the same region
:param node: Node to attach the volume to (required)
:type node: :class:`Node`
:param volume: Volume to be attached (required)
:type volume: :class:`StorageVolume`
:keyword persist_across_boots: Whether the volume should be \
attached to node across boots
:type persist_across_boots: `bool`
:rtype: :class:`StorageVolume`
"""
if not isinstance(volume, StorageVolume):
raise LinodeExceptionV4("Invalid volume instance")
if not isinstance(node, Node):
raise LinodeExceptionV4("Invalid node instance")
if volume.extra['linode_id'] is not None:
raise LinodeExceptionV4("Volume is already attached to a node")
if node.extra['location'] != volume.extra['location']:
raise LinodeExceptionV4("Volume and node "
"must be on the same region")
attr = {
'linode_id': int(node.id),
'persist_across_boots': persist_across_boots
}
response = self.connection.request('/v4/volumes/%s/attach'
% volume.id,
data=json.dumps(attr),
method='POST').object
return self._to_volume(response)
def detach_volume(self, volume):
"""Detaches a volume from a node.
:param volume: Volume to be detached (required)
:type volume: :class:`StorageVolume`
:rtype: ``bool``
"""
if not isinstance(volume, StorageVolume):
raise LinodeExceptionV4("Invalid volume instance")
if volume.extra['linode_id'] is None:
raise LinodeExceptionV4("Volume is already detached")
response = self.connection.request('/v4/volumes/%s/detach'
% volume.id,
method='POST')
return response.status == httplib.OK
def destroy_volume(self, volume):
"""Destroys the volume given.
:param volume: Volume to be deleted (required)
:type volume: :class:`StorageVolume`
:rtype: ``bool``
"""
if not isinstance(volume, StorageVolume):
raise LinodeExceptionV4("Invalid volume instance")
if volume.extra['linode_id'] is not None:
raise LinodeExceptionV4("Volume must be detached"
" before it can be deleted.")
response = self.connection.request('/v4/volumes/%s'
% volume.id,
method='DELETE')
return response.status == httplib.OK
def ex_resize_volume(self, volume, size):
"""Resizes the volume given.
:param volume: Volume to be resized
:type volume: :class:`StorageVolume`
:param size: new volume size in gigabytes, must be\
greater than current size
:type size: `int`
:rtype: ``bool``
"""
if not isinstance(volume, StorageVolume):
raise LinodeExceptionV4("Invalid volume instance")
if volume.size >= size:
raise LinodeExceptionV4("Volumes can only be resized up")
attr = {
'size': size
}
response = self.connection.request('/v4/volumes/%s/resize'
% volume.id,
data=json.dumps(attr),
method='POST')
return response.status == httplib.OK
def ex_clone_volume(self, volume, name):
"""Clones the volume given
:param volume: Volume to be cloned
:type volume: :class:`StorageVolume`
:param name: new cloned volume name
:type name: `str`
:rtype: :class:`StorageVolume`
"""
if not isinstance(volume, StorageVolume):
raise LinodeExceptionV4("Invalid volume instance")
attr = {
'label': name
}
response = self.connection.request('/v4/volumes/%s/clone'
% volume.id,
data=json.dumps(attr),
method='POST').object
return self._to_volume(response)
def ex_get_volume(self, volume_id):
"""
Return a Volume object based on a volume ID.
:param volume_id: Volume's id
:type volume_id: ``str``
:return: A StorageVolume object for the volume
:rtype: :class:`StorageVolume`
"""
response = self.connection.request('/v4/volumes/%s'
% volume_id).object
return self._to_volume(response)
def create_image(self, disk, name=None, description=None):
"""Creates a private image from a LinodeDisk.
Images are limited to three per account.
:param disk: LinodeDisk to create the image from (required)
:type disk: :class:`LinodeDisk`
:keyword name: A name for the image.\
Defaults to the name of the disk \
it is being created from if not provided
:type name: `str`
:keyword description: A description of the image
:type description: `str`
:return: The newly created NodeImage
:rtype: :class:`NodeImage`
"""
if not isinstance(disk, LinodeDisk):
raise LinodeExceptionV4("Invalid disk instance")
attr = {
'disk_id': int(disk.id),
'label': name,
'description': description
}
response = self.connection.request('/v4/images',
data=json.dumps(attr),
method='POST').object
return self._to_image(response)
def delete_image(self, image):
"""Deletes a private image
:param image: NodeImage to delete (required)
:type image: :class:`NodeImage`
:rtype: ``bool``
"""
if not isinstance(image, NodeImage):
raise LinodeExceptionV4("Invalid image instance")
response = self.connection.request('/v4/images/%s'
% image.id,
method='DELETE')
return response.status == httplib.OK
def ex_list_addresses(self):
"""List IP addresses
:return: LinodeIPAddress list
:rtype: `list` of :class:`LinodeIPAddress`
"""
data = self._paginated_request('/v4/networking/ips', 'data')
return [self._to_address(obj) for obj in data]
def ex_list_node_addresses(self, node):
"""List all IPv4 addresses attached to node
:param node: Node to list IP addresses
:type node: :class:`Node`
:return: LinodeIPAddress list
:rtype: `list` of :class:`LinodeIPAddress`
"""
if not isinstance(node, Node):
raise LinodeExceptionV4("Invalid node instance")
response = self.connection.request('/v4/linode/instances/%s/ips'
% node.id).object
return self._to_addresses(response)
def ex_allocate_private_address(self, node, address_type='ipv4'):
"""Allocates a private IPv4 address to node.Only ipv4 is currently supported
:param node: Node to attach the IP address
:type node: :class:`Node`
:keyword address_type: Type of IP address
:type address_type: `str`
:return: The newly created LinodeIPAddress
:rtype: :class:`LinodeIPAddress`
"""
if not isinstance(node, Node):
raise LinodeExceptionV4("Invalid node instance")
# Only ipv4 is currently supported
if address_type != 'ipv4':
raise LinodeExceptionV4("Address type not supported")
# Only one private IP address can be allocated
if len(node.private_ips) >= 1:
raise LinodeExceptionV4("Nodes can have up to one private IP")
attr = {
'public': False,
'type': address_type
}
response = self.connection.request('/v4/linode/instances/%s/ips'
% node.id,
data=json.dumps(attr),
method='POST').object
return self._to_address(response)
def ex_share_address(self, node, addresses):
"""Shares an IP with another node.This can be used to allow one Linode
to begin serving requests should another become unresponsive.
:param node: Node to share the IP addresses with
:type node: :class:`Node`
:keyword addresses: List of IP addresses to share
:type addresses: ``list`` of :class:`LinodeIPAddress`
:rtype: ``bool``
"""
if not isinstance(node, Node):
raise LinodeExceptionV4("Invalid node instance")
if not all(isinstance(address, LinodeIPAddress)
for address in addresses):
raise LinodeExceptionV4("Invalid address instance")
attr = {
'ips': [address.inet for address in addresses],
'linode_id': int(node.id)
}
response = self.connection.request('/v4/networking/ipv4/share',
data=json.dumps(attr),
method='POST')
return response.status == httplib.OK
def ex_resize_node(self, node, size, allow_auto_disk_resize=False):
"""
Resizes a node, on which the API Key has `read_write`
permission, to a different Type.
The following requirements must be met:
- The node must not have a pending migration
- The account cannot have an outstanding balance
- The node must not have more disk allocation than the new size allows
:param node: the Linode to resize
:type node: :class:`Node`
:param size: the size of the new node
:type size: :class:`NodeSize`
:keyword allow_auto_disk_resize: Automatically resize disks \
when resizing a node.
:type allow_auto_disk_resize: ``bool``
:rtype: ``bool``
"""
if not isinstance(node, Node):
raise LinodeExceptionV4("Invalid node instance")
if not isinstance(size, NodeSize):
raise LinodeExceptionV4("Invalid node size")
attr = {'type': size.id,
'allow_auto_disk_resize': allow_auto_disk_resize}
response = self.connection.request(
'/v4/linode/instances/%s/resize' % node.id,
data=json.dumps(attr),
method='POST')
return response.status == httplib.OK
def ex_rename_node(self, node, name):
"""Renames a node
:param node: the Linode to rename
:type node: :class:`Node`
:param name: the node's new name
:type name: ``str``
:return: Changed Node
:rtype: :class:`Node`
"""
if not isinstance(node, Node):
raise LinodeExceptionV4("Invalid node instance")
attr = {'label': name}
response = self.connection.request(
'/v4/linode/instances/%s' % node.id,
data=json.dumps(attr),
method='PUT').object
return self._to_node(response)
def _to_node(self, data):
extra = {
'tags': data['tags'],
'location': data['region'],
'ipv6': data['ipv6'],
'hypervisor': data['hypervisor'],
'specs': data['specs'],
'alerts': data['alerts'],
'backups': data['backups'],
'watchdog_enabled': data['watchdog_enabled']
}
public_ips = [ip for ip in data['ipv4'] if not is_private_subnet(ip)]
private_ips = [ip for ip in data['ipv4'] if is_private_subnet(ip)]
return Node(
id=data['id'],
name=data['label'],
state=self.LINODE_STATES[data['status']],
public_ips=public_ips,
private_ips=private_ips,
driver=self,
size=data['type'],
image=data['image'],
created_at=self._to_datetime(data['created']),
extra=extra)
def _to_datetime(self, strtime):
return datetime.strptime(strtime, "%Y-%m-%dT%H:%M:%S")
def _to_size(self, data):
extra = {
'class': data['class'],
'monthly_price': data['price']['monthly'],
'addons': data['addons'],
'successor': data['successor'],
'transfer': data['transfer'],
'vcpus': data['vcpus'],
'gpus': data['gpus']
}
return NodeSize(
id=data['id'],
name=data['label'],
ram=data['memory'],
disk=data['disk'],
bandwidth=data['network_out'],
price=data['price']['hourly'],
driver=self,
extra=extra
)
def _to_image(self, data):
extra = {
'type': data['type'],
'description': data['description'],
'created': self._to_datetime(data['created']),
'created_by': data['created_by'],
'is_public': data['is_public'],
'size': data['size'],
'eol': data['eol'],
'vendor': data['vendor'],
}
return NodeImage(
id=data['id'],
name=data['label'],
driver=self,
extra=extra
)
def _to_location(self, data):
extra = {
'status': data['status'],
'capabilities': data['capabilities'],
'resolvers': data['resolvers']
}
return NodeLocation(
id=data['id'],
name=data['id'],
country=data['country'].upper(),
driver=self,
extra=extra)
def _to_volume(self, data):
extra = {
'created': self._to_datetime(data['created']),
'tags': data['tags'],
'location': data['region'],
'linode_id': data['linode_id'],
'linode_label': data['linode_label'],
'state': self.LINODE_VOLUME_STATES[data['status']],
'filesystem_path': data['filesystem_path']
}
return StorageVolume(
id=str(data['id']),
name=data['label'],
size=data['size'],
driver=self,
extra=extra)
def _to_disk(self, data):
return LinodeDisk(
id=data['id'],
state=self.LINODE_DISK_STATES[data['status']],
name=data['label'],
filesystem=data['filesystem'],
size=data['size'],
driver=self,
)
def _to_address(self, data):
extra = {
'gateway': data['gateway'],
'subnet_mask': data['subnet_mask'],
'prefix': data['prefix'],
'rdns': data['rdns'],
'node_id': data['linode_id'],
'region': data['region'],
}
return LinodeIPAddress(
inet=data['address'],
public=data['public'],
version=data['type'],
driver=self,
extra=extra
)
def _to_addresses(self, data):
addresses = data['ipv4']['public'] + data['ipv4']['private']
return [self._to_address(address) for address in addresses]
def _paginated_request(self, url, obj, params=None):
"""
Perform multiple calls in order to have a full list of elements when
the API responses are paginated.
:param url: API endpoint
:type url: ``str``
:param obj: Result object key
:type obj: ``str``
:param params: Request parameters
:type params: ``dict``
:return: ``list`` of API response objects
:rtype: ``list``
"""
objects = []
params = params if params is not None else {}
ret = self.connection.request(url, params=params).object
data = list(ret.get(obj, []))
current_page = int(ret.get('page', 1))
num_of_pages = int(ret.get('pages', 1))
objects.extend(data)
for page in range(current_page + 1, num_of_pages + 1):
# add param to request next page
params['page'] = page
ret = self.connection.request(url, params=params).object
data = list(ret.get(obj, []))
objects.extend(data)
return objects
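# Hedged sketch of the pagination contract assumed above: the v4 API returns
# 'page'/'pages' counters alongside the keyed list, and _paginated_request
# keeps requesting pages until the counters meet. For example:
#
#     driver = LinodeNodeDriver('my-api-key', api_version='4.0')
#     instances = driver._paginated_request('/v4/linode/instances', 'data')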
def _izip_longest(*args, **kwds):
"""Taken from Python docs
http://docs.python.org/library/itertools.html#itertools.izip
"""
fillvalue = kwds.get('fillvalue')
def sentinel(counter=([fillvalue] * (len(args) - 1)).pop):
yield counter() # yields the fillvalue, or raises IndexError
fillers = itertools.repeat(fillvalue)
iters = [itertools.chain(it, sentinel(), fillers) for it in args]
try:
for tup in itertools.izip(*iters): # pylint: disable=no-member
yield tup
except IndexError:
pass
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from nose.tools import eq_, ok_
from werkzeug.test import Client
from werkzeug.wrappers import BaseResponse
from clastic import Application
from clastic.render import (JSONRender,
JSONPRender,
render_basic,
BasicRender,
Table,
TabularRender)
from common import (hello_world_str,
hello_world_html,
hello_world_ctx,
complex_context)
import json
_CUR_DIR = os.path.dirname(__file__)
def test_json_render(render_json=None):
if render_json is None:
render_json = JSONRender(dev_mode=True)
app = Application([('/', hello_world_ctx, render_json),
('/<name>/', hello_world_ctx, render_json),
('/beta/<name>/', complex_context, render_json)])
yield ok_, callable(app.routes[0]._execute)
yield ok_, callable(app.routes[0]._render)
c = Client(app, BaseResponse)
resp = c.get('/')
yield eq_, resp.status_code, 200
resp_data = json.loads(resp.data)
yield eq_, resp_data['name'], 'world'
resp = c.get('/Kurt/')
yield eq_, resp.status_code, 200
resp_data = json.loads(resp.data)
yield eq_, resp_data['name'], 'Kurt'
resp = c.get('/beta/Rajkumar/')
yield eq_, resp.status_code, 200
resp_data = json.loads(resp.data)
yield eq_, resp_data['name'], 'Rajkumar'
yield ok_, resp_data['date']
yield ok_, len(resp_data) > 4
def test_jsonp_render(render_json=None):
if render_json is None:
render_json = JSONPRender(qp_name='callback', dev_mode=True)
app = Application([('/', hello_world_ctx, render_json),
('/<name>/', hello_world_ctx, render_json),
('/beta/<name>/', complex_context, render_json)])
c = Client(app, BaseResponse)
resp = c.get('/?callback=test_callback')
yield eq_, resp.status_code, 200
yield ok_, resp.data.startswith('test_callback')
yield ok_, 'world' in resp.data
resp = c.get('/?callback=test_callback')
yield eq_, resp.status_code, 200
yield ok_, resp.data.startswith('test_callback')
yield ok_, 'world' in resp.data
#def test_default_json_render():
# from clastic.render import render_json
# for t in test_json_render(render_json):
# yield t
def test_default_render():
app = Application([('/', hello_world_ctx, render_basic),
('/<name>/', hello_world_ctx, render_basic),
('/text/<name>/', hello_world_str, render_basic),
('/html/<name>/', hello_world_html, render_basic),
('/beta/<name>/', complex_context, render_basic)])
yield ok_, callable(app.routes[0]._execute)
yield ok_, callable(app.routes[0]._render)
c = Client(app, BaseResponse)
resp = c.get('/') # test simple json with endpoint default
yield eq_, resp.status_code, 200
resp_data = json.loads(resp.data)
yield eq_, resp_data['name'], 'world'
resp = c.get('/Kurt/') # test simple json with url param
yield eq_, resp.status_code, 200
resp_data = json.loads(resp.data)
yield eq_, resp_data['name'], 'Kurt'
resp = c.get('/beta/Rajkumar/') # test fancy json
yield eq_, resp.status_code, 200
resp_data = json.loads(resp.data)
yield eq_, resp_data['name'], 'Rajkumar'
yield ok_, resp_data['date']
yield ok_, len(resp_data) > 4
resp = c.get('/text/Noam/') # test text
yield eq_, resp.status_code, 200
yield eq_, resp.data, 'Hello, Noam!'
resp = c.get('/html/Asia/') # test basic html
yield eq_, resp.status_code, 200
yield ok_, 'text/html' in resp.headers['Content-Type']
def test_custom_table_render():
class BoldHTMLTable(Table):
def get_cell_html(self, value):
std_html = super(BoldHTMLTable, self).get_cell_html(value)
return '<b>' + std_html + '</b>'
custom_tr = TabularRender(table_type=BoldHTMLTable)
custom_render = BasicRender(tabular_render=custom_tr)
app = Application([('/', hello_world_ctx, custom_render)])
c = Client(app, BaseResponse)
resp = c.get('/?format=html')
yield eq_, resp.status_code, 200
assert '<b>' in resp.data
|
# -*- coding: utf-8 -*-
"""
Utilities and definitions for natsort, mostly all used to define
the _natsort_key function.
"""
from __future__ import (
print_function,
division,
unicode_literals,
absolute_import
)
# Std. lib imports.
import re
from math import isnan
from warnings import warn
from os import curdir, pardir
from os.path import split, splitext
from itertools import islice
from locale import localeconv
# Local imports.
from natsort.ns_enum import ns, _ns
from natsort.unicode_numbers import digits, numeric
from natsort.locale_help import locale_convert, grouper
from natsort.compat.pathlib import PurePath, has_pathlib
from natsort.compat.py23 import (
py23_str,
py23_zip,
PY_VERSION,
)
from natsort.compat.locale import (
dumb_sort,
use_pyicu,
null_string,
)
from natsort.compat.fastnumbers import (
fast_float,
fast_int,
isint,
isfloat,
)
# Group algorithm types for easy extraction
_NUMBER_ALGORITHMS = ns.FLOAT | ns.INT | ns.UNSIGNED | ns.SIGNED | ns.NOEXP
_ALL_BUT_PATH = (ns.F | ns.I | ns.U | ns.S | ns.N | ns.L |
ns.IC | ns.LF | ns.G | ns.UG | ns.TYPESAFE)
# The regex that locates floats - include Unicode numerals.
_float_sign_exp_re = r'([-+]?[0-9]*\.?[0-9]+(?:[eE][-+]?[0-9]+)?|[{0}])'
_float_sign_exp_re = _float_sign_exp_re.format(numeric)
_float_sign_exp_re = re.compile(_float_sign_exp_re, flags=re.U)
_float_nosign_exp_re = r'([0-9]*\.?[0-9]+(?:[eE][-+]?[0-9]+)?|[{0}])'
_float_nosign_exp_re = _float_nosign_exp_re.format(numeric)
_float_nosign_exp_re = re.compile(_float_nosign_exp_re, flags=re.U)
_float_sign_noexp_re = r'([-+]?[0-9]*\.?[0-9]+|[{0}])'
_float_sign_noexp_re = _float_sign_noexp_re.format(numeric)
_float_sign_noexp_re = re.compile(_float_sign_noexp_re, flags=re.U)
_float_nosign_noexp_re = r'([0-9]*\.?[0-9]+|[{0}])'
_float_nosign_noexp_re = _float_nosign_noexp_re.format(numeric)
_float_nosign_noexp_re = re.compile(_float_nosign_noexp_re, flags=re.U)
_float_sign_exp_re_c = r'([-+]?[0-9]*[.,]?[0-9]+(?:[eE][-+]?[0-9]+)?)|[{0}]'
_float_sign_exp_re_c = _float_sign_exp_re_c.format(numeric)
_float_sign_exp_re_c = re.compile(_float_sign_exp_re_c, flags=re.U)
_float_nosign_exp_re_c = r'([0-9]*[.,]?[0-9]+(?:[eE][-+]?[0-9]+)?|[{0}])'
_float_nosign_exp_re_c = _float_nosign_exp_re_c.format(numeric)
_float_nosign_exp_re_c = re.compile(_float_nosign_exp_re_c, flags=re.U)
_float_sign_noexp_re_c = r'([-+]?[0-9]*[.,]?[0-9]+|[{0}])'
_float_sign_noexp_re_c = _float_sign_noexp_re_c.format(numeric)
_float_sign_noexp_re_c = re.compile(_float_sign_noexp_re_c, flags=re.U)
_float_nosign_noexp_re_c = r'([0-9]*[.,]?[0-9]+|[{0}])'
_float_nosign_noexp_re_c = _float_nosign_noexp_re_c.format(numeric)
_float_nosign_noexp_re_c = re.compile(_float_nosign_noexp_re_c, flags=re.U)
# Integer regexes - include Unicode digits.
_int_nosign_re = r'([0-9]+|[{0}])'.format(digits)
_int_nosign_re = re.compile(_int_nosign_re, flags=re.U)
_int_sign_re = r'([-+]?[0-9]+|[{0}])'.format(digits)
_int_sign_re = re.compile(_int_sign_re, flags=re.U)
# This dict will help select the correct regex and number conversion function.
_regex_and_num_function_chooser = {
(ns.F | ns.S, '.'): (_float_sign_exp_re, fast_float),
(ns.F | ns.S | ns.N, '.'): (_float_sign_noexp_re, fast_float),
(ns.F | ns.U, '.'): (_float_nosign_exp_re, fast_float),
(ns.F | ns.U | ns.N, '.'): (_float_nosign_noexp_re, fast_float),
(ns.I | ns.S, '.'): (_int_sign_re, fast_int),
(ns.I | ns.S | ns.N, '.'): (_int_sign_re, fast_int),
(ns.I | ns.U, '.'): (_int_nosign_re, fast_int),
(ns.I | ns.U | ns.N, '.'): (_int_nosign_re, fast_int),
(ns.F | ns.S, ','): (_float_sign_exp_re_c, fast_float),
(ns.F | ns.S | ns.N, ','): (_float_sign_noexp_re_c, fast_float),
(ns.F | ns.U, ','): (_float_nosign_exp_re_c, fast_float),
(ns.F | ns.U | ns.N, ','): (_float_nosign_noexp_re_c, fast_float),
(ns.I | ns.S, ','): (_int_sign_re, fast_int),
(ns.I | ns.S | ns.N, ','): (_int_sign_re, fast_int),
(ns.I | ns.U, ','): (_int_nosign_re, fast_int),
(ns.I | ns.U | ns.N, ','): (_int_nosign_re, fast_int),
}
# Dict to select checker function from converter function
_conv_to_check = {fast_float: isfloat, fast_int: isint}
def _do_decoding(s, encoding):
"""A function to decode a bytes string, or return the object as-is."""
try:
return s.decode(encoding)
except UnicodeError:
raise
except (AttributeError, TypeError):
return s
def _args_to_enum(**kwargs):
"""A function to convert input booleans to an enum-type argument."""
alg = 0
keys = ('number_type', 'signed', 'exp', 'as_path', 'py3_safe')
if any(x not in keys for x in kwargs):
x = set(kwargs) - set(keys)
raise TypeError('Invalid argument(s): ' + ', '.join(x))
if 'number_type' in kwargs and kwargs['number_type'] is not int:
msg = "The 'number_type' argument is deprecated as of 3.5.0, "
msg += "please use 'alg=ns.FLOAT', 'alg=ns.INT', or 'alg=ns.VERSION'"
warn(msg, DeprecationWarning)
alg |= (_ns['FLOAT'] * bool(kwargs['number_type'] is float))
alg |= (_ns['INT'] * bool(kwargs['number_type'] in (int, None)))
alg |= (_ns['SIGNED'] * (kwargs['number_type'] not in (float, None)))
if 'signed' in kwargs and kwargs['signed'] is not None:
msg = "The 'signed' argument is deprecated as of 3.5.0, "
msg += "please use 'alg=ns.SIGNED'."
warn(msg, DeprecationWarning)
alg |= (_ns['SIGNED'] * bool(kwargs['signed']))
if 'exp' in kwargs and kwargs['exp'] is not None:
msg = "The 'exp' argument is deprecated as of 3.5.0, "
msg += "please use 'alg=ns.NOEXP'."
warn(msg, DeprecationWarning)
alg |= (_ns['NOEXP'] * (not kwargs['exp']))
if 'as_path' in kwargs and kwargs['as_path'] is not None:
msg = "The 'as_path' argument is deprecated as of 3.5.0, "
msg += "please use 'alg=ns.PATH'."
warn(msg, DeprecationWarning)
alg |= (_ns['PATH'] * kwargs['as_path'])
if 'py3_safe' in kwargs and kwargs['py3_safe'] is not None:
msg = "The 'py3_safe' argument is deprecated as of 3.5.0, "
msg += "please use 'alg=ns.TYPESAFE'."
warn(msg, DeprecationWarning)
alg |= (_ns['TYPESAFE'] * kwargs['py3_safe'])
return alg
def _number_extracter(s, regex, numconv, py3_safe, use_locale, group_letters):
"""Helper to separate the string input into numbers and strings."""
conv_check = (numconv, _conv_to_check[numconv])
# Split the input string by numbers.
# If the input is not a string, TypeError is raised.
s = regex.split(s)
# Now convert the numbers to numbers, and leave strings as strings.
# Take into account locale if needed, and group letters if needed.
# Remove empty strings from the list.
if use_locale:
s = [locale_convert(x, conv_check, group_letters) for x in s if x]
elif group_letters:
s = [grouper(x, conv_check) for x in s if x]
else:
s = [numconv(x) for x in s if x]
# If the list begins with a number, lead with an empty string.
# This is used to get around the "unorderable types" issue.
if not s: # Return empty list for empty results.
return []
elif conv_check[1](s[0], num_only=True):
s = [null_string if use_locale else ''] + s
# The _py3_safe function inserts "" between numbers in the list,
# and is used to get around "unorderable types" in complex cases.
# It is a separate function that needs to be requested specifically
# because it is expensive to call.
return _py3_safe(s, use_locale, conv_check[1]) if py3_safe else s
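# Illustrative sketch (not part of the original module): with the unsigned
# int regex and fast_int, the splitter alternates strings and numbers, and
# prepends '' when the result would otherwise lead with a number:
#
#     _number_extracter('a5b', _int_nosign_re, fast_int,
#                       py3_safe=False, use_locale=False, group_letters=False)
#     # -> ['a', 5, 'b']
#     _number_extracter('5b', _int_nosign_re, fast_int,
#                       py3_safe=False, use_locale=False, group_letters=False)
#     # -> ['', 5, 'b']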
def _path_splitter(s, _d_match=re.compile(r'\.\d').match):
"""Split a string into its path components. Assumes a string is a path."""
path_parts = []
p_append = path_parts.append
# Convert a pathlib PurePath object to a string.
if has_pathlib and isinstance(s, PurePath):
path_location = str(s)
else: # pragma: no cover
path_location = s
# Continue splitting the path from the back until we have reached
# '..' or '.', or until there is nothing left to split.
while path_location != curdir and path_location != pardir:
parent_path = path_location
path_location, child_path = split(parent_path)
if path_location == parent_path:
break
p_append(child_path)
# This last append is the base path.
# Only append if the string is non-empty.
if path_location:
p_append(path_location)
# We created this list in reversed order, so we now correct the order.
path_parts.reverse()
# Now, split off the file extensions using a similar method to above.
# Continue splitting off file extensions until we reach a decimal number
# or there are no more extensions.
base = path_parts.pop()
base_parts = []
b_append = base_parts.append
while True:
front = base
base, ext = splitext(front)
if _d_match(ext) or not ext:
# Reset base to before the split if the split is invalid.
base = front
break
b_append(ext)
b_append(base)
base_parts.reverse()
# Return the split parent paths and then the split basename.
return path_parts + base_parts
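# Illustrative sketch (not part of the original module): components are
# peeled off the path from the back, then extensions off the basename:
#
#     _path_splitter('/u/n/file-1.10.tar.gz')
#     # -> ['/', 'u', 'n', 'file-1.10', '.tar', '.gz']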
def _py3_safe(parsed_list, use_locale, check):
"""Insert '' between two numbers."""
length = len(parsed_list)
if length < 2:
return parsed_list
else:
new_list = [parsed_list[0]]
nl_append = new_list.append
for before, after in py23_zip(islice(parsed_list, 0, length-1),
islice(parsed_list, 1, None)):
if check(before, num_only=True) and check(after, num_only=True):
nl_append(null_string if use_locale else '')
nl_append(after)
return new_list
def _fix_nan(ret, alg):
"""Detect an NaN and replace or raise a ValueError."""
t = []
for r in ret:
if isfloat(r, num_only=True) and isnan(r):
if alg & _ns['NANLAST']:
t.append(float('+inf'))
else:
t.append(float('-inf'))
else:
t.append(r)
return tuple(t)
def _natsort_key(val, key, alg):
"""\
Key to sort strings and numbers naturally.
It works by separating out the numbers from the strings. This function is
for internal use only. See the natsort_keygen documentation for details of
each parameter.
Parameters
----------
val : {str, unicode}
key : callable
alg : ns enum
Returns
-------
out : tuple
The modified value with numbers extracted.
"""
# Convert the arguments to the proper input tuple
try:
use_locale = alg & _ns['LOCALE']
inp_options = (alg & _NUMBER_ALGORITHMS,
localeconv()['decimal_point'] if use_locale else '.')
except TypeError:
msg = "_natsort_key: 'alg' argument must be from the enum 'ns'"
raise ValueError(msg+', got {0}'.format(py23_str(alg)))
# Get the proper regex and conversion function.
try:
regex, num_function = _regex_and_num_function_chooser[inp_options]
except KeyError: # pragma: no cover
if inp_options[1] not in ('.', ','): # pragma: no cover
raise ValueError("_natsort_key: currently natsort only supports "
"the decimal separators '.' and ','. "
"Please file a bug report.")
else:
raise
else:
# Apply key if needed.
if key is not None:
val = key(val)
# If this is a path, convert it.
    # An AttributeError is raised if it is not a string.
split_as_path = False
if alg & _ns['PATH']:
try:
val = _path_splitter(val)
except AttributeError:
pass
else:
# Record that this string was split as a path so that
# we don't set PATH in the recursive call.
split_as_path = True
    # Assume the input is a string, which is the most common case.
# Apply the string modification if needed.
orig_val = val
try:
lowfirst = alg & _ns['LOWERCASEFIRST']
dumb = dumb_sort() if use_locale else False
if use_locale and dumb and not lowfirst: # pragma: no cover
val = val.swapcase() # Compensate for bad locale lib.
elif lowfirst and not (use_locale and dumb):
val = val.swapcase()
if alg & _ns['IGNORECASE']:
val = val.casefold() if PY_VERSION >= 3.3 else val.lower()
gl = alg & _ns['GROUPLETTERS']
ret = tuple(_number_extracter(val,
regex,
num_function,
alg & _ns['TYPESAFE'],
use_locale,
gl or (use_locale and dumb)))
# Handle NaN.
if any(isfloat(x, num_only=True) and isnan(x) for x in ret):
ret = _fix_nan(ret, alg)
        # For UNGROUPLETTERS, return a two-element tuple so that high-level
        # grouping can occur based on the first letter of the string.
# Do no locale transformation of the characters.
if use_locale and alg & _ns['UNGROUPLETTERS']:
if not ret:
return (ret, ret)
elif ret[0] == null_string:
return ((b'' if use_pyicu else '',), ret)
elif dumb: # pragma: no cover
if lowfirst:
return ((orig_val[0].swapcase(),), ret)
else:
return ((orig_val[0],), ret)
else:
return ((val[0],), ret)
else:
return ret
except (TypeError, AttributeError):
# Check if it is a bytes type, and if so return as a
# one element tuple.
if type(val) in (bytes,):
return (val.lower(),) if alg & _ns['IGNORECASE'] else (val,)
# If not strings, assume it is an iterable that must
# be parsed recursively. Do not apply the key recursively.
# If this string was split as a path, turn off 'PATH'.
try:
was_path = alg & _ns['PATH']
newalg = alg & _ALL_BUT_PATH
newalg |= (was_path * (not split_as_path))
return tuple([_natsort_key(x, None, newalg) for x in val])
# If there is still an error, it must be a number.
# Return as-is, with a leading empty string.
except TypeError:
n = null_string if use_locale else ''
if isfloat(val, num_only=True) and isnan(val):
val = _fix_nan([val], alg)[0]
return ((n, val,),) if alg & _ns['PATH'] else (n, val,)
|
#!/usr/bin/env python
"""
This module has utility functions for gathering up the static content
that is defined by XModules and XModuleDescriptors (javascript and css)
"""
import errno
import hashlib
import logging
import os
import sys
from collections import defaultdict
from docopt import docopt
from path import Path as path
from xmodule.x_module import XModuleDescriptor
LOG = logging.getLogger(__name__)
def write_module_styles(output_root):
"""Write all registered XModule css, sass, and scss files to output root."""
return _write_styles('.xmodule_display', output_root, _list_modules())
def write_module_js(output_root):
"""Write all registered XModule js and coffee files to output root."""
return _write_js(output_root, _list_modules())
def write_descriptor_styles(output_root):
"""Write all registered XModuleDescriptor css, sass, and scss files to output root."""
return _write_styles('.xmodule_edit', output_root, _list_descriptors())
def write_descriptor_js(output_root):
"""Write all registered XModuleDescriptor js and coffee files to output root."""
return _write_js(output_root, _list_descriptors())
def _list_descriptors():
    """Return a list of all registered XModuleDescriptor classes."""
    return [desc for (_, desc) in XModuleDescriptor.load_classes()]
def _list_modules():
"""Return a list of all registered XModule classes."""
return [
desc.module_class
for desc
in _list_descriptors()
]
def _ensure_dir(directory):
"""Ensure that `directory` exists."""
try:
os.makedirs(directory)
except OSError as exc:
if exc.errno == errno.EEXIST:
pass
else:
raise
def _write_styles(selector, output_root, classes):
"""
Write the css fragments from all XModules in `classes`
into `output_root` as individual files, hashed by the contents to remove
duplicates
"""
contents = {}
css_fragments = defaultdict(set)
for class_ in classes:
class_css = class_.get_css()
for filetype in ('sass', 'scss', 'css'):
for idx, fragment in enumerate(class_css.get(filetype, [])):
css_fragments[idx, filetype, fragment].add(class_.__name__)
css_imports = defaultdict(set)
for (idx, filetype, fragment), classes in sorted(css_fragments.items()):
fragment_name = "{idx:0=3d}-{hash}.{type}".format(
idx=idx,
hash=hashlib.md5(fragment).hexdigest(),
type=filetype)
# Prepend _ so that sass just includes the files into a single file
filename = '_' + fragment_name
contents[filename] = fragment
for class_ in classes:
css_imports[class_].add(fragment_name)
module_styles_lines = [
"@import 'bourbon/bourbon';",
"@import 'lms/theme/variables';",
]
for class_, fragment_names in css_imports.items():
module_styles_lines.append("""{selector}.xmodule_{class_} {{""".format(
class_=class_, selector=selector
))
module_styles_lines.extend(' @import "{0}";'.format(name) for name in fragment_names)
module_styles_lines.append('}')
contents['_module-styles.scss'] = '\n'.join(module_styles_lines)
_write_files(output_root, contents)
def _write_js(output_root, classes):
"""
Write the javascript fragments from all XModules in `classes`
into `output_root` as individual files, hashed by the contents to remove
duplicates
"""
contents = {}
js_fragments = set()
for class_ in classes:
module_js = class_.get_javascript()
        # Enforce a 000 prefix for xmodule.js so it always sorts first.
js_fragments.add((0, 'js', module_js.get('xmodule_js')))
for filetype in ('coffee', 'js'):
for idx, fragment in enumerate(module_js.get(filetype, [])):
js_fragments.add((idx + 1, filetype, fragment))
for idx, filetype, fragment in sorted(js_fragments):
filename = "{idx:0=3d}-{hash}.{type}".format(
idx=idx,
hash=hashlib.md5(fragment).hexdigest(),
type=filetype)
contents[filename] = fragment
_write_files(output_root, contents, {'.coffee': '.js'})
return [output_root / filename for filename in contents.keys()]
def _write_files(output_root, contents, generated_suffix_map=None):
"""
Write file contents to output root.
Any files not listed in contents that exists in output_root will be deleted,
unless it matches one of the patterns in `generated_suffix_map`.
output_root (path): The root directory to write the file contents in
contents (dict): A map from filenames to file contents to be written to the output_root
generated_suffix_map (dict): Optional. Maps file suffix to generated file suffix.
For any file in contents, if the suffix matches a key in `generated_suffix_map`,
then the same filename with the suffix replaced by the value from `generated_suffix_map`
will be ignored
"""
_ensure_dir(output_root)
to_delete = set(file.basename() for file in output_root.files()) - set(contents.keys())
if generated_suffix_map:
for output_file in contents.keys():
for suffix, generated_suffix in generated_suffix_map.items():
if output_file.endswith(suffix):
to_delete.discard(output_file.replace(suffix, generated_suffix))
for extra_file in to_delete:
(output_root / extra_file).remove_p()
for filename, file_content in contents.iteritems():
output_file = output_root / filename
not_file = not output_file.isfile()
# not_file is included to short-circuit this check, because
# read_md5 depends on the file already existing
write_file = not_file or output_file.read_md5() != hashlib.md5(file_content).digest()
if write_file:
LOG.debug("Writing %s", output_file)
output_file.write_bytes(file_content)
else:
LOG.debug("%s unchanged, skipping", output_file)
def main():
"""
Generate
Usage: static_content.py <output_root>
"""
from django.conf import settings
settings.configure()
args = docopt(main.__doc__)
root = path(args['<output_root>'])
write_descriptor_js(root / 'descriptors/js')
write_descriptor_styles(root / 'descriptors/css')
write_module_js(root / 'modules/js')
write_module_styles(root / 'modules/css')
if __name__ == '__main__':
sys.exit(main())
|
#!/usr/bin/python
# Copyright (c) 2012 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Cache accesses to GSDStorage locally.
Operations are provided to read/write whole files and to
read/write strings.
Read from GSDStorage if nothing exists locally.
"""
import os
import re
import file_tools
KEY_PATTERN = re.compile('^[A-Za-z0-9_/.]+$')
def ValidateKey(key):
if KEY_PATTERN.match(key) is None:
raise KeyError('Invalid storage key "%s"' % key)
def LocalFileURL(local_file):
abs_path = os.path.abspath(local_file)
if not abs_path.startswith('/'):
    # Windows paths need an extra slash for the file protocol.
return 'file:///' + abs_path
else:
return 'file://' + abs_path
class LocalStorageCache(object):
"""A caching wrapper for reading a GSDStorage object or storing locally.
Allow reading/writing to key, value pairs in local files.
Reads fall back to remote storage.
Restricts keys to a limited regex.
Is not atomic in the face of concurrent writers / readers on Windows.
"""
def __init__(self, cache_path, storage):
"""Init for this class.
Args:
cache_path: Path to a database to store a local cache in.
storage: A GSDStorage style object to fallback to for reads.
"""
self._cache_path = os.path.abspath(cache_path)
file_tools.MakeDirectoryIfAbsent(self._cache_path)
self._storage = storage
def PutFile(self, path, key):
"""Write a file to storage.
Args:
path: Path of the file to write.
key: Key to store file under.
Returns:
URL written to.
"""
return self.PutData(file_tools.ReadFile(path), key)
def PutData(self, data, key):
"""Write data to storage.
Args:
data: Data to store.
key: Key to store file under.
Returns:
URL written to.
"""
ValidateKey(key)
cache_file = os.path.join(self._cache_path, key)
cache_dir = os.path.dirname(cache_file)
if not os.path.exists(cache_dir):
os.makedirs(cache_dir)
file_tools.AtomicWriteFile(data, cache_file)
return LocalFileURL(cache_file)
def GetFile(self, key, path):
"""Read a file from storage.
Args:
      key: Key the file was stored under.
path: Destination filename.
Returns:
URL used on success or None for failure.
"""
ValidateKey(key)
cache_file = os.path.join(self._cache_path, key)
if os.path.exists(cache_file):
data = file_tools.ReadFile(cache_file)
file_tools.WriteFile(data, path)
return LocalFileURL(cache_file)
else:
return self._storage.GetFile(key, path)
def GetData(self, key):
"""Read data from global storage.
Args:
      key: Key the data was stored under.
Returns:
Data from storage, or None for failure.
"""
ValidateKey(key)
cache_file = os.path.join(self._cache_path, key)
if os.path.exists(cache_file):
return file_tools.ReadFile(cache_file)
else:
return self._storage.GetData(key)
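# Minimal usage sketch (added for exposition; assumes `remote` is a
# GSDStorage-style object with GetFile/GetData methods):
#
#   cache = LocalStorageCache('/tmp/nacl_cache', remote)
#   url = cache.PutData('hello world', 'greetings/en')
#   data = cache.GetData('greetings/en')  # served from the local cache
#   cache.GetData('greetings/fr')         # falls back to remote.GetData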
|
# -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2019.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Tests for pass manager visualization tool."""
import unittest
import os
from qiskit.transpiler import CouplingMap, Layout
from qiskit.transpiler.passmanager import PassManager
from qiskit import QuantumRegister
from qiskit.transpiler.passes import Unroller
from qiskit.transpiler.passes import CheckMap
from qiskit.transpiler.passes import CXDirection
from qiskit.transpiler.passes import SetLayout
from qiskit.transpiler.passes import TrivialLayout
from qiskit.transpiler.passes import BarrierBeforeFinalMeasurements
from qiskit.transpiler.passes import FullAncillaAllocation
from qiskit.transpiler.passes import EnlargeWithAncilla
from qiskit.transpiler.passes import RemoveResetInZeroState
from .visualization import QiskitVisualizationTestCase, path_to_diagram_reference
try:
import subprocess
_PROC = subprocess.Popen(['dot', '-V'], stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
_PROC.communicate()
if _PROC.returncode != 0:
HAS_GRAPHVIZ = False
else:
HAS_GRAPHVIZ = True
except Exception: # pylint: disable=broad-except
# this is raised when the dot command cannot be found, which means GraphViz
# isn't installed
HAS_GRAPHVIZ = False
class TestPassManagerDrawer(QiskitVisualizationTestCase):
"""Qiskit pass manager drawer tests."""
def setUp(self):
coupling = [[0, 1], [1, 2], [2, 3], [3, 4], [4, 5], [5, 6]]
coupling_map = CouplingMap(couplinglist=coupling)
basis_gates = ['u1', 'u3', 'u2', 'cx']
qr = QuantumRegister(7, 'q')
layout = Layout({qr[i]: i for i in range(coupling_map.size())})
# Create a pass manager with a variety of passes and flow control structures
self.pass_manager = PassManager()
self.pass_manager.append(SetLayout(layout))
self.pass_manager.append(TrivialLayout(coupling_map), condition=lambda x: True)
self.pass_manager.append(FullAncillaAllocation(coupling_map))
self.pass_manager.append(EnlargeWithAncilla())
self.pass_manager.append(Unroller(basis_gates))
self.pass_manager.append(CheckMap(coupling_map))
self.pass_manager.append(BarrierBeforeFinalMeasurements(), do_while=lambda x: False)
self.pass_manager.append(CXDirection(coupling_map))
self.pass_manager.append(RemoveResetInZeroState())
@unittest.skipIf(not HAS_GRAPHVIZ,
'Graphviz not installed.')
def test_pass_manager_drawer_basic(self):
"""Test to see if the drawer draws a normal pass manager correctly"""
filename = self._get_resource_path('current_standard.dot')
self.pass_manager.draw(filename=filename, raw=True)
self.assertFilesAreEqual(filename, path_to_diagram_reference('pass_manager_standard.dot'))
os.remove(filename)
@unittest.skipIf(not HAS_GRAPHVIZ, 'Graphviz not installed.')
def test_pass_manager_drawer_style(self):
"""Test to see if the colours are updated when provided by the user"""
# set colours for some passes, but leave others to take the default values
style = {SetLayout: 'cyan',
CheckMap: 'green',
EnlargeWithAncilla: 'pink',
RemoveResetInZeroState: 'grey'}
filename = self._get_resource_path('current_style.dot')
self.pass_manager.draw(filename=filename, style=style, raw=True)
self.assertFilesAreEqual(filename, path_to_diagram_reference('pass_manager_style.dot'))
os.remove(filename)
if __name__ == '__main__':
unittest.main(verbosity=2)
|
# Copyright 2020 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for the Command substitution."""
import os
import shlex
import subprocess
from typing import Iterable
from typing import List
from typing import Text
import launch.logging
from .substitution_failure import SubstitutionFailure
from ..frontend.expose import expose_substitution
from ..launch_context import LaunchContext
from ..some_substitutions_type import SomeSubstitutionsType
from ..substitution import Substitution
@expose_substitution('command')
class Command(Substitution):
"""
Substitution that gets the output of a command as a string.
    If the command is not found or fails, a `SubstitutionFailure` error is raised.
Behavior on stderr output is configurable, see constructor.
"""
def __init__(
self,
command: SomeSubstitutionsType,
*,
on_stderr: SomeSubstitutionsType = 'fail'
) -> None:
"""
Construct a command substitution.
:param command: command to be executed. The substitutions will be performed, and
`shlex.split` will be used on the result.
:param on_stderr: specifies what to do when there is stderr output.
Can be one of:
            - 'fail': raises `SubstitutionFailure` when stderr output is detected.
- 'ignore': `stderr` output is ignored.
- 'warn': The `stderr` output is ignored, but a warning is logged if detected.
- 'capture': The `stderr` output will be captured, together with stdout.
It can also be a substitution, that results in one of those four options.
"""
super().__init__()
from ..utilities import normalize_to_list_of_substitutions # import here to avoid loop
self.__command = normalize_to_list_of_substitutions(command)
self.__on_stderr = normalize_to_list_of_substitutions(on_stderr)
@classmethod
def parse(cls, data: Iterable[SomeSubstitutionsType]):
"""Parse `Command` substitution."""
if len(data) < 1 or len(data) > 2:
raise ValueError('command substitution expects 1 or 2 arguments')
kwargs = {'command': data[0]}
if len(data) == 2:
kwargs['on_stderr'] = data[1]
return cls, kwargs
@property
def command(self) -> List[Substitution]:
"""Getter for command."""
return self.__command
@property
def on_stderr(self) -> List[Substitution]:
"""Getter for on_stderr."""
return self.__on_stderr
def describe(self) -> Text:
"""Return a description of this substitution as a string."""
return 'Command({})'.format(' + '.join([sub.describe() for sub in self.command]))
def perform(self, context: LaunchContext) -> Text:
"""Perform the substitution by running the command and capturing its output."""
from ..utilities import perform_substitutions # import here to avoid loop
command_str = perform_substitutions(context, self.command)
if os.name != 'nt':
command = shlex.split(command_str)
else:
command = command_str
on_stderr = perform_substitutions(context, self.on_stderr)
if on_stderr not in ('fail', 'ignore', 'warn', 'capture'):
raise SubstitutionFailure(
"expected 'on_stderr' to be one of: 'fail', 'ignore', 'warn' or 'capture'")
stderr = subprocess.PIPE
if on_stderr == 'capture':
stderr = subprocess.STDOUT
try:
result = subprocess.run(
command,
stdout=subprocess.PIPE,
stderr=stderr,
universal_newlines=True)
except FileNotFoundError as ex:
raise SubstitutionFailure(f'file not found: {ex}')
if result.returncode != 0:
on_error_message = f'executed command failed. Command: {command_str}'
if result.stderr:
on_error_message += f'\nCaptured stderr output: {result.stderr}'
raise SubstitutionFailure(on_error_message)
if result.stderr:
on_stderr_message = f'executed command showed stderr output.' \
f' Command: {command_str}\n' \
f'Captured stderr output:\n{result.stderr}'
if on_stderr == 'fail':
raise SubstitutionFailure(on_stderr_message)
elif on_stderr == 'warn':
launch.logging.get_logger().warning(on_stderr_message)
return result.stdout
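# Minimal usage sketch (added for exposition; assumes `echo` exists on the
# host and that launch.LaunchContext is importable as in the launch package):
#
#   from launch import LaunchContext
#   cmd = Command('echo hello')
#   output = cmd.perform(LaunchContext())  # -> 'hello\n'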
|
__all__ = ['convex_hull_image', 'convex_hull_object']
import numpy as np
from ..measure._pnpoly import grid_points_in_poly
from ._convex_hull import possible_hull
from ..measure._label import label
from ..util import unique_rows
try:
from scipy.spatial import Delaunay
except ImportError:
Delaunay = None
def convex_hull_image(image):
"""Compute the convex hull image of a binary image.
The convex hull is the set of pixels included in the smallest convex
    polygon that surrounds all white pixels in the input image.
Parameters
----------
image : (M, N) array
Binary input image. This array is cast to bool before processing.
Returns
-------
hull : (M, N) array of bool
Binary image with pixels in convex hull set to True.
References
----------
.. [1] http://blogs.mathworks.com/steve/2011/10/04/binary-image-convex-hull-algorithm-notes/
"""
if Delaunay is None:
raise ImportError("Could not import scipy.spatial.Delaunay, "
"only available in scipy >= 0.9.")
# Here we do an optimisation by choosing only pixels that are
# the starting or ending pixel of a row or column. This vastly
# limits the number of coordinates to examine for the virtual hull.
coords = possible_hull(image.astype(np.uint8))
N = len(coords)
# Add a vertex for the middle of each pixel edge
coords_corners = np.empty((N * 4, 2))
for i, (x_offset, y_offset) in enumerate(zip((0, 0, -0.5, 0.5),
(-0.5, 0.5, 0, 0))):
coords_corners[i * N:(i + 1) * N] = coords + [x_offset, y_offset]
# repeated coordinates can *sometimes* cause problems in
# scipy.spatial.Delaunay, so we remove them.
coords = unique_rows(coords_corners)
# Subtract offset
offset = coords.mean(axis=0)
coords -= offset
# Find the convex hull
chull = Delaunay(coords).convex_hull
v = coords[np.unique(chull)]
# Sort vertices clock-wise
v_centred = v - v.mean(axis=0)
angles = np.arctan2(v_centred[:, 0], v_centred[:, 1])
v = v[np.argsort(angles)]
# Add back offset
v += offset
# For each pixel coordinate, check whether that pixel
# lies inside the convex hull
mask = grid_points_in_poly(image.shape[:2], v)
return mask
def convex_hull_object(image, neighbors=8):
"""Compute the convex hull image of individual objects in a binary image.
The convex hull is the set of pixels included in the smallest convex
    polygon that surrounds all white pixels in the input image.
Parameters
----------
image : ndarray
Binary input image.
neighbors : {4, 8}, int
Whether to use 4- or 8-connectivity.
Returns
-------
hull : ndarray of bool
Binary image with pixels in convex hull set to True.
Notes
-----
This function uses skimage.morphology.label to define unique objects,
finds the convex hull of each using convex_hull_image, and combines
these regions with logical OR. Be aware the convex hulls of unconnected
objects may overlap in the result. If this is suspected, consider using
convex_hull_image separately on each object.
"""
if neighbors != 4 and neighbors != 8:
raise ValueError('Neighbors must be either 4 or 8.')
labeled_im = label(image, neighbors, background=0)
convex_obj = np.zeros(image.shape, dtype=bool)
convex_img = np.zeros(image.shape, dtype=bool)
for i in range(0, labeled_im.max() + 1):
convex_obj = convex_hull_image(labeled_im == i)
convex_img = np.logical_or(convex_img, convex_obj)
return convex_img
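# Minimal usage sketch (added for exposition): four isolated pixels form a
# square whose convex hull covers the centre pixel as well:
#
#   >>> img = np.zeros((5, 5), dtype=bool)
#   >>> img[1, 1] = img[1, 3] = img[3, 1] = img[3, 3] = True
#   >>> bool(convex_hull_image(img)[2, 2])
#   True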
|
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from contextlib import contextmanager
from optparse import OptionParser
from pex.bin.pex import configure_clp, configure_clp_pex_resolution
from pex.fetcher import PyPIFetcher
from pex.package import SourcePackage, WheelPackage
from pex.resolver_options import ResolverOptionsBuilder
from pex.sorter import Sorter
@contextmanager
def parser_pair():
builder = ResolverOptionsBuilder()
parser = OptionParser()
yield builder, parser
def test_clp_no_pypi_option():
with parser_pair() as (builder, parser):
configure_clp_pex_resolution(parser, builder)
assert len(builder._fetchers) == 1
options, _ = parser.parse_args(args=['--no-pypi'])
assert len(builder._fetchers) == 0, '--no-pypi should remove fetchers.'
assert options.repos == builder._fetchers
def test_clp_pypi_option_duplicate():
with parser_pair() as (builder, parser):
configure_clp_pex_resolution(parser, builder)
assert len(builder._fetchers) == 1
options, _ = parser.parse_args(args=['--pypi'])
assert len(builder._fetchers) == 1
assert options.repos == builder._fetchers
# TODO(wickman) We should probably add fetchers in order.
def test_clp_repo_option():
with parser_pair() as (builder, parser):
configure_clp_pex_resolution(parser, builder)
assert len(builder._fetchers) == 1
options, _ = parser.parse_args(args=['-f', 'http://www.example.com'])
assert len(builder._fetchers) == 2
assert builder._fetchers == options.repos
def test_clp_index_option():
with parser_pair() as (builder, parser):
configure_clp_pex_resolution(parser, builder)
assert len(builder._fetchers) == 1
options, _ = parser.parse_args(args=['-i', 'http://www.example.com'])
assert len(builder._fetchers) == 2
assert builder._fetchers == options.repos
assert builder._fetchers[1] == PyPIFetcher('http://www.example.com')
def test_clp_build_precedence():
with parser_pair() as (builder, parser):
configure_clp_pex_resolution(parser, builder)
assert builder._precedence == Sorter.DEFAULT_PACKAGE_PRECEDENCE
parser.parse_args(args=['--no-build'])
assert SourcePackage not in builder._precedence
parser.parse_args(args=['--build'])
assert SourcePackage in builder._precedence
options, _ = parser.parse_args(args=['--no-wheel'])
assert WheelPackage not in builder._precedence
assert not options.use_wheel
options, _ = parser.parse_args(args=['--wheel'])
assert WheelPackage in builder._precedence
assert options.use_wheel
# Make sure that we're doing append and not replace
def test_clp_requirements_txt():
parser, builder = configure_clp()
options, _ = parser.parse_args(args='-r requirements1.txt -r requirements2.txt'.split())
assert options.requirement_files == ['requirements1.txt', 'requirements2.txt']
def test_clp_constraints_txt():
parser, builder = configure_clp()
options, _ = parser.parse_args(args='--constraint requirements1.txt'.split())
assert options.constraint_files == ['requirements1.txt']
def test_clp_prereleases():
with parser_pair() as (builder, parser):
configure_clp_pex_resolution(parser, builder)
options, _ = parser.parse_args(args=[])
assert not builder._allow_prereleases
options, _ = parser.parse_args(args=['--no-pre'])
assert not builder._allow_prereleases
options, _ = parser.parse_args(args=['--pre'])
assert builder._allow_prereleases
|
# -*- coding: utf-8 -*-
from django.core.exceptions import FieldError
from hvad.forms import TranslatableModelForm, TranslatableModelFormMetaclass
from hvad.test_utils.context_managers import LanguageOverride
from hvad.test_utils.testcase import NaniTestCase
from testproject.app.models import Normal
from django.db import models
class NormalForm(TranslatableModelForm):
class Meta:
model = Normal
fields = ['shared_field', 'translated_field']
class NormalMediaForm(TranslatableModelForm):
class Meta:
model = Normal
class Media:
css = {
'all': ('layout.css',)
}
class NormalFormExclude(TranslatableModelForm):
class Meta:
model = Normal
exclude = ['shared_field']
class FormTests(NaniTestCase):
def test_nontranslatablemodelform(self):
# Make sure that TranslatableModelForm won't accept a regular model
# "Fake" model to use for the TranslatableModelForm
class NonTranslatableModel(models.Model):
field = models.CharField(max_length=128)
# Meta class for use below
class Meta:
model = NonTranslatableModel
# Make sure we do indeed get an exception, if we try to initialise it
self.assertRaises(TypeError,
TranslatableModelFormMetaclass,
'NonTranslatableModelForm', (TranslatableModelForm,),
{'Meta': Meta}
)
def test_normal_model_form_instantiation(self):
# Basic example and checking it gives us all the fields needed
form = NormalForm()
self.assertTrue("translated_field" in form.fields)
self.assertTrue("shared_field" in form.fields)
self.assertTrue("translated_field" in form.base_fields)
self.assertTrue("shared_field" in form.base_fields)
self.assertFalse(form.is_valid())
# Check if it works with media argument too
form = NormalMediaForm()
self.assertFalse(form.is_valid())
self.assertTrue("layout.css" in str(form.media))
# Check if it works with an instance of Normal
form = NormalForm(instance=Normal())
self.assertFalse(form.is_valid())
def test_normal_model_form_valid(self):
SHARED = 'Shared'
TRANSLATED = 'English'
data = {
'shared_field': SHARED,
'translated_field': TRANSLATED,
'language_code': 'en'
}
form = NormalForm(data)
self.assertTrue(form.is_valid(), form.errors.as_text())
self.assertTrue("translated_field" in form.fields)
self.assertTrue("shared_field" in form.fields)
self.assertTrue(TRANSLATED in form.clean()["translated_field"])
self.assertTrue(SHARED in form.clean()["shared_field"])
    def test_normal_model_form_initialdata_instance(self):
        # Check if it accepts initial data and an instance
SHARED = 'Shared'
TRANSLATED = 'English'
data = {
'shared_field': SHARED,
'translated_field': TRANSLATED,
'language_code': 'en'
}
form = NormalForm(data, instance=Normal(), initial=data)
self.assertTrue(form.is_valid(), form.errors.as_text())
def test_normal_model_form_existing_instance(self):
# Check if it works with an existing instance of Normal
SHARED = 'Shared'
TRANSLATED = 'English'
instance = Normal.objects.language("en").create(shared_field=SHARED, translated_field=TRANSLATED)
form = NormalForm(instance=instance)
self.assertFalse(form.is_valid())
self.assertTrue(SHARED in form.as_p())
self.assertTrue(TRANSLATED in form.as_p())
def test_normal_model_form_save(self):
with LanguageOverride('en'):
SHARED = 'Shared'
TRANSLATED = 'English'
data = {
'shared_field': SHARED,
'translated_field': TRANSLATED,
'language_code': 'en'
}
form = NormalForm(data)
            # We tested a non-translated ModelForm for comparison; that takes 7 queries.
with self.assertNumQueries(2):
obj = form.save()
with self.assertNumQueries(0):
self.assertEqual(obj.shared_field, SHARED)
self.assertEqual(obj.translated_field, TRANSLATED)
self.assertNotEqual(obj.pk, None)
def test_no_language_code_in_fields(self):
with LanguageOverride("en"):
form = NormalForm()
            self.assertFalse("language_code" in form.fields)
            form = NormalMediaForm()
            self.assertFalse("language_code" in form.fields)
            form = NormalFormExclude()
            self.assertFalse("language_code" in form.fields)
def test_form_wrong_field_in_class(self):
with LanguageOverride("en"):
def create_wrong_form():
class WrongForm(TranslatableModelForm):
class Meta:
model = Normal
fields = ['a_field_that_doesnt_exist']
form = WrongForm()
self.assertRaises(FieldError, create_wrong_form)
|
# Copyright 2018-2020 The Wazo Authors (see the AUTHORS file)
# SPDX-License-Identifier: GPL-3.0-or-later
import time
import smtplib
from collections import namedtuple
from email import utils as email_utils
from email.mime.text import MIMEText
from wazo_auth.services.helpers import BaseService
EmailDestination = namedtuple('EmailDestination', ['name', 'address'])
# NOTE(sileht): the default socket timeout is None on Linux.
# Our HTTP client timeout is 10s; since sending mail is currently synchronous,
# we have to be sure we return before those 10s, so we set an SMTP timeout.
SMTP_TIMEOUT = 4
class EmailService(BaseService):
def __init__(self, dao, tenant_uuid, config, template_formatter):
super().__init__(dao, tenant_uuid)
self._formatter = template_formatter
self._smtp_host = config['smtp']['hostname']
self._smtp_port = config['smtp']['port']
self._confirmation_token_expiration = config['email_confirmation_expiration']
self._reset_token_expiration = config['password_reset_expiration']
self._confirmation_from = EmailDestination(
config['email_confirmation_from_name'],
config['email_confirmation_from_address'],
)
self._password_reset_from = EmailDestination(
config['password_reset_from_name'], config['password_reset_from_address']
)
def confirm(self, email_uuid):
self._dao.email.confirm(email_uuid)
def send_confirmation_email(
self, username, email_uuid, email_address, connection_params
):
template_context = dict(connection_params)
template_context.update(
{
'token': self._new_email_confirmation_token(email_uuid),
'username': username,
'email_uuid': email_uuid,
'email_address': email_address,
}
)
body = self._formatter.format_confirmation_email(template_context)
subject = self._formatter.format_confirmation_subject(template_context)
to = EmailDestination(username, email_address)
self._send_msg(to, self._confirmation_from, subject, body)
def send_reset_email(self, user_uuid, username, email_address, connection_params):
template_context = dict(connection_params)
template_context.update(
{
'token': self._new_email_reset_token(user_uuid),
'username': username,
'user_uuid': user_uuid,
'email_address': email_address,
}
)
body = self._formatter.format_password_reset_email(template_context)
subject = self._formatter.format_password_reset_subject(template_context)
to = EmailDestination(username, email_address)
        self._send_msg(to, self._password_reset_from, subject, body)
def _send_msg(self, to, from_, subject, body):
msg = MIMEText(body)
msg['To'] = email_utils.formataddr(to)
msg['From'] = email_utils.formataddr(from_)
msg['Subject'] = subject
with smtplib.SMTP(
self._smtp_host, self._smtp_port, timeout=SMTP_TIMEOUT
) as server:
server.sendmail(from_.address, [to.address], msg.as_string())
def _new_email_confirmation_token(self, email_uuid):
acl = 'auth.emails.{}.confirm.edit'.format(email_uuid)
return self._new_generic_token(self._confirmation_token_expiration, acl)
def _new_email_reset_token(self, user_uuid):
acl = 'auth.users.password.reset.{}.create'.format(user_uuid)
return self._new_generic_token(self._reset_token_expiration, acl)
def _new_generic_token(self, expiration, *acl):
t = time.time()
token_payload = {
'auth_id': 'wazo-auth',
'pbx_user_uuid': None,
'xivo_uuid': None,
'expire_t': t + expiration,
'issued_t': t,
'acl': acl,
'user_agent': 'wazo-auth-email-reset',
'remote_addr': '',
}
session_payload = {}
token_uuid, session_uuid = self._dao.token.create(
token_payload, session_payload
)
return token_uuid
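# Minimal usage sketch (added for exposition; `dao`, `tenant_uuid`,
# `config` and `formatter` are assumed to satisfy the constructor's
# expectations):
#
#   service = EmailService(dao, tenant_uuid, config, formatter)
#   service.send_confirmation_email(
#       'alice', email_uuid, 'alice@example.com',
#       {'hostname': 'auth.example.com'})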
|
#!/usr/bin/env python3
"""Python binding of Joystick wrapper of LetMeCreate library."""
import ctypes
_LIB = ctypes.CDLL('libletmecreate_click.so')
def get_x():
"""Returns the X position of the joystick.
Note: An exception is thrown if it fails to read the X position from the
chip.
"""
pos_x = ctypes.c_int8(0)
ret = _LIB.joystick_click_get_x(ctypes.byref(pos_x))
if ret < 0:
raise Exception("joystick click get x failed")
return pos_x.value
def get_y():
"""Returns the Y position of the joystick.
Note: An exception is thrown if it fails to read the Y position from the
chip.
"""
pos_y = ctypes.c_int8(0)
ret = _LIB.joystick_click_get_y(ctypes.byref(pos_y))
if ret < 0:
raise Exception("joystick click get y failed")
return pos_y.value
def get_position():
"""Returns the X position of the joystick.
Note: An exception is thrown if it fails to read the position from the
chip.
"""
pos_x = ctypes.c_int8(0)
pos_y = ctypes.c_int8(0)
ret = _LIB.joystick_click_get_position(ctypes.byref(pos_x),
ctypes.byref(pos_y))
if ret < 0:
raise Exception("joystick click get position failed")
return (pos_x.value, pos_y.value)
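# Minimal usage sketch (added for exposition; assumes a Joystick Click
# board is attached and libletmecreate_click.so is installed):
#
#   x, y = get_position()
#   print('joystick at ({}, {})'.format(x, y))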
|
###
# Copyright (c) 2002-2005, Jeremiah Fincher
# Copyright (c) 2009-2010, James McCoy
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
"""
Includes wrappers for commands.
"""
import time
import Queue
import types
import getopt
import inspect
import threading
import multiprocessing
from . import callbacks, conf, ircdb, ircmsgs, ircutils, log, utils, world
###
# Non-arg wrappers -- these just change the behavior of a command without
# changing the arguments given to it.
###
# Thread has to be a non-arg wrapper because by the time we're parsing and
# validating arguments, we're inside the function we'd want to thread.
def thread(f):
"""Makes sure a command spawns a thread when called."""
def newf(self, irc, msg, args, *L, **kwargs):
if world.isMainThread():
targetArgs = (self.callingCommand, irc, msg, args) + tuple(L)
t = callbacks.CommandThread(target=self._callCommand,
args=targetArgs, kwargs=kwargs)
t.start()
else:
f(self, irc, msg, args, *L, **kwargs)
return utils.python.changeFunctionName(newf, f.func_name, f.__doc__)
class ProcessTimeoutError(Exception):
"""Gets raised when a process is killed due to timeout."""
pass
def process(f, *args, **kwargs):
"""Runs a function <f> in a subprocess.
Several extra keyword arguments can be supplied.
<pn>, the pluginname, and <cn>, the command name, are strings used to
create the process name, for identification purposes.
<timeout>, if supplied, limits the length of execution of target
function to <timeout> seconds."""
timeout = kwargs.pop('timeout', None)
q = multiprocessing.Queue()
def newf(f, q, *args, **kwargs):
try:
r = f(*args, **kwargs)
q.put(r)
except Exception as e:
q.put(e)
targetArgs = (f, q,) + args
p = callbacks.CommandProcess(target=newf,
args=targetArgs, kwargs=kwargs)
p.start()
p.join(timeout)
if p.is_alive():
p.terminate()
raise ProcessTimeoutError, "%s aborted due to timeout." % (p.name,)
try:
v = q.get(block=False)
except Queue.Empty:
v = "Nothing returned."
if isinstance(v, Exception):
v = "Error: " + str(v)
return v
def regexp_wrapper(s, reobj, timeout, plugin_name, fcn_name):
'''A convenient wrapper to stuff regexp search queries through a subprocess.
This is used because specially-crafted regexps can use exponential time
and hang the bot.'''
def re_bool(s, reobj):
"""Since we can't enqueue match objects into the multiprocessing queue,
we'll just wrap the function to return bools."""
if reobj.search(s) is not None:
return True
else:
return False
try:
v = process(re_bool, s, reobj, timeout=timeout, pn=plugin_name, cn=fcn_name)
return v
except ProcessTimeoutError:
return False
class UrlSnarfThread(world.SupyThread):
def __init__(self, *args, **kwargs):
assert 'url' in kwargs
kwargs['name'] = 'Thread #%s (for snarfing %s)' % \
(world.threadsSpawned, kwargs.pop('url'))
super(UrlSnarfThread, self).__init__(*args, **kwargs)
self.setDaemon(True)
def run(self):
try:
super(UrlSnarfThread, self).run()
except utils.web.Error, e:
log.debug('Exception in urlSnarfer: %s', utils.exnToString(e))
class SnarfQueue(ircutils.FloodQueue):
timeout = conf.supybot.snarfThrottle
def key(self, channel):
return channel
_snarfed = SnarfQueue()
class SnarfIrc(object):
def __init__(self, irc, channel, url):
self.irc = irc
self.url = url
self.channel = channel
def __getattr__(self, attr):
return getattr(self.irc, attr)
def reply(self, *args, **kwargs):
_snarfed.enqueue(self.channel, self.url)
return self.irc.reply(*args, **kwargs)
# This lock is used to serialize the calls to snarfers, so
# earlier snarfers are guaranteed to beat out later snarfers.
_snarfLock = threading.Lock()
def urlSnarfer(f):
"""Protects the snarfer from loops (with other bots) and whatnot."""
def newf(self, irc, msg, match, *L, **kwargs):
url = match.group(0)
channel = msg.args[0]
if not irc.isChannel(channel) or (ircmsgs.isCtcp(msg) and not
ircmsgs.isAction(msg)):
return
if ircdb.channels.getChannel(channel).lobotomized:
self.log.debug('Not snarfing in %s: lobotomized.', channel)
return
if _snarfed.has(channel, url):
self.log.info('Throttling snarf of %s in %s.', url, channel)
return
irc = SnarfIrc(irc, channel, url)
def doSnarf():
_snarfLock.acquire()
try:
# This has to be *after* we've acquired the lock so we can be
# sure that all previous urlSnarfers have already run to
# completion.
if msg.repliedTo:
self.log.debug('Not snarfing, msg is already repliedTo.')
return
f(self, irc, msg, match, *L, **kwargs)
finally:
_snarfLock.release()
if threading.currentThread() is not world.mainThread:
doSnarf()
else:
L = list(L)
t = UrlSnarfThread(target=doSnarf, url=url)
t.start()
newf = utils.python.changeFunctionName(newf, f.func_name, f.__doc__)
return newf
###
# Converters, which take irc, msg, args, and a state object, and build up the
# validated and converted args for the method in state.args.
###
# This is just so we can centralize this, since it may change.
def _int(s):
base = 10
if s.startswith('0x'):
base = 16
s = s[2:]
elif s.startswith('0b'):
base = 2
s = s[2:]
elif s.startswith('0') and len(s) > 1:
base = 8
s = s[1:]
try:
return int(s, base)
except ValueError:
if base == 10:
try:
return int(float(s))
except OverflowError:
raise ValueError('I don\'t understand numbers that large.')
else:
raise
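# Illustrative sketch (added for exposition): _int autodetects the base
# from common prefixes before falling back to base 10:
#
#   _int('0x10') == 16
#   _int('0b10') == 2
#   _int('010') == 8
#   _int('10') == 10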
def getInt(irc, msg, args, state, type='integer', p=None):
try:
i = _int(args[0])
if p is not None:
if not p(i):
state.errorInvalid(type, args[0])
state.args.append(i)
del args[0]
except ValueError:
state.errorInvalid(type, args[0])
def getNonInt(irc, msg, args, state, type='non-integer value'):
try:
i = _int(args[0])
state.errorInvalid(type, args[0])
except ValueError:
state.args.append(args.pop(0))
def getLong(irc, msg, args, state, type='long'):
getInt(irc, msg, args, state, type)
state.args[-1] = long(state.args[-1])
def getFloat(irc, msg, args, state, type='floating point number'):
try:
state.args.append(float(args[0]))
del args[0]
except ValueError:
state.errorInvalid(type, args[0])
def getPositiveInt(irc, msg, args, state, *L):
getInt(irc, msg, args, state,
p=lambda i: i>0, type='positive integer', *L)
def getNonNegativeInt(irc, msg, args, state, *L):
getInt(irc, msg, args, state,
p=lambda i: i>=0, type='non-negative integer', *L)
def getIndex(irc, msg, args, state):
getInt(irc, msg, args, state, type='index')
if state.args[-1] > 0:
state.args[-1] -= 1
def getId(irc, msg, args, state, kind=None):
type = 'id'
if kind is not None and not kind.endswith('id'):
type = kind + ' id'
original = args[0]
try:
args[0] = args[0].lstrip('#')
getInt(irc, msg, args, state, type=type)
except Exception, e:
args[0] = original
raise
def getExpiry(irc, msg, args, state):
now = int(time.time())
try:
expires = _int(args[0])
if expires:
expires += now
state.args.append(expires)
del args[0]
except ValueError:
state.errorInvalid('number of seconds', args[0])
def getBoolean(irc, msg, args, state):
try:
state.args.append(utils.str.toBool(args[0]))
del args[0]
except ValueError:
state.errorInvalid('boolean', args[0])
def getNetworkIrc(irc, msg, args, state, errorIfNoMatch=False):
if args:
for otherIrc in world.ircs:
if otherIrc.network.lower() == args[0].lower():
state.args.append(otherIrc)
del args[0]
return
if errorIfNoMatch:
raise callbacks.ArgumentError
else:
state.args.append(irc)
def getHaveOp(irc, msg, args, state, action='do that'):
getChannel(irc, msg, args, state)
if state.channel not in irc.state.channels:
state.error('I\'m not even in %s.' % state.channel, Raise=True)
if not irc.state.channels[state.channel].isOp(irc.nick):
state.error('I need to be opped to %s.' % action, Raise=True)
def validChannel(irc, msg, args, state):
if irc.isChannel(args[0]):
state.args.append(args.pop(0))
else:
state.errorInvalid('channel', args[0])
def getHostmask(irc, msg, args, state):
if ircutils.isUserHostmask(args[0]):
state.args.append(args.pop(0))
else:
try:
hostmask = irc.state.nickToHostmask(args[0])
state.args.append(hostmask)
del args[0]
except KeyError:
state.errorInvalid('nick or hostmask', args[0])
def getBanmask(irc, msg, args, state):
getHostmask(irc, msg, args, state)
getChannel(irc, msg, args, state)
channel = state.channel
banmaskstyle = conf.supybot.protocols.irc.banmask
state.args[-1] = banmaskstyle.makeBanmask(state.args[-1])
def getUser(irc, msg, args, state):
try:
state.args.append(ircdb.users.getUser(msg.prefix))
except KeyError:
state.errorNotRegistered(Raise=True)
def getOtherUser(irc, msg, args, state):
# Although ircdb.users.getUser could accept a hostmask, we're explicitly
# excluding that from our interface with this check
if ircutils.isUserHostmask(args[0]):
state.errorNoUser(args[0])
try:
state.args.append(ircdb.users.getUser(args[0]))
del args[0]
except KeyError:
try:
getHostmask(irc, msg, [args[0]], state)
hostmask = state.args.pop()
state.args.append(ircdb.users.getUser(hostmask))
del args[0]
except (KeyError, callbacks.Error):
state.errorNoUser(name=args[0])
def _getRe(f):
def get(irc, msg, args, state, convert=True):
original = args[:]
s = args.pop(0)
def isRe(s):
try:
_ = f(s)
return True
except ValueError:
return False
try:
while len(s) < 512 and not isRe(s):
s += ' ' + args.pop(0)
if len(s) < 512:
if convert:
state.args.append(f(s))
else:
state.args.append(s)
else:
state.errorInvalid('regular expression', s)
except IndexError:
args[:] = original
state.errorInvalid('regular expression', s)
return get
getMatcher = _getRe(utils.str.perlReToPythonRe)
getReplacer = _getRe(utils.str.perlReToReplacer)
def getNick(irc, msg, args, state):
if ircutils.isNick(args[0]):
if 'nicklen' in irc.state.supported:
if len(args[0]) > irc.state.supported['nicklen']:
state.errorInvalid('nick', args[0],
'That nick is too long for this server.')
state.args.append(args.pop(0))
else:
state.errorInvalid('nick', args[0])
def getSeenNick(irc, msg, args, state, errmsg=None):
try:
_ = irc.state.nickToHostmask(args[0])
state.args.append(args.pop(0))
except KeyError:
if errmsg is None:
errmsg = 'I haven\'t seen %s.' % args[0]
state.error(errmsg, Raise=True)
def getChannel(irc, msg, args, state):
if state.channel:
return
if args and irc.isChannel(args[0]):
channel = args.pop(0)
elif irc.isChannel(msg.args[0]):
channel = msg.args[0]
else:
state.log.debug('Raising ArgumentError because there is no channel.')
raise callbacks.ArgumentError
state.channel = channel
state.args.append(channel)
def getChannelDb(irc, msg, args, state, **kwargs):
channelSpecific = conf.supybot.databases.plugins.channelSpecific
try:
getChannel(irc, msg, args, state, **kwargs)
channel = channelSpecific.getChannelLink(state.channel)
state.channel = channel
state.args[-1] = channel
except (callbacks.ArgumentError, IndexError):
if channelSpecific():
raise
channel = channelSpecific.link()
if not conf.get(channelSpecific.link.allow, channel):
log.warning('channelSpecific.link is globally set to %s, but '
'%s disallowed linking to its db.', channel, channel)
raise
else:
channel = channelSpecific.getChannelLink(channel)
state.channel = channel
state.args.append(channel)
def inChannel(irc, msg, args, state):
getChannel(irc, msg, args, state)
if state.channel not in irc.state.channels:
state.error('I\'m not in %s.' % state.channel, Raise=True)
def onlyInChannel(irc, msg, args, state):
if not (irc.isChannel(msg.args[0]) and msg.args[0] in irc.state.channels):
state.error('This command may only be given in a channel that I am in.',
Raise=True)
else:
state.channel = msg.args[0]
state.args.append(state.channel)
def callerInGivenChannel(irc, msg, args, state):
channel = args[0]
if irc.isChannel(channel):
if channel in irc.state.channels:
if msg.nick in irc.state.channels[channel].users:
state.args.append(args.pop(0))
else:
state.error('You must be in %s.' % channel, Raise=True)
else:
state.error('I\'m not in %s.' % channel, Raise=True)
else:
state.errorInvalid('channel', args[0])
def nickInChannel(irc, msg, args, state):
originalArgs = state.args[:]
inChannel(irc, msg, args, state)
state.args = originalArgs
if args[0] not in irc.state.channels[state.channel].users:
state.error('%s is not in %s.' % (args[0], state.channel), Raise=True)
state.args.append(args.pop(0))
def getChannelOrNone(irc, msg, args, state):
try:
getChannel(irc, msg, args, state)
except callbacks.ArgumentError:
state.args.append(None)
def checkChannelCapability(irc, msg, args, state, cap):
getChannel(irc, msg, args, state)
cap = ircdb.canonicalCapability(cap)
cap = ircdb.makeChannelCapability(state.channel, cap)
if not ircdb.checkCapability(msg.prefix, cap):
state.errorNoCapability(cap, Raise=True)
def getOp(irc, msg, args, state):
checkChannelCapability(irc, msg, args, state, 'op')
def getHalfop(irc, msg, args, state):
checkChannelCapability(irc, msg, args, state, 'halfop')
def getVoice(irc, msg, args, state):
checkChannelCapability(irc, msg, args, state, 'voice')
def getLowered(irc, msg, args, state):
state.args.append(ircutils.toLower(args.pop(0)))
def getSomething(irc, msg, args, state, errorMsg=None, p=None):
if p is None:
p = lambda _: True
if not args[0] or not p(args[0]):
if errorMsg is None:
errorMsg = 'You must not give the empty string as an argument.'
state.error(errorMsg, Raise=True)
else:
state.args.append(args.pop(0))
def getSomethingNoSpaces(irc, msg, args, state, errorMsg=None):
def p(s):
return len(s.split(None, 1)) == 1
if errorMsg is None:
errorMsg='You must not give a string containing spaces as an argument.'
getSomething(irc, msg, args, state, errorMsg=errorMsg, p=p)
def private(irc, msg, args, state):
if irc.isChannel(msg.args[0]):
state.errorRequiresPrivacy(Raise=True)
def public(irc, msg, args, state, errmsg=None):
if not irc.isChannel(msg.args[0]):
if errmsg is None:
errmsg = 'This message must be sent in a channel.'
state.error(errmsg, Raise=True)
def checkCapability(irc, msg, args, state, cap):
cap = ircdb.canonicalCapability(cap)
if not ircdb.checkCapability(msg.prefix, cap):
state.errorNoCapability(cap, Raise=True)
def owner(irc, msg, args, state):
checkCapability(irc, msg, args, state, 'owner')
def admin(irc, msg, args, state):
checkCapability(irc, msg, args, state, 'admin')
def anything(irc, msg, args, state):
state.args.append(args.pop(0))
def getGlob(irc, msg, args, state):
glob = args.pop(0)
if '*' not in glob and '?' not in glob:
glob = '*%s*' % glob
state.args.append(glob)
def getUrl(irc, msg, args, state):
if utils.web.urlRe.match(args[0]):
state.args.append(args.pop(0))
else:
state.errorInvalid('url', args[0])
def getEmail(irc, msg, args, state):
if utils.net.emailRe.match(args[0]):
state.args.append(args.pop(0))
else:
state.errorInvalid('email', args[0])
def getHttpUrl(irc, msg, args, state):
if utils.web.httpUrlRe.match(args[0]):
state.args.append(args.pop(0))
elif utils.web.httpUrlRe.match('http://' + args[0]):
state.args.append('http://' + args.pop(0))
else:
state.errorInvalid('http url', args[0])
def getNow(irc, msg, args, state):
state.args.append(int(time.time()))
def getCommandName(irc, msg, args, state):
if ' ' in args[0]:
state.errorInvalid('command name', args[0])
else:
state.args.append(callbacks.canonicalName(args.pop(0)))
def getIp(irc, msg, args, state):
if utils.net.isIP(args[0]):
state.args.append(args.pop(0))
else:
state.errorInvalid('ip', args[0])
def getLetter(irc, msg, args, state):
if len(args[0]) == 1:
state.args.append(args.pop(0))
else:
state.errorInvalid('letter', args[0])
def getMatch(irc, msg, args, state, regexp, errmsg):
m = regexp.search(args[0])
if m is not None:
state.args.append(m)
del args[0]
else:
state.error(errmsg, Raise=True)
def getLiteral(irc, msg, args, state, literals, errmsg=None):
# ??? Should we allow abbreviations?
if isinstance(literals, basestring):
literals = (literals,)
abbrevs = utils.abbrev(literals)
if args[0] in abbrevs:
state.args.append(abbrevs[args.pop(0)])
elif errmsg is not None:
state.error(errmsg, Raise=True)
else:
raise callbacks.ArgumentError
def getTo(irc, msg, args, state):
if args[0].lower() == 'to':
args.pop(0)
def getPlugin(irc, msg, args, state, require=True):
cb = irc.getCallback(args[0])
if cb is not None:
state.args.append(cb)
del args[0]
elif require:
state.errorInvalid('plugin', args[0])
else:
state.args.append(None)
def getIrcColor(irc, msg, args, state):
if args[0] in ircutils.mircColors:
state.args.append(ircutils.mircColors[args.pop(0)])
else:
state.errorInvalid('irc color')
def getText(irc, msg, args, state):
if args:
state.args.append(' '.join(args))
args[:] = []
else:
raise IndexError
wrappers = ircutils.IrcDict({
'admin': admin,
'anything': anything,
'banmask': getBanmask,
'boolean': getBoolean,
'callerInGivenChannel': callerInGivenChannel,
'capability': getSomethingNoSpaces,
'channel': getChannel,
'channelDb': getChannelDb,
'checkCapability': checkCapability,
'checkChannelCapability': checkChannelCapability,
'color': getIrcColor,
'commandName': getCommandName,
'email': getEmail,
'expiry': getExpiry,
'filename': getSomething, # XXX Check for validity.
'float': getFloat,
'glob': getGlob,
'halfop': getHalfop,
'haveOp': getHaveOp,
'hostmask': getHostmask,
'httpUrl': getHttpUrl,
'id': getId,
'inChannel': inChannel,
'index': getIndex,
'int': getInt,
'ip': getIp,
'letter': getLetter,
'literal': getLiteral,
'long': getLong,
'lowered': getLowered,
'matches': getMatch,
'networkIrc': getNetworkIrc,
'nick': getNick,
'nickInChannel': nickInChannel,
'nonInt': getNonInt,
'nonNegativeInt': getNonNegativeInt,
'now': getNow,
'onlyInChannel': onlyInChannel,
'op': getOp,
'otherUser': getOtherUser,
'owner': owner,
'plugin': getPlugin,
'positiveInt': getPositiveInt,
'private': private,
'public': public,
'regexpMatcher': getMatcher,
'regexpReplacer': getReplacer,
'seenNick': getSeenNick,
'something': getSomething,
'somethingWithoutSpaces': getSomethingNoSpaces,
'text': getText,
'to': getTo,
'url': getUrl,
'user': getUser,
'validChannel': validChannel,
'voice': getVoice,
})
def addConverter(name, wrapper):
wrappers[name] = wrapper
class UnknownConverter(KeyError):
pass
def getConverter(name):
try:
return wrappers[name]
except KeyError, e:
raise UnknownConverter, str(e)
def callConverter(name, irc, msg, args, state, *L):
getConverter(name)(irc, msg, args, state, *L)
###
# Contexts. These determine what the nature of conversions is; whether they're
# defaulted, or many of them are allowed, etc. Contexts should be reusable;
# i.e., they should not maintain state between calls.
###
def contextify(spec):
if not isinstance(spec, context):
spec = context(spec)
return spec
def setDefault(state, default):
if callable(default):
state.args.append(default())
else:
state.args.append(default)
class context(object):
def __init__(self, spec):
self.args = ()
self.spec = spec # for repr
if isinstance(spec, tuple):
assert spec, 'tuple spec must not be empty.'
self.args = spec[1:]
self.converter = getConverter(spec[0])
elif spec is None:
self.converter = getConverter('anything')
elif isinstance(spec, basestring):
self.args = ()
self.converter = getConverter(spec)
else:
assert isinstance(spec, context)
self.converter = spec
def __call__(self, irc, msg, args, state):
log.debug('args before %r: %r', self, args)
self.converter(irc, msg, args, state, *self.args)
log.debug('args after %r: %r', self, args)
def __repr__(self):
return '<%s for %s>' % (self.__class__.__name__, self.spec)
class rest(context):
def __call__(self, irc, msg, args, state):
if args:
original = args[:]
args[:] = [' '.join(args)]
try:
super(rest, self).__call__(irc, msg, args, state)
except Exception, e:
args[:] = original
else:
raise IndexError
# additional means: Look for this (and make sure it's of this type). If
# there are no arguments for us to check, then use our default.
class additional(context):
def __init__(self, spec, default=None):
self.__parent = super(additional, self)
self.__parent.__init__(spec)
self.default = default
def __call__(self, irc, msg, args, state):
try:
self.__parent.__call__(irc, msg, args, state)
except IndexError:
log.debug('Got IndexError, returning default.')
setDefault(state, self.default)
# optional means: Look for this, but if it's not the type I'm expecting or
# there are no arguments for us to check, then use the default value.
class optional(additional):
def __call__(self, irc, msg, args, state):
try:
super(optional, self).__call__(irc, msg, args, state)
except (callbacks.ArgumentError, callbacks.Error), e:
log.debug('Got %s, returning default.', utils.exnToString(e))
state.errored = False
setDefault(state, self.default)
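# Illustrative sketch (added for exposition; `f` is a hypothetical command
# method): a spec such as wrap(f, ['int', additional('int', 5)]) requires
# one integer argument and accepts an optional second one, appending the
# default 5 to state.args when the second argument is absent.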
class any(context):
def __init__(self, spec, continueOnError=False):
self.__parent = super(any, self)
self.__parent.__init__(spec)
self.continueOnError = continueOnError
def __call__(self, irc, msg, args, state):
st = state.essence()
try:
while args:
self.__parent.__call__(irc, msg, args, st)
except IndexError:
pass
except (callbacks.ArgumentError, callbacks.Error), e:
if not self.continueOnError:
raise
else:
log.debug('Got %s, returning default.', utils.exnToString(e))
pass
state.args.append(st.args)
class many(any):
def __call__(self, irc, msg, args, state):
super(many, self).__call__(irc, msg, args, state)
if not state.args[-1]:
state.args.pop()
raise callbacks.ArgumentError
class first(context):
def __init__(self, *specs, **kw):
if 'default' in kw:
self.default = kw.pop('default')
assert not kw, 'Bad kwargs for first.__init__'
self.spec = specs # for __repr__
self.specs = map(contextify, specs)
def __call__(self, irc, msg, args, state):
errored = False
for spec in self.specs:
try:
spec(irc, msg, args, state)
return
except Exception, e:
errored = state.errored
state.errored = False
continue
if hasattr(self, 'default'):
state.args.append(self.default)
else:
state.errored = errored
raise e
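# Sketch: first('int', 'text') tries each spec in order against the same
# arguments and keeps the first successful conversion; when a default=...
# keyword was given, exhausting every spec appends that default instead of
# re-raising the last error. ('int' is assumed to be registered.)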
class reverse(context):
def __call__(self, irc, msg, args, state):
args[:] = args[::-1]
super(reverse, self).__call__(irc, msg, args, state)
args[:] = args[::-1]
class commalist(context):
def __call__(self, irc, msg, args, state):
original = args[:]
st = state.essence()
trailingComma = True
try:
while trailingComma:
arg = args.pop(0)
if not arg.endswith(','):
trailingComma = False
for part in arg.split(','):
if part: # trailing commas
super(commalist, self).__call__(irc, msg, [part], st)
state.args.append(st.args)
except Exception, e:
args[:] = original
raise
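# Sketch: given args like ['a,b,', 'c'], commalist('text') keeps consuming
# while arguments end with a comma, splits each on commas (skipping empty
# parts from trailing commas), converts each piece, and appends the whole
# collected list to state.args.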
class getopts(context):
"""The empty string indicates that no argument is taken; None indicates
that there is no converter for the argument."""
def __init__(self, getopts):
self.spec = getopts # for repr
self.getopts = {}
self.getoptL = []
for (name, spec) in getopts.iteritems():
if spec == '':
self.getoptL.append(name)
self.getopts[name] = None
else:
self.getoptL.append(name + '=')
self.getopts[name] = contextify(spec)
log.debug('getopts: %r', self.getopts)
log.debug('getoptL: %r', self.getoptL)
def __call__(self, irc, msg, args, state):
log.debug('args before %r: %r', self, args)
(optlist, rest) = getopt.getopt(args, '', self.getoptL)
getopts = []
for (opt, arg) in optlist:
opt = opt[2:] # Strip --
log.debug('opt: %r, arg: %r', opt, arg)
context = self.getopts[opt]
if context is not None:
st = state.essence()
context(irc, msg, [arg], st)
assert len(st.args) == 1
getopts.append((opt, st.args[0]))
else:
getopts.append((opt, True))
state.args.append(getopts)
args[:] = rest
log.debug('args after %r: %r', self, args)
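# Sketch of a getopts spec (the option names are hypothetical; 'int' is
# assumed to be registered): an empty string declares a bare flag, anything
# else is a converter spec for the option's argument.
#
#     getopts({'count': 'int', 'nolimit': ''})
#
# Given args ['--count', '3', '--nolimit', 'foo'], this appends
# [('count', 3), ('nolimit', True)] to state.args and leaves ['foo'] in args.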
###
# This is our state object, passed to converters along with irc, msg, and args.
###
class State(object):
log = log
def __init__(self, types):
self.args = []
self.kwargs = {}
self.types = types
self.channel = None
self.errored = False
def __getattr__(self, attr):
if attr.startswith('error'):
self.errored = True
return getattr(dynamic.irc, attr)
else:
raise AttributeError, attr
def essence(self):
st = State(self.types)
for (attr, value) in self.__dict__.iteritems():
if attr not in ('args', 'kwargs'):
setattr(st, attr, value)
return st
def __repr__(self):
return '%s(args=%r, kwargs=%r, channel=%r)' % (self.__class__.__name__,
self.args, self.kwargs,
self.channel)
###
# This is a compiled Spec object.
###
class Spec(object):
def _state(self, types, attrs={}):
st = State(types)
st.__dict__.update(attrs)
st.allowExtra = self.allowExtra
return st
def __init__(self, types, allowExtra=False):
self.types = types
self.allowExtra = allowExtra
utils.seq.mapinto(contextify, self.types)
def __call__(self, irc, msg, args, stateAttrs={}):
state = self._state(self.types[:], stateAttrs)
while state.types:
context = state.types.pop(0)
try:
context(irc, msg, args, state)
except IndexError:
raise callbacks.ArgumentError
if args and not state.allowExtra:
log.debug('args and not self.allowExtra: %r', args)
raise callbacks.ArgumentError
return state
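# Sketch: a Spec is the compiled form of a spec list; calling it consumes
# args through each context in turn and returns the resulting State. With
# 'int' assumed registered:
#
#     spec = Spec(['int', additional('text', '')])
#     state = spec(irc, msg, ['3', 'hello'])  # state.args == [3, 'hello']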
def wrap(f, specList=[], name=None, **kw):
name = name or f.func_name
spec = Spec(specList, **kw)
def newf(self, irc, msg, args, **kwargs):
state = spec(irc, msg, args, stateAttrs={'cb': self, 'log': self.log})
self.log.debug('State before call: %s', state)
if state.errored:
self.log.debug('Refusing to call %s due to state.errored.', f)
else:
try:
f(self, irc, msg, args, *state.args, **state.kwargs)
except TypeError:
self.log.error('Spec: %s', specList)
self.log.error('Received args: %s', args)
code = f.func_code
funcArgs = inspect.getargs(code)[0][len(self.commandArgs):]
self.log.error('Extra args: %s', funcArgs)
raise
return utils.python.changeFunctionName(newf, name, f.__doc__)
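# Typical plugin usage (a hedged sketch; MyPlugin and repeat are
# hypothetical, but this is the standard wrap() idiom, with 'int' assumed
# among the registered converters):
#
#     class MyPlugin(callbacks.Plugin):
#         def repeat(self, irc, msg, args, count, text):
#             """<count> <text>
#
#             Replies with <text> repeated <count> times."""
#             irc.reply(text * count)
#         repeat = wrap(repeat, ['int', 'text'])
#
# wrap() compiles the spec list into a Spec, runs it against the incoming
# args, and calls the original function with the converted values appended.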
__all__ = [
# Contexts.
'any', 'many',
'optional', 'additional',
'rest', 'getopts',
'first', 'reverse',
'commalist',
# Converter helpers.
'getConverter', 'addConverter', 'callConverter',
# Decorators.
'urlSnarfer', 'thread',
# Functions.
'wrap', 'process', 'regexp_wrapper',
# Stuff for testing.
'Spec',
]
# This doesn't work. Suck.
## if world.testing:
## __all__.append('Spec')
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
from typing import Any, Dict, List, Optional
from unittest.mock import Mock, patch
from django.conf import settings
from zerver.apps import flush_cache
from zerver.lib.cache import (
MEMCACHED_MAX_KEY_LENGTH,
InvalidCacheKeyException,
NotFoundInCache,
bulk_cached_fetch,
cache_delete,
cache_delete_many,
cache_get,
cache_get_many,
cache_set,
cache_set_many,
cache_with_key,
get_cache_with_key,
safe_cache_get_many,
safe_cache_set_many,
user_profile_by_email_cache_key,
validate_cache_key,
)
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import queries_captured
from zerver.models import UserProfile, get_system_bot, get_user_profile_by_email
class AppsTest(ZulipTestCase):
def test_cache_gets_flushed(self) -> None:
with patch('zerver.apps.logging.info') as mock_logging:
with patch('zerver.apps.cache.clear') as mock:
# The argument to flush_cache doesn't matter
flush_cache(Mock())
mock.assert_called_once()
mock_logging.assert_called_once()
class CacheKeyValidationTest(ZulipTestCase):
def test_validate_cache_key(self) -> None:
validate_cache_key('nice_Ascii:string!~')
with self.assertRaises(InvalidCacheKeyException):
validate_cache_key('utf8_character:ą')
with self.assertRaises(InvalidCacheKeyException):
validate_cache_key('new_line_character:\n')
with self.assertRaises(InvalidCacheKeyException):
validate_cache_key('control_character:\r')
with self.assertRaises(InvalidCacheKeyException):
validate_cache_key('whitespace_character: ')
with self.assertRaises(InvalidCacheKeyException):
validate_cache_key('too_long:' + 'X'*MEMCACHED_MAX_KEY_LENGTH)
with self.assertRaises(InvalidCacheKeyException):
            # validate_cache_key validates the key with KEY_PREFIX
            # prepended, so even though we're passing something "short
            # enough" here, it becomes too long once KEY_PREFIX is added.
validate_cache_key('X' * (MEMCACHED_MAX_KEY_LENGTH - 2))
def test_cache_functions_raise_exception(self) -> None:
invalid_key = 'invalid_character:\n'
good_key = "good_key"
with self.assertRaises(InvalidCacheKeyException):
cache_get(invalid_key)
with self.assertRaises(InvalidCacheKeyException):
cache_set(invalid_key, 0)
with self.assertRaises(InvalidCacheKeyException):
cache_delete(invalid_key)
with self.assertRaises(InvalidCacheKeyException):
cache_get_many([good_key, invalid_key])
with self.assertRaises(InvalidCacheKeyException):
cache_set_many({good_key: 0, invalid_key: 1})
with self.assertRaises(InvalidCacheKeyException):
cache_delete_many([good_key, invalid_key])
class CacheWithKeyDecoratorTest(ZulipTestCase):
def test_cache_with_key_invalid_character(self) -> None:
def invalid_characters_cache_key_function(user_id: int) -> str:
return f'CacheWithKeyDecoratorTest:invalid_character:ą:{user_id}'
@cache_with_key(invalid_characters_cache_key_function, timeout=1000)
def get_user_function_with_bad_cache_keys(user_id: int) -> UserProfile:
return UserProfile.objects.get(id=user_id)
hamlet = self.example_user('hamlet')
with patch('zerver.lib.cache.cache_set') as mock_set, \
patch('zerver.lib.cache.logger.warning') as mock_warn:
with queries_captured() as queries:
result = get_user_function_with_bad_cache_keys(hamlet.id)
self.assertEqual(result, hamlet)
self.assert_length(queries, 1)
mock_set.assert_not_called()
mock_warn.assert_called_once()
def test_cache_with_key_key_too_long(self) -> None:
def too_long_cache_key_function(user_id: int) -> str:
return 'CacheWithKeyDecoratorTest:very_long_key:{}:{}'.format('a'*250, user_id)
@cache_with_key(too_long_cache_key_function, timeout=1000)
def get_user_function_with_bad_cache_keys(user_id: int) -> UserProfile:
return UserProfile.objects.get(id=user_id)
hamlet = self.example_user('hamlet')
with patch('zerver.lib.cache.cache_set') as mock_set, \
patch('zerver.lib.cache.logger.warning') as mock_warn:
with queries_captured() as queries:
result = get_user_function_with_bad_cache_keys(hamlet.id)
self.assertEqual(result, hamlet)
self.assert_length(queries, 1)
mock_set.assert_not_called()
mock_warn.assert_called_once()
def test_cache_with_key_good_key(self) -> None:
def good_cache_key_function(user_id: int) -> str:
return f'CacheWithKeyDecoratorTest:good_cache_key:{user_id}'
@cache_with_key(good_cache_key_function, timeout=1000)
def get_user_function_with_good_cache_keys(user_id: int) -> UserProfile:
return UserProfile.objects.get(id=user_id)
hamlet = self.example_user('hamlet')
with queries_captured() as queries:
result = get_user_function_with_good_cache_keys(hamlet.id)
self.assertEqual(result, hamlet)
self.assert_length(queries, 1)
# The previous function call should have cached the result correctly, so now
# no database queries should happen:
with queries_captured() as queries_two:
result_two = get_user_function_with_good_cache_keys(hamlet.id)
self.assertEqual(result_two, hamlet)
self.assert_length(queries_two, 0)
def test_cache_with_key_none_values(self) -> None:
def cache_key_function(user_id: int) -> str:
return f'CacheWithKeyDecoratorTest:test_cache_with_key_none_values:{user_id}'
@cache_with_key(cache_key_function, timeout=1000)
def get_user_function_can_return_none(user_id: int) -> Optional[UserProfile]:
try:
return UserProfile.objects.get(id=user_id)
except UserProfile.DoesNotExist:
return None
last_user_id = UserProfile.objects.last().id
with queries_captured() as queries:
result = get_user_function_can_return_none(last_user_id + 1)
self.assertEqual(result, None)
self.assert_length(queries, 1)
with queries_captured() as queries:
result_two = get_user_function_can_return_none(last_user_id + 1)
self.assertEqual(result_two, None)
self.assert_length(queries, 0)
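# The contract exercised above, in brief: cache_with_key computes the key,
# returns a cached value on a hit, and on a miss calls the function and
# stores the result -- unless the key is invalid or too long, in which case
# it logs a warning and skips the cache entirely (as the first two tests
# check).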
class GetCacheWithKeyDecoratorTest(ZulipTestCase):
def test_get_cache_with_good_key(self) -> None:
        # Test with a good cache key function, but a get_user function that
        # always returns None, to make it easy to tell whether the result
        # came from the cache (whatever we put there) or from calling the
        # function (None).
def good_cache_key_function(user_id: int) -> str:
return f'CacheWithKeyDecoratorTest:good_cache_key:{user_id}'
@get_cache_with_key(good_cache_key_function)
def get_user_function_with_good_cache_keys(user_id: int) -> Any: # nocoverage
return
hamlet = self.example_user('hamlet')
with patch('zerver.lib.cache.logger.warning') as mock_warn:
with self.assertRaises(NotFoundInCache):
get_user_function_with_good_cache_keys(hamlet.id)
mock_warn.assert_not_called()
cache_set(good_cache_key_function(hamlet.id), hamlet)
result = get_user_function_with_good_cache_keys(hamlet.id)
self.assertEqual(result, hamlet)
def test_get_cache_with_bad_key(self) -> None:
def bad_cache_key_function(user_id: int) -> str:
return f'CacheWithKeyDecoratorTest:invalid_character:ą:{user_id}'
@get_cache_with_key(bad_cache_key_function)
def get_user_function_with_bad_cache_keys(user_id: int) -> Any: # nocoverage
return
hamlet = self.example_user('hamlet')
with patch('zerver.lib.cache.logger.warning') as mock_warn:
with self.assertRaises(NotFoundInCache):
get_user_function_with_bad_cache_keys(hamlet.id)
mock_warn.assert_called_once()
class SafeCacheFunctionsTest(ZulipTestCase):
def test_safe_cache_functions_with_all_good_keys(self) -> None:
items = {"SafeFunctionsTest:key1": 1, "SafeFunctionsTest:key2": 2, "SafeFunctionsTest:key3": 3}
safe_cache_set_many(items)
result = safe_cache_get_many(list(items.keys()))
for key, value in result.items():
self.assertEqual(value, items[key])
def test_safe_cache_functions_with_all_bad_keys(self) -> None:
items = {"SafeFunctionsTest:\nbadkey1": 1, "SafeFunctionsTest:\nbadkey2": 2}
with patch('zerver.lib.cache.logger.warning') as mock_warn:
safe_cache_set_many(items)
mock_warn.assert_called_once()
self.assertEqual(
mock_warn.call_args[0][1],
['SafeFunctionsTest:\nbadkey1', 'SafeFunctionsTest:\nbadkey2'],
)
with patch('zerver.lib.cache.logger.warning') as mock_warn:
result = safe_cache_get_many(list(items.keys()))
mock_warn.assert_called_once()
self.assertEqual(
mock_warn.call_args[0][1],
['SafeFunctionsTest:\nbadkey1', 'SafeFunctionsTest:\nbadkey2'],
)
self.assertEqual(result, {})
def test_safe_cache_functions_with_good_and_bad_keys(self) -> None:
bad_items = {"SafeFunctionsTest:\nbadkey1": 1, "SafeFunctionsTest:\nbadkey2": 2}
good_items = {"SafeFunctionsTest:goodkey1": 3, "SafeFunctionsTest:goodkey2": 4}
items = {**good_items, **bad_items}
with patch('zerver.lib.cache.logger.warning') as mock_warn:
safe_cache_set_many(items)
mock_warn.assert_called_once()
self.assertEqual(
mock_warn.call_args[0][1],
['SafeFunctionsTest:\nbadkey1', 'SafeFunctionsTest:\nbadkey2'],
)
with patch('zerver.lib.cache.logger.warning') as mock_warn:
result = safe_cache_get_many(list(items.keys()))
mock_warn.assert_called_once()
self.assertEqual(
mock_warn.call_args[0][1],
['SafeFunctionsTest:\nbadkey1', 'SafeFunctionsTest:\nbadkey2'],
)
self.assertEqual(result, good_items)
class BotCacheKeyTest(ZulipTestCase):
def test_bot_profile_key_deleted_on_save(self) -> None:
# Get the profile cached on both cache keys:
user_profile = get_user_profile_by_email(settings.EMAIL_GATEWAY_BOT)
bot_profile = get_system_bot(settings.EMAIL_GATEWAY_BOT)
self.assertEqual(user_profile, bot_profile)
# Flip the setting and save:
flipped_setting = not bot_profile.is_api_super_user
bot_profile.is_api_super_user = flipped_setting
bot_profile.save()
# The .save() should have deleted cache keys, so if we fetch again,
# the returned objects should have is_api_super_user set correctly.
bot_profile2 = get_system_bot(settings.EMAIL_GATEWAY_BOT)
self.assertEqual(bot_profile2.is_api_super_user, flipped_setting)
user_profile2 = get_user_profile_by_email(settings.EMAIL_GATEWAY_BOT)
self.assertEqual(user_profile2.is_api_super_user, flipped_setting)
def get_user_email(user: UserProfile) -> str:
return user.email # nocoverage
class GenericBulkCachedFetchTest(ZulipTestCase):
def test_query_function_called_only_if_needed(self) -> None:
# Get the user cached:
hamlet = get_user_profile_by_email(self.example_email("hamlet"))
class CustomException(Exception):
pass
def query_function(emails: List[str]) -> List[UserProfile]:
raise CustomException("The query function was called")
# query_function shouldn't be called, because the only requested object
# is already cached:
result: Dict[str, UserProfile] = bulk_cached_fetch(
cache_key_function=user_profile_by_email_cache_key,
query_function=query_function,
object_ids=[self.example_email("hamlet")],
id_fetcher=get_user_email,
)
self.assertEqual(result, {hamlet.delivery_email: hamlet})
with self.assertLogs(level='INFO') as info_log:
flush_cache(Mock())
self.assertEqual(info_log.output, [
'INFO:root:Clearing memcached cache after migrations'
])
# With the cache flushed, the query_function should get called:
with self.assertRaises(CustomException):
result = bulk_cached_fetch(
cache_key_function=user_profile_by_email_cache_key,
query_function=query_function,
object_ids=[self.example_email("hamlet")],
id_fetcher=get_user_email,
)
def test_empty_object_ids_list(self) -> None:
class CustomException(Exception):
pass
def cache_key_function(email: str) -> str: # nocoverage -- this is just here to make sure it's not called
raise CustomException("The cache key function was called")
def query_function(emails: List[str]) -> List[UserProfile]: # nocoverage -- this is just here to make sure it's not called
raise CustomException("The query function was called")
# query_function and cache_key_function shouldn't be called, because
        # object_ids is empty, so there's nothing to do.
result: Dict[str, UserProfile] = bulk_cached_fetch(
cache_key_function=cache_key_function,
query_function=query_function,
object_ids=[],
id_fetcher=get_user_email,
)
self.assertEqual(result, {})