gt stringclasses 1
value | context stringlengths 2.49k 119k |
|---|---|
#!/usr/bin/env python
"""Functions for generating Python object interfaces from object definitions"""
__author__ = 'Adam R. Smith, Thomas Lennan, Stephen Henrie, Dave Foster, Seman Said'
from collections import OrderedDict
import csv
import os
import re
import string
import yaml
import cgi
from pyon.core.path import list_files_recursive
from pyon.core.interfaces.interface_util import get_object_definition_from_datastore, get_service_definition_from_datastore
class IonYamlLoader(yaml.Loader):
    """ For ION-specific overrides of YAML loading behavior. """
    # Dedicated Loader subclass so the custom !enum / !Extends_* constructors
    # registered below do not leak into the global yaml.Loader.
    pass
# Global registry of parsed enum definitions keyed by enum type name;
# populated by ObjectModelGenerator.generate_enums() and consulted by the
# YAML constructors registered in add_yaml_constructors().
enums_by_name = {}

# Raw HTML snippets (string.Template syntax) used to render the per-object
# documentation pages written by generate_object_specs().
html_doc_templates = {
    'obj_doc':
'''<!-- <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">
<html>
<head>
<title>${classname}</title>
</head>
<body> -->
<p>
<br class="atl-forced-newline" />
</p>
<div class="panel" style="border-width: 1px;">
<div class="panelContent">
<h3>Class Details</h3>
<div class='table-wrap'>
<table class='confluenceTable'>
<tr style="padding:5px">
<th class='confluenceTh'>Object Type:</th>
<td class='confluenceTd'>${classname}</td>
</tr>
<tr>
<th class='confluenceTh'>Base Types:</th>
<td class='confluenceTd'>${baseclasses}</td>
</tr>
<tr>
<th class='confluenceTh'>Sub Types:</th>
<td class='confluenceTd'>${subclasses}</td>
</tr>
<tr>
<th class='confluenceTh'>Decorators:</th>
<td class='confluenceTd'>${decorators} </td>
</tr>
<tr>
<th class='confluenceTh'>Description:</th>
<td class='confluenceTd'>${classcomment} </td>
</tr>
</table>
</div>
</div>
</div>
<p>
<br class="atl-forced-newline" />
</p>
<div class="panel" style="border-width: 1px;">
<div class="panelContent">
<h3>Attributes</h3>
<div class='table-wrap'>
<table class='confluenceTable'>
<tr>
<th class='confluenceTh'>Name</th>
<th class='confluenceTh'>Type</th>
<th class='confluenceTh'>Default</th>
<th class='confluenceTh'>Decorators</th>
<th class='confluenceTh'>Description</th>
</tr>
${attrtableentries}
</table>
</div>
</div>
</div>
${super_class_attr_tables}
<p>
<br class="atl-forced-newline" />
</p>
<div class="panel" style="border-width: 1px;">
<div class="panelContent">
<h3>Associations</h3>
<div class='table-wrap'>
<table class='confluenceTable'>
<tr>
<th class='confluenceTh'>Subject</th>
<th class='confluenceTh'>Predicate</th>
<th class='confluenceTh'>Object</th>
<th class='confluenceTh'>Constraints</th>
<th class='confluenceTh'>Description</th>
</tr>
${assoctableentries}
</table>
</div>
</div>
</div>
<!-- </body>
</html> -->
''',
    'attribute_table_entry':
'''<tr>
<td class='confluenceTd'>${attrname}</td>
<td class='confluenceTd'>${type}</td>
<td class='confluenceTd'>${default}</td>
<td class='confluenceTd'>${decorators}</td>
<td class='confluenceTd'>${attrcomment}</td>
</tr>''',
    'association_table_entry':
'''<tr>
<td class='confluenceTd'>${subject}</td>
<td class='confluenceTd'>${predicate}</td>
<td class='confluenceTd'>${object}</td>
<td class='confluenceTd'>${constraints}</td>
<td class='confluenceTd'>${description}</td>
</tr>''',
    'super_class_attribute_table':
'''<div class="panel" style="border-width: 1px;">
<div class="panelContent">
<h3>Attributes of superclass ${super_class_name}</h3>
<div class='table-wrap'>
<table class='confluenceTable'>
<tr>
<th class='confluenceTh'>Name</th>
<th class='confluenceTh'>Type</th>
<th class='confluenceTh'>Default</th>
<th class='confluenceTh'>Decorators</th>
<th class='confluenceTh'>Description</th>
</tr>
${superclassattrtableentries}
</table>
</div>
</div>
</div>''',
}
# Pre-compile every snippet into a string.Template. Using .items() instead of
# the Python-2-only .iteritems() keeps this working on both Python 2 and 3.
html_doc_templates = dict((k, string.Template(v)) for k, v in html_doc_templates.items())
# CSV skeletons and per-row templates for the object-type and
# object-attribute summary files written by generate_object_specs().
csv_doc_templates = {
    'object_types_doc':
'''ObjectTypeName,type,extends,description
${rowentries}
''',
    'object_types_row_entry':
'''${classname},${type},${extends},${description}
''',
    'object_attributes_doc':
'''ObjectTypeName,attribute name,attribute type, attribute default,description
${rowentries}
''',
    'object_attributes_row_entry':
'''${classname},${name},${type},${default},${description}
''',
}
# Pre-compile into string.Template objects; .items() (not the Python-2-only
# .iteritems()) keeps this portable across Python 2 and 3.
csv_doc_templates = dict((k, string.Template(v)) for k, v in csv_doc_templates.items())
class ObjectModelGenerator:
    """Generates Python classes (interface/objects.py) plus optional HTML/CSV
    documentation from ION object definitions held in YAML files or the
    datastore."""

    # Class-level defaults kept for backward compatibility with any code that
    # reads them off the class. Real state is (re)bound per instance in
    # __init__ below: previously these mutable objects were shared between
    # instances, so one generator's parse results leaked into another's.
    data_yaml_text = ''
    dataobject_output_text = ''
    def_dict = {}
    class_args_dict = {}
    csv_attributes_row_entries = []
    csv_types_row_entries = []

    def __init__(self, system_name=None, read_from_yaml_file=False):
        # system_name: datastore system name to read definitions from.
        # read_from_yaml_file: if True, read YAML files under obj/ instead.
        self.system_name = system_name
        self.read_from_yaml_file = read_from_yaml_file
        self.obj_data = {}
        self._associations = None
        # Per-instance state (fixes accidental sharing via class attributes).
        self.data_yaml_text = ''
        self.dataobject_output_text = ''
        self.def_dict = {}
        self.class_args_dict = {}
        self.csv_attributes_row_entries = []
        self.csv_types_row_entries = []
    def generate(self, opts):
        '''
        Generate object model
        '''
        # Pipeline: read definitions -> emit enum classes -> register YAML
        # constructors -> emit data object classes -> write files / HTML docs.
        # Get data from the file
        combined_yaml_text = self.read_yaml_text()
        if not combined_yaml_text or len(combined_yaml_text) == 0:
            print "object_model_generator: Error!!! the datastore (or the YAML file) is empty."
            exit()
        # Parse and generate enums first
        self.generate_enums(combined_yaml_text, opts)
        # Add custom constructors so YAML doesn't choke
        self.add_yaml_constructors()
        # Generate model object classes in the object.py file
        self.generate_objects(opts)
        # Write to the objects.py file (skipped on --dryrun)
        if not opts.dryrun:
            self.write_files(opts)
        # Generate the HTML files related
        if opts.objectdoc:
            self.generate_object_specs()
    def read_yaml_text(self):
        '''
        Gets the data from YAML files or datastore
        '''
        # Returns the concatenated data + service definition YAML text, or ''
        # if nothing could be read. Also caches the data-object portion on
        # self.data_yaml_text for later parsing passes.
        if self.read_from_yaml_file:
            print " Object interface generator: Reading object definitions from files"
            # Excluded names are core files handled elsewhere.
            data_yaml_files = list_files_recursive('obj/data', '*.yml', ['ion.yml', 'resource.yml', 'shared.yml'])
            self.data_yaml_text = '\n\n'.join((file.read() for file in(open(path, 'r') for path in data_yaml_files if os.path.exists(path))))
            service_yaml_files = list_files_recursive('obj/services', '*.yml')
            service_yaml_text = '\n\n'.join((file.read() for file in(open(path, 'r') for path in service_yaml_files if os.path.exists(path))))
            data = self.data_yaml_text + "\n" + service_yaml_text
        else:
            print " Object interface generator: Reading object definitions from datastore"
            self.data_yaml_text = get_object_definition_from_datastore(self.system_name)
            if not self.data_yaml_text:
                # Empty datastore: caller treats '' as fatal.
                return ''
            data = self.data_yaml_text + '\n' + get_service_definition_from_datastore(self.system_name)
        return data
    def generate_enums(self, combined_yaml_text, opts):
        '''
        Parse YAML text looking for enums
        Parse once looking for enum types. These classes will go at
        the top of the objects.py. Defs are also put into a dict
        so we can easily reference their values later in the parsing logic.
        '''
        # Start the generated module: header, base import, enum base class.
        self.dataobject_output_text = "#!/usr/bin/env python\n\n"
        self.dataobject_output_text += "#\n# This file is auto generated\n#\n\n"
        self.dataobject_output_text += "from pyon.core.object import IonObjectBase\n"
        self.dataobject_output_text += "#\n# Enums\n\n"
        self.dataobject_output_text += "class IonEnum(object):\n"
        self.dataobject_output_text += " pass\n"
        # Scan line-by-line for "!enum (name=..., (A, B, ...), default=...)"
        # declarations rather than YAML-parsing the whole document.
        for line in combined_yaml_text.split('\n'):
            if '!enum ' in line:
                # If stand alone enum type definition
                tokens = line.split(':')
                classname = tokens[0].strip()
                enum_def = tokens[1].strip(' )').replace('!enum(', '')
                if 'name' in enum_def:
                    # Explicit name= entry; for a top-level definition it must
                    # match the YAML key the enum is defined under.
                    name_str = enum_def.split(',', 1)[0]
                    name_val = name_str.split('=')[1].strip()
                    if line[0].isalpha():
                        assert line.startswith(name_val + ':'), "enum name/class name mismatch %s/%s" % (classname, name_val)
                else:
                    name_str = ''
                    name_val = classname
                default_str = enum_def.rsplit(',', 1)[1]
                default_val = default_str.split('=')[1].strip()
                # Whatever remains after stripping name= and default= is the
                # parenthesized tuple of member names.
                value_str = enum_def.replace(name_str, '').replace(default_str, '').strip(', ')
                value_val = value_str.split('=')[1].replace(' ', '').strip('()').split(',')
                assert name_val not in enums_by_name, "enum with type name %s redefined" % name_val
                enums_by_name[name_val] = {"values": value_val, "default": default_val}
                # Emit the enum class; members are numbered starting at 1.
                self.dataobject_output_text += "\nclass " + name_val + "(IonEnum):\n"
                for i, val in enumerate(value_val, 1):
                    self.dataobject_output_text += " " + val + " = " + str(i) + "\n"
                # Forward map: member name -> int value.
                self.dataobject_output_text += " _value_map = {"
                for i, val in enumerate(value_val, 1):
                    if i > 1:
                        self.dataobject_output_text += ", "
                    self.dataobject_output_text += "'" + val + "': " + str(i)
                self.dataobject_output_text += "}\n"
                # Reverse map: int value -> member name.
                self.dataobject_output_text += " _str_map = {"
                for i, val in enumerate(value_val, 1):
                    if i > 1:
                        self.dataobject_output_text += ", "
                    self.dataobject_output_text += str(i) + ": '" + val + "'"
                    if opts.objectdoc:
                        # One CSV attribute row per enum member.
                        self.csv_attributes_row_entries.append(["", classname, val, "", "int", str(i), ""])
                self.dataobject_output_text += "}\n"
                if opts.objectdoc:
                    self.csv_types_row_entries.append([classname, 'enum', 'object', ""])
    def add_yaml_constructors(self):
        '''
        Walk the data model definition and add Yaml constructors
        '''
        # Add constructor for enum types: "!enum (...)" nodes are turned into
        # marker dicts that the later parsing pass recognizes via "__IsEnum".
        enum_tag = u'!enum'

        def enum_constructor(loader, node):
            val_str = str(node.value)
            val_str = val_str[1:-1].strip()
            if 'name' in val_str:
                name_str = val_str.split(',', 1)[0]
                name_val = name_str.split('=')[1].strip()
                # "value" is the generated default, e.g. "MyEnum.FIRST".
                return {"__IsEnum": True, "value": name_val + "." + enums_by_name[name_val]["default"], "type": name_val}
            else:
                return {"__IsEnum": True, "_NameNotProvided": True}

        yaml.add_constructor(enum_tag, enum_constructor, Loader=IonYamlLoader)
        # Second pass: for every named definition register "!Name" and
        # "!Extends_Name" constructors so object cross-references load.
        defs = yaml.load_all(self.data_yaml_text, Loader=IonYamlLoader)
        for def_set in defs:
            for name, _def in def_set.iteritems():
                if isinstance(_def, OrderedDict):
                    # Remember the full definition for generate_objects().
                    self.def_dict[name] = _def
                tag = u'!%s' % (name)

                def constructor(loader, node):
                    # The referenced type name comes from the node tag itself,
                    # so one constructor body serves every registered tag.
                    value = node.tag.strip('!')
                    # See if this is an enum ref
                    if value in enums_by_name:
                        return {"__IsEnum": True, "value": value + "." + enums_by_name[value]["default"], "type": value}
                    else:
                        # Placeholder "TypeName()" string, resolved when the
                        # generated class text is written.
                        return str(value) + "()"

                yaml.add_constructor(tag, constructor, Loader=IonYamlLoader)
                xtag = u'!Extends_%s' % (name)

                def extends_constructor(loader, node):
                    # Inheritance nodes simply load as a mapping (or empty dict).
                    if isinstance(node, yaml.MappingNode):
                        value = loader.construct_mapping(node)
                    else:
                        value = {}
                    return value

                yaml.add_constructor(xtag, extends_constructor, Loader=IonYamlLoader)
    def generate_objects(self, opts):
        '''
        Walk the data model definition yaml files. Generate
        corresponding classes in the objects.py file.
        '''
        # Line-oriented state machine over the raw YAML text. Indented lines
        # are attributes/comments of the "current" class; lines starting at
        # column 0 open a new class (flushing the previous one's code).
        # Delimit the break between the enum classes and
        # and the data model classes
        self.dataobject_output_text += "\n\n# Data Objects\n"
        current_class_def_dict = None
        schema_extended = False
        current_class_schema = ""
        current_class_comment = ""
        current_class = ""
        super_class = "IonObjectBase"
        args = []
        fields = []
        field_details = []
        init_lines = []
        first_time = True
        decorators = ''
        class_decorators = {}
        description = ''
        csv_description = ''
        class_comment = ''
        for line in self.data_yaml_text.split('\n'):
            if line.isspace():
                continue
            elif line.startswith(' #'):
                # Indented comment line: either an attribute decorator (#@key=value)
                # or free-text documentation for the next attribute.
                # Check for decorators tag
                if len(line) > 4 and line.startswith(' #@'):
                    dec = line.strip()[2:].split("=")
                    key = dec[0]
                    value = dec[1] if len(dec) == 2 else ""
                    # Add it to the decorator list (rendered as dict-literal text)
                    if not decorators:
                        decorators = '"' + key + '": "' + value + '"'
                    else:
                        decorators = decorators + ', "' + key + '": "' + value + '"'
                else:
                    init_lines.append(' ' + line + '\n')
                    if not description:
                        description = line.strip()[1:]
                        csv_description = line.strip()
                    else:
                        description = description + ' ' + line.strip()[1:]
                        csv_description = csv_description + ' ' + line.strip()
            elif line.startswith(' '):
                # Indented non-comment line: an attribute of the current class.
                if current_class_def_dict:
                    field = line.split(":")[0].strip()
                    try:
                        value = current_class_def_dict[field]
                        # Get inline comment
                        if '#' in line:
                            dsc = line.split('#', 1)[1].strip()
                            if not description:
                                description = dsc
                                csv_description = dsc
                            else:
                                description = description + ' ' + dsc
                                csv_description = csv_description + ' ' + dsc
                    except KeyError:
                        # Ignore key error because value is nested
                        continue
                    enum_type = ""
                    if isinstance(value, str) and '()' in value:
                        # Placeholder "TypeName()" from the YAML constructors:
                        # default to a fresh instance in __init__.
                        value_type = value.strip('()')
                        converted_value = value
                        args.append(", ")
                        args.append(field + "=None")
                        init_lines.append(' self.' + field + " = " + field + " or " + value_type + "()\n")
                    else:
                        value_type = type(value).__name__
                        if value_type == 'dict' and "__IsEnum" in value:
                            # Enum marker dict: generated attribute is an int.
                            enum_type = value["type"]
                            value_type = 'int'
                        converted_value = self.convert_val(value)
                        if value_type in ['OrderedDict', 'list', 'tuple']:
                            # Mutable defaults: use None sentinel + "or" in __init__.
                            if value_type == 'OrderedDict':
                                value_type = 'dict'
                            args.append(", ")
                            args.append(field + "=None")
                            init_lines.append(' self.' + field + " = " + field + " or " + converted_value + "\n")
                        else:
                            args.append(", ")
                            args.append(field + "=" + converted_value)
                            init_lines.append(' self.' + field + " = " + field + "\n")
                    fields.append(field)
                    field_details.append((field, value_type, converted_value, csv_description, decorators))
                    # Append this attribute's entry to the generated _schema dict.
                    if enum_type:
                        current_class_schema += "\n '" + field + "': {'type': '" + value_type + "', 'default': " + converted_value + ", 'enum_type': '" + enum_type + "', 'decorators': {" + decorators + "}" + ", 'description': '" + re.escape(description) + "'},"
                    else:
                        current_class_schema += "\n '" + field + "': {'type': '" + value_type + "', 'default': " + converted_value + ", 'decorators': {" + decorators + "}" + ", 'description': '" + re.escape(description) + "'},"
                    decorators = ''
                    description = ''
                    csv_description = ''
            elif line and (line[0].isalpha() or line.startswith("#")):
                # Top-level line: class-level comment/decorator or a new class header.
                if '!enum' in line:
                    continue
                #Handle class level decorators
                if line.startswith('#@'):
                    dec = line.strip()[2:].split("=")
                    key = dec[0]
                    value = dec[1] if len(dec) == 2 else ""
                    class_decorators[key]=value
                #Handle class level comments
                if line.startswith('#'):
                    dsc = line[1:].strip()
                    if not class_comment:
                        class_comment = dsc
                    else:
                        class_comment = class_comment + ' ' + dsc
                    continue
                # New class header: first flush the previously accumulated class.
                if first_time:
                    first_time = False
                else:
                    current_class_args_dict = {'args': args, 'fields': fields, 'field_details': field_details, 'extends': super_class, 'description': current_class_comment, 'decorators': ""}
                    if current_class in self.class_args_dict:
                        self.class_args_dict[current_class].update(current_class_args_dict)
                    else:
                        self.class_args_dict[current_class] = current_class_args_dict
                    for arg in args:
                        self.dataobject_output_text += arg
                    self.dataobject_output_text += "):\n"
                    for init_line in init_lines:
                        self.dataobject_output_text += init_line
                    if len(current_class_schema) > 0:
                        if schema_extended:
                            self.dataobject_output_text += current_class_schema + "\n }.items())\n"
                        else:
                            self.dataobject_output_text += current_class_schema + "\n }\n"
                    self.dataobject_output_text += '\n'
                args = []
                fields = []
                field_details = []
                init_lines = []
                current_class = line.split(":")[0]
                try:
                    current_class_def_dict = self.def_dict[current_class]
                except KeyError:
                    current_class_def_dict = {}
                super_class = "IonObjectBase"
                if ': !Extends_' in line:
                    # Inherit the superclass' constructor args/fields and
                    # extend its _schema dict in the generated code.
                    super_class = line.split("!Extends_")[1]
                    args = args + self.class_args_dict[super_class]["args"]
                    init_lines.append(' ' + super_class + ".__init__(self")
                    fields = fields + self.class_args_dict[super_class]["fields"]
                    for super_field in fields:
                        init_lines.append(", " + super_field)
                    init_lines.append(")\n")
                    schema_extended = True
                    current_class_schema = "\n _schema = dict(" + super_class + "._schema.items() + {"
                    line = line.replace(': !Extends_', '(')
                else:
                    schema_extended = False
                    current_class_schema = "\n _schema = {"
                    line = line.replace(':', '(IonObjectBase')
                init_lines.append(" self.type_ = '" + current_class + "'\n")
                # Class docstring (triple quotes in the comment are escaped).
                class_comment_temp = "\n '''\n " + class_comment.replace("'''","\\'\\'\\'") + "\n '''" if class_comment else ''
                self.dataobject_output_text += "class " + line + "):" + class_comment_temp + "\n\n"
                self.dataobject_output_text += " _class_info = {'name': '" + "', 'decorators': " + str(class_decorators) + \
                    ", 'docstring': '"+ re.escape(class_comment)+"'}\n\n"
                self.dataobject_output_text += " def __init__(self"
                current_class_comment = class_comment
                class_comment = ''
                class_decorators = {}
        # Flush the final class after the loop ends.
        # NOTE(review): unlike the in-loop flush above, this path never stores
        # an entry in self.class_args_dict for the last class — confirm whether
        # trailing YAML content makes this a non-issue for the doc generation.
        if len(args) > 0:
            for arg in args:
                self.dataobject_output_text += arg
        self.dataobject_output_text += "):\n"
        for init_line in init_lines:
            self.dataobject_output_text += init_line
        if len(current_class_schema) > 0:
            if schema_extended:
                self.dataobject_output_text += current_class_schema + "\n }.items())\n"
            else:
                self.dataobject_output_text += current_class_schema + "\n }\n"
        # clean up cumulative version from dictionary because it creates problems
        # while generating document
        cv_classes=[]
        for cv_class, cv in self.class_args_dict.iteritems():
            if 'cumulative_version' in cv:
                cv_classes.append(cv_class)
        for cv_class in cv_classes:
            del self.class_args_dict[cv_class]
    def generate_object_specs(self):
        # Renders one HTML page per object type plus two CSV summary files
        # (object types and object attributes) from self.class_args_dict.
        print " Object interface generator: Generating additional object specs in HTML and CSV"
        datamodelhtmldir = 'interface/object_html'
        if not os.path.exists(datamodelhtmldir):
            os.makedirs(datamodelhtmldir)
        for objname, objschema in self.class_args_dict.iteritems():
            field_details = objschema["field_details"]
            super_class = objschema["extends"]
            attrtableentries = ""
            field_details.sort()
            for field_detail in field_details:
                # field_detail = (name, type, default, csv_description, decorators)
                att_comments = cgi.escape(field_detail[3].strip(' ,#').replace('#', ''))
                attrtableentries += html_doc_templates['attribute_table_entry'].substitute(
                    attrname=field_detail[0], type=field_detail[1].replace("'", '"'),
                    default=field_detail[2].replace("'", '"'),
                    decorators=cgi.escape(field_detail[4]),
                    attrcomment=att_comments)
                self.csv_attributes_row_entries.append(["", objname, field_detail[0], "", field_detail[1], field_detail[2], field_detail[3].strip(' ,#').replace('#', '')])
            related_associations = self._lookup_associations(objname)
            #Check for missing docstring
            for assockey, assocval in related_associations.iteritems():
                if not assocval.has_key("docstring"):
                    assocval["docstring"] = "This entry is missing a docstring value"
            assoctableentries = "".join([html_doc_templates['association_table_entry'].substitute(
                subject=str(assocval["domain"]).replace("'", ""),
                predicate=assockey,
                object=str(assocval["range"]).replace("'", ""),
                description=str(assocval["docstring"]).replace("'", ""),
                constraints=str(assocval.get("cardinality", "n,n"))) for assockey, assocval in related_associations.iteritems()])
            super_classes = ""
            sub_classes = ""
            sup = super_class
            super_class_attribute_tables = ""
            class_type = self._get_class_type(objname)
            # Walk the inheritance chain up to the root, emitting a link and
            # an attribute table for every superclass.
            while sup != "IonObjectBase":
                sup_class_type = self._get_class_type(sup)
                if sup_class_type == "resource":
                    anchor = '<a href="Resource+Spec+for+' + sup + '.html">' + sup + '</a>'
                else:
                    anchor = '<a href="Object+Spec+for+' + sup + '.html">' + sup + '</a>'
                super_classes += anchor + ', '
                fld_details = self.class_args_dict[sup]["field_details"]
                superclassattrtableentries = ""
                fld_details.sort()
                for fld_detail in fld_details:
                    att_comments = cgi.escape(fld_detail[3].strip(' ,#').replace('#', ''))
                    superclassattrtableentries += html_doc_templates['attribute_table_entry'].substitute(
                        attrname=fld_detail[0], type=fld_detail[1].replace("'", '"'),
                        default=fld_detail[2].replace("'", '"'), decorators=cgi.escape(fld_detail[4]),
                        attrcomment=att_comments)
                super_class_attribute_tables += html_doc_templates['super_class_attribute_table'].substitute(
                    super_class_name=anchor,
                    superclassattrtableentries=superclassattrtableentries)
                sup = self.class_args_dict[sup]["extends"]
            super_classes += '<a href="Object+Spec+for+IonObjectBase">IonObjectBase</a>'
            # Collect direct subclasses for the "Sub Types" row.
            for okey, oval in self.class_args_dict.iteritems():
                if oval['extends'] == objname:
                    otype = self._get_class_type(okey)
                    if otype == "resource":
                        sub_classes += '<a href="Resource+Spec+for+' + okey + '.html">' + okey + '</a>' + ', '
                    else:
                        sub_classes += '<a href="Object+Spec+for+' + okey + '.html">' + okey + '</a>' + ', '
            if sub_classes:
                # Drop the trailing ", ".
                sub_classes = sub_classes[:-2]
            csv_description = objschema['description']
            self.csv_types_row_entries.append([objname, class_type, super_class, csv_description.strip(' ,#').replace('#','')])
            doc_output = html_doc_templates['obj_doc'].substitute(
                classname=objname, baseclasses=super_classes, subclasses=sub_classes,
                classcomment=cgi.escape(objschema["description"]), decorators=objschema["decorators"],
                attrtableentries=attrtableentries,
                super_class_attr_tables=super_class_attribute_tables,
                assoctableentries=assoctableentries)
            datamodelhtmlfile = os.path.join(datamodelhtmldir, objname + ".html")
            try:
                os.unlink(datamodelhtmlfile)
            except:
                pass
            with open(datamodelhtmlfile, 'w') as f:
                f.write(doc_output)
        # Write the object-type summary CSV.
        datadir = 'interface'
        objecttypecsvfile = os.path.join(datadir, 'objecttypes.csv')
        try:
            os.unlink(objecttypecsvfile)
        except:
            pass
        print " Writing object type csv to '" + objecttypecsvfile + "'"
        csv_file = csv.writer(open(objecttypecsvfile, 'wb'), delimiter=',',
            quotechar='"', quoting=csv.QUOTE_ALL)
        csv_file.writerow(["object type", "object class", "extends", "description"])
        csv_file.writerows(self.csv_types_row_entries)
        objectattrscsvfile = os.path.join(datadir, 'objectattrs.csv')
        try:
            os.unlink(objectattrscsvfile)
        except:
            pass
        # Fill in column 0 (object class) of each attribute row from the types table.
        obj_types = {}
        for row in self.csv_types_row_entries:
            obj_types[row[0]] = row[1]
        for row in self.csv_attributes_row_entries:
            row[0] = obj_types.get(row[1], "")
        # The following was requested by Karen S: Need to associate a persistent identifier for a known
        # object type, attribute name combination
        objattr_ids = {}
        objid_filename = "res/config/object_attribute_ids.csv"
        try:
            if os.path.exists(objid_filename):
                with open(objid_filename, "rU") as csvfile:
                    idreader = csv.DictReader(csvfile, delimiter=',')
                    for row in idreader:
                        oname, aname, refid = row['YML Resource Type'], row['YML Name'], row['__pk_ResourceAttribute_ID']
                        objattr_ids[(oname, aname)] = refid
            for row in self.csv_attributes_row_entries:
                row[3] = objattr_ids.get((row[1], row[2]), "")
        except Exception as ex:
            print "ERROR reading object/attribute mapping file", objid_filename, ex
        print " Writing object attribute csv to '" + objectattrscsvfile + "'"
        csv_file = csv.writer(open(objectattrscsvfile, 'wb'), delimiter=',',
            quotechar='"', quoting=csv.QUOTE_ALL)
        csv_file.writerow(["object class", "object type", "attribute name", "ref id", "attribute type", "attribute default", "description"])
        csv_file.writerows(self.csv_attributes_row_entries)
def _lookup_associations(self, classname):
"""
Returns dict of associations for given object type (not base types)
"""
from pyon.util.config import Config
from pyon.util.containers import DotDict
if not self._associations:
self._associations = DotDict()
assoc_defs = Config(["res/config/associations.yml"]).data['AssociationDefinitions']
self._associations.update((ad['predicate'], ad) for ad in assoc_defs)
output = {}
for key in self._associations:
domain = str(self._associations[key]["domain"])
range = str(self._associations[key]["range"])
if classname in domain or classname in range:
output[key] = self._associations[key]
return output
# Determine if class is object or resource or event
def _get_class_type(self, clzzname):
while clzzname != "IonObjectBase":
if clzzname == "Resource":
return "resource"
elif clzzname == "Event":
return "event"
clzzname = self.class_args_dict[clzzname]["extends"]
return "object"
def convert_val(self, value):
"""
Recursively generates right hand value for a class attribute.
"""
if isinstance(value, list):
outline = '['
first_time = True
for val in value:
if first_time:
first_time = False
else:
outline += ", "
outline += self.convert_val(val)
outline += ']'
elif isinstance(value, dict) and "__IsEnum" in value:
outline = value["value"]
elif isinstance(value, OrderedDict):
outline = '{'
first_time = True
for key in value:
if first_time:
first_time = False
else:
outline += ", "
outline += "'" + key + "': " + self.convert_val(value[key])
outline += '}'
elif isinstance(value, str):
outline = "'" + value + "'"
else:
outline = str(value)
return outline
    def write_files(self, opts):
        """
        Write object model to object.py file and optionally csv files
        """
        datadir = 'interface'
        if not os.path.exists(datadir):
            os.makedirs(datadir)
        # Ensure the interface directory is an importable package.
        open(os.path.join(datadir, '__init__.py'), 'w').close()
        datamodelfile = os.path.join(datadir, 'objects.py')
        try:
            # Remove any stale generated file first; ignore if absent.
            os.unlink(datamodelfile)
        except:
            pass
        print " Writing object interfaces to '" + datamodelfile + "'"
        with open(datamodelfile, 'w') as f:
            f.write(self.dataobject_output_text)
| |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import annotations
import abc
import importlib
import pkgutil
from typing import Optional, Iterable, Tuple, List, Iterator, Callable, Type
import parlai.mutators
from parlai.core.params import ParlaiParser
from parlai.core.opt import Opt
from parlai.core.message import Message
# Global mapping of mutator name -> mutator class, populated by the
# @register_mutator decorator as mutator modules are imported.
MUTATOR_REGISTRY: dict[str, Type] = {}


def setup_mutator_registry():
    """
    Loads the mutators so that @register_mutator is hit for all.

    Imports every module under parlai.mutators (and the optional parlai_fb /
    parlai_internal trees) exactly once; later calls are cheap no-ops.

    :returns: the global mutator registry (name -> mutator class).
    """
    global MUTATOR_REGISTRY
    if hasattr(setup_mutator_registry, 'loaded'):
        # Bug fix: this early-return path previously returned None while the
        # first call returned the registry; keep the contract consistent.
        return MUTATOR_REGISTRY
    for module in pkgutil.iter_modules(parlai.mutators.__path__, 'parlai.mutators.'):
        importlib.import_module(module.name)
    try:
        import parlai_fb.mutators

        for module in pkgutil.iter_modules(
            parlai_fb.mutators.__path__, 'parlai_fb.mutators.'
        ):
            importlib.import_module(module.name)
    except ImportError:
        # The parlai_fb tree is optional.
        pass
    try:
        import parlai_internal.mutators

        for module in pkgutil.iter_modules(
            parlai_internal.mutators.__path__, 'parlai_internal.mutators.'
        ):
            importlib.import_module(module.name)
    except ImportError:
        # The parlai_internal tree is optional.
        pass
    setup_mutator_registry.loaded = True
    return MUTATOR_REGISTRY
def register_mutator(name: str) -> Callable[[Type], Type]:
    """
    Register a mutator.

    Returns a class decorator that records the decorated class in
    MUTATOR_REGISTRY under ``name``; re-registering the same class under the
    same name is allowed, but two different classes may not share a name.
    """

    def _inner(cls_: Type) -> Type:
        global MUTATOR_REGISTRY
        existing = MUTATOR_REGISTRY.get(name)
        if existing is not None and existing is not cls_:
            raise NameError(
                "Mutators must be uniquely named, but detected two mutators with "
                f"the name '{name}'."
            )
        MUTATOR_REGISTRY[name] = cls_
        return cls_

    return _inner
class Mutator(abc.ABC):
"""
Base class for mutators.
Users are not advised to use this class.
"""
@classmethod
def load_mutator_types(cls, mutator_names: Optional[str]) -> List[Type]:
"""
Map mutator names to actual classes via the registry.
:param mutator_names:
A list of one or more mutators separated by '+'. E.g.
'flatten+word_shuffle'.
:returns: a list of mutators
"""
global MUTATOR_REGISTRY
setup_mutator_registry()
if not mutator_names:
return []
assert isinstance(mutator_names, str)
names = mutator_names.replace('+', ',').split(',')
mutators = [MUTATOR_REGISTRY[name] for name in names]
return mutators
@classmethod
def add_cmdline_args(
cls, parser: ParlaiParser, partial_opt: Optional[Opt] = None
) -> ParlaiParser:
pass
def __init__(self, opt):
self.opt = opt
def _pop_episode_done(self, message: Message) -> Tuple[Message, bool]:
try:
episode_done = message.pop('episode_done')
except KeyError:
episode_done = False
return message, episode_done
def _group_into_episodes(
self, message_stream: Iterable[Message]
) -> Iterator[List[Message]]:
"""
Apply fn to grouped episodes, yielding back the results of the application.
"""
episode: List[Message] = []
for message in message_stream:
if message.is_padding():
assert not episode
yield [message]
continue
message, episode_done = self._pop_episode_done(message)
episode.append(message)
if episode_done:
yield episode
episode = []
if episode:
yield episode
def _add_episode_done(self, episode: List[Message]) -> List[Message]:
for i, message in enumerate(episode):
message['episode_done'] = i == len(episode) - 1
return episode
@abc.abstractmethod
def __call__(self, messages: Iterable[Message]) -> Iterator[Message]:
pass
class MessageMutator(Mutator):
    """
    Message-level mutators.

    Message-level mutators have a function applied per-utterance. They are ideal
    for transformations of data which don't create any new conversations or
    turns, but only apply simple text-transformations.

    Examples include:

    * Shuffling words in context
    * Adding a special token based on a non-text field
    * Replacing words with synonyms or other simple augmentations
    """

    @abc.abstractmethod
    def message_mutation(self, message: Message) -> Message:
        """
        Abstract message mutation.

        The main method to implement when implementing an MessageMutator.

        :param message:
            An individual message you should mutate.
        :returns:
            The mutated message.
        """
        pass

    def __call__(self, messages: Iterable[Message]) -> Iterator[Message]:
        """
        Apply the mutator to a series of messages.

        Not meant to be called directly by a user.
        """
        for msg in messages:
            # Padding messages are forwarded untouched.
            if msg.is_padding():
                yield msg
                continue
            msg, episode_done = self._pop_episode_done(msg)
            mutated = self.message_mutation(msg)
            # The mutation must not tamper with episode boundaries.
            if 'episode_done' in mutated:
                raise ValueError('MessageMutators should not modify episode_done.')
            mutated['episode_done'] = episode_done
            yield mutated
class EpisodeMutator(Mutator):
    """
    Episode-level mutators.
    """

    @abc.abstractmethod
    def episode_mutation(self, episode: List[Message]) -> List[Message]:
        """
        Abstract episode mutation.

        The main method to implement when implementing an EpisodeMutator.

        The "episode_done" field will be automatically stripped before providing
        as input, and automatically added back to the finalized episode.

        :param messages:
            All the messages in one episode. You may manipulate any or all of
            them, or change the ordering entirely.
        :returns:
            The new, mutated episode.
        """
        pass

    def __call__(self, messages: Iterable[Message]) -> Iterator[Message]:
        """
        Apply the mutator to a series of messages.

        Not meant to be called directly by a user.
        """
        for episode in self._group_into_episodes(messages):
            if episode and episode[0].is_padding():
                # Padding singletons bypass mutation entirely.
                yield from episode
            else:
                # Mutate, then restore the episode_done markers.
                yield from self._add_episode_done(self.episode_mutation(episode))
class ManyEpisodeMutator(Mutator):
    """
    Episode mutator than can map one episode to zero or more.
    """

    @abc.abstractmethod
    def many_episode_mutation(self, episode: List[Message]) -> List[List[Message]]:
        """
        Abstract many-episode mutation.

        The main method to implement when creation a ManyEpisodeMutator.
        You should map this episode to zero-or-more episodes.

        If you wish to create multiple episodes, you need to output
        one-sublist-per-new-episode. As with EpisodeMutator, "episode_done"
        will be automatically stripped and re-inserted for you.

        :param episode:
            A single episode (provided list of Messages).
        :returns:
            A list of list of messages. Each sub-list will be turned into a new
            episode.
        """
        pass

    def __call__(self, messages: Iterable[Message]) -> Iterator[Message]:
        """
        Apply the mutator to a series of messages.

        Not meant to be called directly by a user.
        """
        for episode in self._group_into_episodes(messages):
            if episode and episode[0].is_padding():
                # Padding singletons bypass mutation entirely.
                yield from episode
            else:
                # Each sub-list becomes its own finalized episode.
                for new_episode in self.many_episode_mutation(episode):
                    yield from self._add_episode_done(new_episode)
| |
import subprocess as sub
# -----> * Filing * <-----
# Ask the user for the game's name; it doubles as the stats file name.
print("Enter game's name:")
name_file = input("")
name_file_string = str(name_file)  # input() already returns str; kept as-is
file_name = name_file_string + ".txt"
# create a writable .txt file
# NOTE(review): the handle stays open for the whole session and is written to
# by body(); presumably closed after the game ends -- confirm further down.
safile = open(file_name, "w")
safile.write("This game's stats were:\n")
# -----> * Team Separator * <-----
# The name is expected as "home-away-date"; any other shape raises ValueError.
home, away, date = name_file_string.split('-')
# -----> * Introduction * <-----
intro = '''
Welcome to Siball-Analysis v0.5.5-beta. If you do not know the commands type help to open the howto file.
I hope you enjoy the game between %s and %s. '''
print(intro % (home, away))
# -----> * Colors Variables * <-----
# ANSI escape %s-templates used to colorize terminal output.
red = "\033[1;31;50m%s"
blue = "\033[1;34;50m%s"
white = "\033[1;38;50m%s"
# -----> * Penalty Kicks Variables * <-----
# Counters are emulated with lists of 0..999: body() pops the front of the
# list on every recorded stat, so index 0 is always the next count to report.
# create a range from 0 to 1000
pk_range = range(0, 1000)
# list the range to be accessible
pk_list = list(pk_range)
pk_range_goal = range(0, 1000)
pk_list_goal = list(pk_range_goal)
pk_range_saved = range(0, 1000)
pk_list_saved = list(pk_range_saved)
pk_range_missed = range(0, 1000)
pk_list_missed = list(pk_range_missed)
# Outcome prompt; note that missed_pk keeps the trailing newline of the
# triple-quoted literal after the split.
pk_input = '''goal/saved/missed
'''
goal_pk, saved_pk, missed_pk = pk_input.split("/")
# a list of possible words that the user may type in order to record a stat
# ("a1ta" and the numeric codes are shorthand aliases; trailing-space
# variants tolerate an accidental trailing blank in the input).
pk_words = ["pk", "penalty kick", "Penalty kick", "Penalty Kick", "penalty Kick", "PK", "Pk", "pK", "a1ta", "penalty kicks", "Penalty kicks", "Penalty Kicks", "penalty Kicks", "penalty kick ", "Penalty kick ", "Penalty Kick ", "penalty Kick ", "penalty kicks ", "Penalty kicks ", "Penalty Kicks ", "penalty Kicks ", "01"]
goal_words = ["goal", "Goal", "GOAL", "goal ", "Goal ", "GOAL ", "011"]
missed_words = ["missed", "Missed", "MISSED", "missed ", "Missed ", "MISSED ", "miss", "Miss", "MISS", "miss ", "Miss ", "MISS ", "012"]
# NOTE(review): body()'s penalty-kick branch tests membership in save_words,
# not saved_words, so this list appears unused -- confirm intent.
saved_words = ["saved", "Saved", "SAVED", "saved ", "Saved ", "SAVED ", "save", "Save", "SAVE", "save ", "Save ", "SAVE ", "013"]
# -----> * Free Kicks Variables <-----
# Countdown lists (see the penalty-kick section): index 0 is the next count.
fk_range = range(0, 1000)
fk_list = list(fk_range)
fk_range_gd = range(0, 1000)
fk_list_gd = list(fk_range_gd)
fk_range_pd = range(0, 1000)
fk_list_pd = list(fk_range_pd)
# "gd" = good delivery, "pd" = poor delivery; pd_fk keeps the trailing newline.
fk_input = '''gd/pd
'''
gd_fk, pd_fk = fk_input.split("/")
fk_words = ["fk", "free kick", "Free kick", "Free Kick", "free Kick", "FK", "Fk", "fK", "a1ta", "free kicks", "Free kicks", "Free Kicks", "free Kicks", "free kick ", "Free kick ", "Free Kick ", "free Kick ", "free kicks ", "Free kicks ", "Free Kicks ", "free Kicks ", "02"]
fk_gd_words = ["gd", "GD", "gd ", "GD ", "021"]
fk_pd_words = ["pd", "PD", "pd ", "PD ", "022"]
# -----> * Corner Kicks Variables * <-----
# Countdown lists (see the penalty-kick section): index 0 is the next count.
ck_range = range(0, 1000)
ck_list = list(ck_range)
ck_range_gd = range(0, 1000)
ck_list_gd = list(ck_range_gd)
ck_range_pd = range(0, 1000)
ck_list_pd = list(ck_range_pd)
# "gd" = good delivery, "pd" = poor delivery; pd_ck keeps the trailing newline.
ck_input = '''gd/pd
'''
gd_ck, pd_ck = ck_input.split("/")
ck_words = ["ck", "corner kick", "Corner kick", "Corner Kick", "corner Kick", "CK", "Ck", "cK", "a1ta", "corner kicks", "Corner kicks", "Corner Kicks", "corner Kicks", "corner kick ", "Corner kick ", "Corner Kick ", "corner Kick ", "corner kicks ", "Corner kicks ", "Corner Kicks ", "corner Kicks ", "03"]
ck_gd_words = ["gd", "GD", "gd ", "GD ", "031"]
ck_pd_words = ["pd", "PD", "pd ", "PD ", "032"]
# -----> * Throw Ins Variables * <-----
# Countdown lists (see the penalty-kick section): index 0 is the next count.
ti_range = range(0, 1000)
ti_list = list(ti_range)
ti_range_gd = range(0, 1000)
ti_list_gd = list(ti_range_gd)
ti_range_pd = range(0, 1000)
ti_list_pd = list(ti_range_pd)
# "gd" = good delivery, "pd" = poor delivery; pd_ti keeps the trailing newline.
ti_input = '''gd/pd
'''
gd_ti, pd_ti = ti_input.split("/")
ti_words = ["ti", "throw in", "Throw in", "Throw In", "throw In", "TI", "Ti", "tI", "a1ta", "throw ins", "Throw ins", "Throw Ins", "throw Ins", "throw in ", "Throw in ", "Throw In ", "throw In ", "throw ins ", "Throw ins ", "Throw Ins ", "throw Ins ", "04"]
ti_gd_words = ["gd", "GD", "gd ", "GD ", "041"]
ti_pd_words = ["pd", "PD", "pd ", "PD ", "042"]
# -----> * Crosses Variables * <-----
# Countdown lists (see the penalty-kick section): index 0 is the next count.
crosses_range = range(0, 1000)
crosses_list = list(crosses_range)
crosses_range_gd = range(0, 1000)
crosses_list_gd = list(crosses_range_gd)
crosses_range_pd = range(0, 1000)
crosses_list_pd = list(crosses_range_pd)
# "gd" = good delivery, "pd" = poor delivery; pd_cross keeps the trailing newline.
crosses_input = '''gd/pd
'''
gd_cross, pd_cross = crosses_input.split("/")
cross_words = ["cross", "Cross", "Cross ", "cross ", "crosses", "Crosses", "Crosses ", "crosses ", "a1ta", "05"]
cross_gd_words = ["gd", "GD", "gd ", "GD ", "051"]
cross_pd_words = ["pd", "PD", "pd ", "PD ", "052"]
# -----> * True vs True Variables * <-----
# 1-versus-1 duels. Countdown lists: index 0 is the next count to report.
v1_range = range(0, 1000)
v1_list = list(v1_range)
v1_range_w = range(0, 1000)
v1_list_w = list(v1_range_w)
v1_range_l = range(0, 1000)
v1_list_l = list(v1_range_l)
# "w" = won, "l" = lost; l_v1 keeps the trailing newline of the literal.
v1_input = '''w/l
'''
w_v1, l_v1 = v1_input.split("/")
v1_words = ["1v1", "1vs1", "1 versus 1", "1 Versus 1", "1VS1", "1v1 ", "1vs1 ", "1 versus 1 ", "1 Versus 1 ", "1VS1 ", "a1ta", "06"]
w_words = ["w", "W", "w ","W ", "won", "Won", "WON", "won ", "Won ", "WON ", "061"]
l_words = ["l", "L", "l ","L ", "lost", "Lost", "LOST", "lost ", "Lost ", "LOST ", "062"]
# -----> * Shots Variables * <-----
# Countdown lists: index 0 is the next count to report.
shots_range = range(0, 1000)
shots_list = list(shots_range)
# NOTE(review): despite the "_gd"/"_pd" suffixes these track on-target /
# off-target shots (see body()'s shot branch) -- naming kept for consistency.
shots_range_gd = range(0, 1000)
shots_list_gd = list(shots_range_gd)
shots_range_pd = range(0, 1000)
shots_list_pd = list(shots_range_pd)
# Outcome prompt; oft_shot keeps the trailing newline of the literal.
shots_input = '''on target/off target
'''
ont_shot, oft_shot = shots_input.split("/")
shot_words = ["shot", "Shot", "SHOT", "shot ", "Shot ", "SHOT ", "shots", "Shots", "SHOTS", "shots ", "Shots ", "SHOTS ", "a1ta", "07"]
shot_ont_words = ["on target", "On target", "On Target", "ON TARGET", "on target ", "On target ", "On Target ", "ON TARGET ", "ont", "ONT", "ont ", "ONT ", "071"]
shot_oft_words = ["off target", "Off target", "Off Target", "OFF TARGET", "off target ", "Off target ", "Off Target ", "OFF TARGET ", "oft", "OFT", "oft ", "OFT ", "072"]
# -----> * Headers Variables * <-----
# Countdown lists: index 0 is the next count to report. As with shots, the
# "_gd"/"_pd" suffixes actually track on-target / off-target headers.
headers_range = range(0, 1000)
headers_list = list(headers_range)
headers_range_gd = range(0, 1000)
headers_list_gd = list(headers_range_gd)
headers_range_pd = range(0, 1000)
headers_list_pd = list(headers_range_pd)
# Outcome prompt; oft_header keeps the trailing newline of the literal.
headers_input = '''on target/off target
'''
ont_header, oft_header = headers_input.split("/")
# Fixed: the original list repeated the singular forms ("header", "Header",
# "HEADER") where every sibling list (cf. shot_words) has the plural
# no-trailing-space forms, so typing "headers" was never matched.
header_words = ["header", "Header", "HEADER", "header ", "Header ", "HEADER ", "headers", "Headers", "HEADERS", "headers ", "Headers ", "HEADERS ", "a1ta", "08"]
header_ont_words = ["on target", "On target", "On Target", "ON TARGET", "on target ", "On target ", "On Target ", "ON TARGET ", "ont", "ONT", "ont ", "ONT ", "081"]
header_oft_words = ["off target", "Off target", "Off Target", "OFF TARGET", "off target ", "Off target ", "Off Target ", "OFF TARGET ", "oft", "OFT", "oft ", "OFT ", "082"]
# -----> * Saves Variables * <-----
# Countdown list: index 0 is the next save count to report.
saves_range = range(0, 1000)
saves_list = list(saves_range)
# NOTE(review): this list is also what body() checks for the penalty-kick
# "saved" outcome (instead of saved_words), and it lacks the word "saved"
# itself -- confirm intent.
save_words = ["save", "Save", "SAVE", "saves", "Saves", "SAVES", "save ", "Save ", "SAVE ", "saves ", "Saves ", "SAVES ", "a1ta", "09"]
# -----> * Long Pass Variables * <-----
# Long-pass interceptions, split by attack/defense and second/third ball.
# Countdown lists: index 0 is the next count to report.
long_passes_range = range(0, 1000)
long_passes_list = list(long_passes_range)
long_passes_range_second_ball_defense = range(0, 1000)
long_passes_list_second_ball_defense = list(long_passes_range_second_ball_defense)
long_passes_range_third_ball_defense = range(0, 1000)
long_passes_list_third_ball_defense = list(long_passes_range_third_ball_defense)
long_passes_range_attack = range(0, 1000)
long_passes_list_attack = list(long_passes_range_attack)
long_passes_range_second_ball_attack = range(0, 1000)
long_passes_list_second_ball_attack = list(long_passes_range_second_ball_attack)
long_passes_range_third_ball_attack = range(0, 1000)
long_passes_list_third_ball_attack = list(long_passes_range_third_ball_attack)
long_passes_range_defense = range(0, 1000)
long_passes_list_defense = list(long_passes_range_defense)
# Prompts. NOTE(review): the defense prompt uses underscores while the
# attack prompt uses spaces, and defense_lpi is spelled "defence" (prompt
# text only); kept as-is to preserve displayed output.
long_passes_input_defense = '''second_ball/third_ball
'''
long_passes_input = '''attack/defence
'''
attack_lpi, defense_lpi = long_passes_input.split("/")
long_passes_input_attack = '''second ball/third ball
'''
long_pass_words = ["long pass", "Long pass", "long pass ", "Long Pass ", "LP", "lp", "LP ", "lp ", "long pass interception", "Long pass interception", "long pass interception ", "Long Pass Interception ", "LPI", "lpi", "LPI ", "lpi ", "a1ta", "10"]
# NOTE(review): body() tests the long-pass branch against pl_attack_words,
# not lpi_attack_words -- this list appears unused; confirm intent.
lpi_attack_words = ["attack", "Attack", "ATTACK", "attack ", "Attack ", "ATTACK ", "a", "33", "3/3", "33 ", "3/3 ", "101"]
# Fixed: the originals duplicated the trailing-space all-caps entry
# ("SECOND BALL "/"THIRD BALL ") instead of including the no-space form,
# and defense_words duplicated "1/3" where "1/3 " was intended (cf. the
# "3/3 " pattern in lpi_attack_words). Backward compatible: only adds matches.
attack_sb_words = ["second ball", "Second ball", "SECOND BALL", "second ball ", "Second ball ", "SECOND BALL ", "SB", "sb", "SB ", "sb ", "1011"]
attack_tb_words = ["third ball", "Third ball", "THIRD BALL", "third ball ", "Third ball ", "THIRD BALL ", "TB", "tb", "TB ", "tb ", "1012"]
defense_words = ["defense", "Defense", "DEFENSE", "defense ", "Defense ", "DEFENSE ", "d", "13", "1/3", "13 ", "1/3 ", "102"]
defense_sb_words = ["second ball", "Second ball", "SECOND BALL", "second ball ", "Second ball ", "SECOND BALL ", "SB", "sb", "SB ", "sb ", "1021"]
defense_tb_words = ["third ball", "Third ball", "THIRD BALL", "third ball ", "Third ball ", "THIRD BALL ", "TB", "tb", "TB ", "tb ", "1022"]
# -----> * Possession Loss Variables * <-----
# Possession losses by pitch third (33 = attacking third, 23 = midfield,
# 13 = defensive third), with sub-counts for where the ball came from.
# Countdown lists: index 0 is the next count to report.
pl_range = range(0, 1000)
pl_list = list(pl_range)
pl_range_33 = range(0, 1000)
pl_list_33 = list(pl_range_33)
pl_range_33_23 = range(0, 1000)
pl_list_33_23 = list(pl_range_33_23)
pl_range_33_13 = range(0, 1000)
pl_list_33_13 = list(pl_range_33_13)
pl_range_23 = range(0, 1000)
pl_list_23 = list(pl_range_23)
pl_range_23_33 = range(0, 1000)
pl_list_23_33 = list(pl_range_23_33)
pl_range_23_13 = range(0, 1000)
pl_list_23_13 = list(pl_range_23_13)
pl_range_13 = range(0, 1000)
pl_list_13 = list(pl_range_13)
pl_range_13_33 = range(0, 1000)
pl_list_13_33 = list(pl_range_13_33)
pl_range_13_23 = range(0, 1000)
# Fixed copy-paste bug: this list was built from pl_range_13_33 instead of
# pl_range_13_23 (harmless only because all ranges are identical today).
pl_list_13_23 = list(pl_range_13_23)
# Menu prompts shown when recording a possession loss.
pl_input = '''3/3
2/3
1/3
'''
attack_print = '''
2/3
1/3
'''
midfield_print = '''
3/3
1/3
'''
defense_print = '''
3/3
2/3
'''
pl_words = ["pl", "PL", "Possession Loss", "Possession loss", "possession loss", "pl ", "PL ", "Possession Loss ", "Possession loss ", "possession loss ", "a1ta", "11"]
# Fixed: the *_defense_words lists duplicated "1/3" where "1/3 " (trailing
# space) was intended, matching the "3/3 "/"2/3 " pattern of the sibling
# lists. Backward compatible: only adds matches.
pl_attack_words = ["attack", "Attack", "ATTACK", "attack ", "Attack ", "ATTACK ", "a", "33", "3/3", "33 ", "3/3 ", "111"]
attack_midfield_words = ["midfield", "Midfield", "MIDFIELD", "midfield ", "Midfield ", "MIDFIELD ", "m", "23", "2/3", "23 ", "2/3 ", "1111"]
attack_defense_words = ["defense", "Defense", "DEFENSE", "defense ", "Defense ", "DEFENSE ", "d", "13", "1/3", "13 ", "1/3 ", "1112"]
pl_midfield_words = ["midfield", "Midfield", "MIDFIELD", "midfield ", "Midfield ", "MIDFIELD ", "m", "23", "2/3", "23 ", "2/3 ", "112"]
midfield_attack_words = ["attack", "Attack", "ATTACK", "attack ", "Attack ", "ATTACK ", "a", "33", "3/3", "33 ", "3/3 ", "1121"]
midfield_defense_words = ["defense", "Defense", "DEFENSE", "defense ", "Defense ", "DEFENSE ", "d", "13", "1/3", "13 ", "1/3 ", "1122"]
pl_defense_words = ["defense", "Defense", "DEFENSE", "defense ", "Defense ", "DEFENSE ", "d", "13", "1/3", "13 ", "1/3 ", "113"]
defense_attack_words = ["attack", "Attack", "ATTACK", "attack ", "Attack ", "ATTACK ", "a", "33", "3/3", "33 ", "3/3 ", "1131"]
defense_midfield_words = ["midfield", "Midfield", "MIDFIELD", "midfield ", "Midfield ", "MIDFIELD ", "m", "23", "2/3", "23 ", "2/3 ", "1132"]
# -----> * Offside * <-----
# Countdown list: index 0 is the next offside count to report.
offside_range = range(0, 1000)
offside_list = list(offside_range)
offside_words = ["offside", "Offside", "offside ", "Offside ", "a1ta", "12"]
# -----> * Main Function * <-----
def body():
# global each variable to be able to be referenced later when you print/write the total stat on each game at the end
global string_number_pk_goal, string_number_pk_missed, string_number_pk_saved, string_number_pk, string_number_fk_gd, string_number_offside
global string_number_fk_pd, string_number_fk, string_number_ck_pd, string_number_ck_gd, string_number_ck
global string_number_crosses_gd, string_number_crosses_pd, string_number_crosses, string_number_shots
global string_number_headers_pd, string_number_headers_gd, string_number_shots_pd, string_number_shots_gd
global string_number_v1_l, string_number_v1, string_number_long_passes_second_ball, string_number_v1_w
global string_number_long_passes_third_ball, string_number_long_passes, string_number_save, string_number_headers
global string_number_ti_gd, string_number_ti, string_number_long_passes_defense, string_number_pl_23
global string_number_long_passes_second_ball_defense, string_number_long_passes_third_ball_defense
global string_number_long_passes_attack, string_number_long_passes_third_ball_attack, string_number_pl_23_13
global string_number_long_passes_second_ball_attack, string_number_ti_pd, string_number_pl_13_33
global string_number_pl_13_23, string_number_pl_13, midfield_input, string_number_pl_33_23
global string_number_pl_33, string_number_pl_23_33, string_number_pl_33_13, string_number_pl
# false each variable to declare if a stat was called before or not
int_number_pk_goal = False
int_number_pk_saved = False
int_number_pk_missed = False
int_number_pk = False
int_number_fk_gd = False
int_number_fk_pd = False
int_number_fk = False
int_number_ck_gd = False
int_number_ck_pd = False
int_number_ck = False
int_number_ti_gd = False
int_number_ti_pd = False
int_number_ti = False
int_number_crosses_gd = False
int_number_crosses_pd = False
int_number_crosses = False
int_number_shots_gd = False
int_number_shots_pd = False
int_number_shots = False
int_number_headers_gd = False
int_number_headers_pd = False
int_number_headers = False
int_number_v1_w = False
int_number_v1_l = False
int_number_v1 = False
int_number_long_passes_second_ball_attack = False
int_number_long_passes_third_ball_attack = False
int_number_long_passes_attack = False
int_number_long_passes_second_ball_defense = False
int_number_long_passes_third_ball_defense = False
int_number_long_passes_defense = False
int_number_long_passes = False
int_number_saves = False
int_number_pl_33_23 = False
int_number_pl_33_13 = False
int_number_pl_23_33 = False
int_number_pl_23_13 = False
int_number_pl_23 = False
int_number_pl_33 = False
int_number_pl_13_33 = False
int_number_pl_13_23 = False
int_number_pl_13 = False
int_number_pl = False
int_number_offside = False
# it creates a while loop so the function will go on and on until a the user decides to quit the function
while True:
# the user decides which stat to record by calling it
choice = input()
# -----> * Penalty Kicks Function * <-----
# user inserts one of the words in the list
if choice in pk_words:
# the user decides whether a stat was successful or not
print(blue % goal_pk, red % saved_pk, red % missed_pk)
good_bad_input_pk = input()
if good_bad_input_pk in goal_words:
# take the first number on the list
first_number_pk_goal = pk_list_goal[0]
# remove the first number on the list. The first time it is called it deletes 0
pk_list_goal.remove(first_number_pk_goal)
# takes the next number on a list which is the number that will be called
number_pk_goal = pk_list_goal[0]
# strings the number so it will be printable/writable
string_number_pk_goal = str(number_pk_goal)
# true the integer so it knows it was called
int_number_pk_goal = True
pk_print_string_goal = "Penalty Kick goal(s): " + string_number_pk_goal
# print a nice introduction and the time a stat was called
print(blue % pk_print_string_goal)
elif good_bad_input_pk in save_words:
first_number_pk_saved = pk_list_saved[0]
pk_list_saved.remove(first_number_pk_saved)
number_pk_saved = pk_list_saved[0]
string_number_pk_saved = str(number_pk_saved)
int_number_pk_saved = True
pk_print_string_saved = "Penalty Kick(s) saved: " + string_number_pk_saved
print(red % pk_print_string_saved)
elif good_bad_input_pk in missed_words:
first_number_pk_missed = pk_list_missed[0]
pk_list_missed.remove(first_number_pk_missed)
number_pk_missed = pk_list_missed[0]
string_number_pk_missed = str(number_pk_missed)
int_number_pk_missed = True
pk_print_string_missed = "Penalty Kick(s) missed: " + string_number_pk_missed
print(red % pk_print_string_missed)
first_number_pk = pk_list[0]
pk_list.remove(first_number_pk)
number_pk = pk_list[0]
string_number_pk = str(number_pk)
int_number_pk = True
pk_print_string = "Penalty Kick(s) : " + string_number_pk
print(white % pk_print_string)
# -----> * Free Kicks Function * <-----
elif choice in fk_words:
print(blue % gd_fk, red % pd_fk)
good_bad_input_fk = input()
if good_bad_input_fk in fk_gd_words:
first_number_fk_gd = fk_list_gd[0]
fk_list_gd.remove(first_number_fk_gd)
number_fk_gd = fk_list_gd[0]
string_number_fk_gd = str(number_fk_gd)
int_number_fk_gd = True
fk_print_string_gd = "Free Kick(s) with a Good Delivery: " + string_number_fk_gd
print(blue % fk_print_string_gd)
elif good_bad_input_fk in fk_pd_words:
first_number_fk_pd = fk_list_pd[0]
fk_list_pd.remove(first_number_fk_pd)
number_fk_pd = fk_list_pd[0]
string_number_fk_pd = str(number_fk_pd)
int_number_fk_pd = True
fk_print_string_pd = "Free Kick(s) with a Poor Delivery: " + string_number_fk_pd
print(red % fk_print_string_pd)
first_number_fk = fk_list[0]
fk_list.remove(first_number_fk)
number_fk = fk_list[0]
string_number_fk = str(number_fk)
int_number_fk = True
fk_print_string = "Free Kick(s)" + string_number_fk
print(white % fk_print_string)
# -----> * Corner Kick Variables * <-----
elif choice in ck_words:
print(blue % gd_ck, red % pd_ck)
good_bad_input_ck = input()
if good_bad_input_ck in ck_gd_words:
first_number_ck_gd = ck_list_gd[0]
ck_list_gd.remove(first_number_ck_gd)
number_ck_gd = ck_list_gd[0]
string_number_ck_gd = str(number_ck_gd)
int_number_ck_gd = True
ck_print_string_gd = "Corner Kick(s) with a Good Delivery: " + string_number_ck_gd
print(blue % ck_print_string_gd)
elif good_bad_input_ck in ck_pd_words:
first_number_ck_pd = ck_list_pd[0]
ck_list_pd.remove(first_number_ck_pd)
number_ck_pd = ck_list_pd[0]
string_number_ck_pd = str(number_ck_pd)
int_number_ck_pd = True
ck_print_string_pd = "Corner Kick(s) with a Poor Delivery: " + string_number_ck_pd
print(red % ck_print_string_pd)
first_number_ck = ck_list[0]
ck_list.remove(first_number_ck)
number_ck = ck_list[0]
string_number_ck = str(number_ck)
int_number_ck = True
ck_print_string = "Corner Kick(s): " + string_number_ck
print(white % ck_print_string)
# -----> * Throw Ins Functions * <-----
elif choice in ti_words:
print(blue % gd_ti, red % pd_ti)
good_bad_input_ti = input()
if good_bad_input_ti in ti_gd_words:
first_number_ti_gd = ti_list_gd[0]
ti_list_gd.remove(first_number_ti_gd)
number_ti_gd = ti_list_gd[0]
string_number_ti_gd = str(number_ti_gd)
int_number_ti_gd = True
ti_print_string_gd = "Throw In(s) with a Good Delivery: " + string_number_ti_gd
print(blue % ti_print_string_gd)
elif good_bad_input_ti in ti_pd_words:
first_number_ti_pd = ti_list_pd[0]
ti_list_pd.remove(first_number_ti_pd)
number_ti_pd = ti_list_pd[0]
string_number_ti_pd = str(number_ti_pd)
int_number_ti_pd = True
ti_print_string_pd = "Throw In(s) with a Poor Delivery: " + string_number_ti_pd
print(red % ti_print_string_pd)
first_number_ti = ti_list[0]
ti_list.remove(first_number_ti)
number_ti = ti_list[0]
string_number_ti = str(number_ti)
int_number_ti = True
ti_print_string = "Throw In(s): " + string_number_ti
print(white % ti_print_string)
# -----> * Crosses Function * <-----
elif choice in cross_words:
print(blue % gd_cross, red % pd_cross)
good_bad_input_crosses = input()
if good_bad_input_crosses in cross_gd_words:
first_number_crosses_gd = crosses_list_gd[0]
crosses_list_gd.remove(first_number_crosses_gd)
number_crosses_gd = crosses_list_gd[0]
string_number_crosses_gd = str(number_crosses_gd)
int_number_crosses_gd = True
cross_print_string_gd = "Cross(es) with a Good Delivery: " + string_number_crosses_gd
print(blue % cross_print_string_gd)
elif good_bad_input_crosses in cross_pd_words:
first_number_crosses_pd = crosses_list_pd[0]
crosses_list_pd.remove(first_number_crosses_pd)
number_crosses_pd = crosses_list_pd[0]
string_number_crosses_pd = str(number_crosses_pd)
int_number_crosses_pd = True
cross_print_string_pd = "Cross(es) with a Poor Delivery: ", red % string_number_crosses_pd
print(red % cross_print_string_pd)
first_number_crosses = crosses_list[0]
crosses_list.remove(first_number_crosses)
number_crosses = crosses_list[0]
string_number_crosses = str(number_crosses)
int_number_crosses = True
cross_print_string = "Cross(es): " + string_number_crosses
print(white % cross_print_string)
# -----> * 1 versus 1 Function * <-----
elif choice in v1_words:
print(blue % w_v1, red % l_v1)
good_bad_input_v1 = input()
if good_bad_input_v1 in w_words:
first_number_v1_w = v1_list_w[0]
v1_list_w.remove(first_number_v1_w)
number_v1_w = v1_list_w[0]
string_number_v1_w = str(number_v1_w)
int_number_v1_w = True
v1_print_string_w = "Won 1vs1: " + string_number_v1_w
print(blue % v1_print_string_w)
elif good_bad_input_v1 in l_words:
first_number_v1_l = v1_list_l[0]
v1_list_l.remove(first_number_v1_l)
number_v1_l = v1_list_l[0]
string_number_v1_l = str(number_v1_l)
int_number_v1_l = True
v1_print_string_l = "Lost 1vs1: " + string_number_v1_l
print(red % v1_print_string_l)
first_number_v1 = v1_list[0]
v1_list.remove(first_number_v1)
number_v1 = v1_list[0]
string_number_v1 = str(number_v1)
int_number_v1 = True
v1_print_string = "1vs1: " + string_number_v1
print(white % v1_print_string)
# -----> * Shots Function * <-----
elif choice in shot_words:
print(blue % ont_shot, red % oft_shot)
good_bad_input_shots = input()
if good_bad_input_shots in shot_ont_words:
first_number_shots_gd = shots_list_gd[0]
shots_list_gd.remove(first_number_shots_gd)
number_shots_gd = shots_list_gd[0]
string_number_shots_gd = str(number_shots_gd)
int_number_shots_gd = True
shot_print_string_ont = "Shot(s) on target: " + string_number_shots_gd
print(blue % shot_print_string_ont)
elif good_bad_input_shots in shot_oft_words:
first_number_shots_pd = shots_list_pd[0]
shots_list_pd.remove(first_number_shots_pd)
number_shots_pd = shots_list_pd[0]
string_number_shots_pd = str(number_shots_pd)
int_number_shots_pd = True
shot_print_string_oft = "Shot(s) off target: " + string_number_shots_pd
print(red % shot_print_string_oft)
first_number_shots = shots_list[0]
shots_list.remove(first_number_shots)
number_shots = shots_list[0]
string_number_shots = str(number_shots)
int_number_shots = True
shot_print_string = "Shot(s): " + string_number_shots
print(white % shot_print_string)
# -----> * Headers Function * <-----
elif choice in header_words:
print(blue % ont_header, red % oft_header)
good_bad_input_headers = input()
if good_bad_input_headers in header_ont_words:
first_number_headers_gd = headers_list_gd[0]
headers_list_gd.remove(first_number_headers_gd)
number_headers_gd = headers_list_gd[0]
string_number_headers_gd = str(number_headers_gd)
int_number_headers_gd = True
header_print_string_ont = "Header(s) on target: " + string_number_headers_gd
print(blue % header_print_string_ont)
elif good_bad_input_headers in header_oft_words:
first_number_headers_pd = headers_list_pd[0]
headers_list_pd.remove(first_number_headers_pd)
number_headers_pd = headers_list_pd[0]
string_number_headers_pd = str(number_headers_pd)
int_number_headers_pd = True
header_print_string_oft = "Header(s) off target: " + string_number_headers_pd
print(red % header_print_string_oft)
first_number_headers = headers_list[0]
headers_list.remove(first_number_headers)
number_headers = headers_list[0]
string_number_headers = str(number_headers)
int_number_crosses = True
header_print_string = "Header(s): ", white % string_number_headers
print(white % header_print_string)
# -----> * Long Passes * <-----
elif choice in long_pass_words:
print(blue % attack_lpi, red % defense_lpi)
attack_defense_input_long_pass = input()
if attack_defense_input_long_pass in pl_attack_words:
print(blue % long_passes_input_attack)
sec_third_input_long_pass_attack = input()
if sec_third_input_long_pass_attack in attack_sb_words:
first_number_long_passes_second_ball_attack = long_passes_list_second_ball_attack[0]
long_passes_list_second_ball_attack.remove(first_number_long_passes_second_ball_attack)
number_long_passes_second_ball_attack = long_passes_list_second_ball_attack[0]
string_number_long_passes_second_ball_attack = str(number_long_passes_second_ball_attack)
lpi_print_string_attack_sb = "Second Ball Long Pass Interceptions on Attack:" + string_number_long_passes_second_ball_attack
print(white % lpi_print_string_attack_sb)
int_number_long_passes_second_ball_attack = True
elif sec_third_input_long_pass_attack in attack_tb_words:
first_number_long_passes_third_ball_attack = long_passes_list_third_ball_attack[0]
long_passes_list_third_ball_attack.remove(first_number_long_passes_third_ball_attack)
number_long_passes_third_ball_attack = long_passes_list_third_ball_attack[0]
string_number_long_passes_third_ball_attack = str(number_long_passes_third_ball_attack)
lpi_print_string_attack_tb = "Third Ball Long Pass Interceptions on Attack:" + string_number_long_passes_third_ball_attack
print(white % lpi_print_string_attack_tb)
int_number_long_passes_third_ball_attack = True
first_number_long_passes_attack = long_passes_list_attack[0]
long_passes_list_attack.remove(first_number_long_passes_attack)
number_long_passes_attack = long_passes_list_attack[0]
string_number_long_passes_attack = str(number_long_passes_attack)
lpi_print_string_attack = "Long Pass Interceptions on Attack:" + string_number_long_passes_attack
print(white % lpi_print_string_attack)
int_number_long_passes_attack = True
elif attack_defense_input_long_pass in pl_defense_words:
print(red % long_passes_input_defense)
sec_third_input_long_pass_defense = input()
if sec_third_input_long_pass_defense in defense_sb_words:
first_number_long_passes_second_ball_defense = long_passes_list_second_ball_defense[0]
long_passes_list_second_ball_defense.remove(first_number_long_passes_second_ball_defense)
number_long_passes_second_ball_defense = long_passes_list_second_ball_defense[0]
string_number_long_passes_second_ball_defense = str(number_long_passes_second_ball_defense)
lpi_print_string_defense_sb = "Second Ball Long Pass Interceptions on Defense:" + string_number_long_passes_second_ball_defense
print(white % lpi_print_string_defense_sb)
int_number_long_passes_second_ball_defense = True
elif sec_third_input_long_pass_defense in defense_tb_words:
first_number_long_passes_third_ball_defense = long_passes_list_third_ball_defense[0]
long_passes_list_third_ball_defense.remove(first_number_long_passes_third_ball_defense)
number_long_passes_third_ball_defense = long_passes_list_third_ball_defense[0]
string_number_long_passes_third_ball_defense = str(number_long_passes_third_ball_defense)
lpi_print_string_defense_tb = "Third Ball Long Pass Interceptions on Defense:" + string_number_long_passes_third_ball_defense
print(white % lpi_print_string_defense_tb)
int_number_long_passes_third_ball_defense = True
first_number_long_passes_defense = long_passes_list_defense[0]
long_passes_list_defense.remove(first_number_long_passes_defense)
number_long_passes_defense = long_passes_list_defense[0]
string_number_long_passes_defense = str(number_long_passes_defense)
pk_print_string_goal = "Long Pass Interceptions on Defense:" + string_number_long_passes_defense
print(white % pk_print_string_goal)
int_number_long_passes_defense = True
first_number_long_passes = long_passes_list[0]
long_passes_list.remove(first_number_long_passes)
number_long_passes = long_passes_list[0]
string_number_long_passes = str(number_long_passes)
lpi_print_string = "Long Pass Interceptions: " + string_number_long_passes
print(white % lpi_print_string)
int_number_long_passes = True
# -----> * Saves * <-----
elif choice in save_words:
first_number_save = saves_list[0]
saves_list.remove(first_number_save)
number_save = saves_list[0]
string_number_save = str(number_save)
int_number_saves = True
saves_print_string = "Save(s)" + string_number_save
print(white % saves_print_string)
# -----> * Possession Loss * <-----
elif choice in pl_words:
good_bad_input_pl = input(pl_input)
if good_bad_input_pl in pl_attack_words:
attack_input = input(attack_print)
if attack_input in attack_midfield_words:
first_number_pl_33_23 = pl_list_33_23[0]
pl_list_33_23.remove(first_number_pl_33_23)
number_pl_33_23 = pl_list_33_23[0]
string_number_pl_33_23 = str(number_pl_33_23)
int_number_pl_33_23 = True
pl_print_string_33_23 = "Possession lost on offence that came from Midfield: " + string_number_pl_33_23
print(white % pl_print_string_33_23)
elif attack_input in attack_defense_words:
first_number_pl_33_13 = pl_list_33_13[0]
pl_list_33_13.remove(first_number_pl_33_13)
number_pl_33_13 = pl_list_33_13[0]
string_number_pl_33_13 = str(number_pl_33_13)
int_number_pl_33_13 = True
pl_print_string_33_13 = "Possession lost on offence that came from Defense: " + string_number_pl_33_13
print(white % pl_print_string_33_13)
first_number_pl_33 = pl_list_33[0]
pl_list_33.remove(first_number_pl_33)
number_pl_33 = pl_list_33[0]
string_number_pl_33 = str(number_pl_33)
int_number_pl_33 = True
pl_print_string_33 = "Possession lost on offence: " + string_number_pl_33
print(white % pl_print_string_33)
elif good_bad_input_pl in pl_midfield_words:
midfield_input = input(midfield_print)
if midfield_input in midfield_attack_words:
first_number_pl_23_33 = pl_list_23_33[0]
pl_list_23_33.remove(first_number_pl_23_33)
number_pl_23_33 = pl_list_23_33[0]
string_number_pl_23_33 = str(number_pl_23_33)
int_number_pl_23_33 = True
pl_print_string_23_33 = "Possession lost on midfield that came from Offense: " + string_number_pl_23_33
print(white % pl_print_string_23_33)
elif midfield_input in midfield_defense_words:
first_number_pl_23_13 = pl_list_23_13[0]
pl_list_23_13.remove(first_number_pl_23_13)
number_pl_23_13 = pl_list_23_13[0]
string_number_pl_23_13 = str(number_pl_23_13)
int_number_pl_23_13 = True
pl_print_string_23_13 = "Possession lost on midfield that came from Defense: " + string_number_pl_23_13
print(white % pl_print_string_23_13)
first_number_pl_23 = pl_list_23[0]
pl_list_23.remove(first_number_pl_23)
number_pl_23 = pl_list_23[0]
string_number_pl_23 = str(number_pl_23)
int_number_pl_23 = True
pl_print_string_23 = "Possession lost on midfield: " + string_number_pl_23
print(white % pl_print_string_23)
elif good_bad_input_pl in pl_defense_words:
defense_input = input(defense_print)
if defense_input in attack_defense_words:
first_number_pl_13_33 = pl_list_13_33[0]
pl_list_13_33.remove(first_number_pl_13_33)
number_pl_13_33 = pl_list_13_33[0]
string_number_pl_13_33 = str(number_pl_13_33)
int_number_pl_13_33 = True
pl_print_string_13_33 = "Possession lost on defense that came from offense: " + string_number_pl_13_33
print(white % pl_print_string_13_33)
elif defense_input in defense_midfield_words:
first_number_pl_13_23 = pl_list_13_23[0]
pl_list_13_23.remove(first_number_pl_13_23)
number_pl_13_23 = pl_list_13_23[0]
string_number_pl_13_23 = str(number_pl_13_23)
int_number_pl_13_23 = True
pl_print_string_13_23 = "Possession lost on defense that came from midfield: " + string_number_pl_13_23
print(white % pl_print_string_13_23)
first_number_pl_13 = pl_list_13[0]
pl_list_13.remove(first_number_pl_13)
number_pl_13 = pl_list_13[0]
string_number_pl_13 = str(number_pl_13)
int_number_pl_13 = True
pl_print_string_13 = "Possession lost on defense: " + string_number_pl_13
print(white % pl_print_string_13)
first_number_pl = pl_list[0]
pl_list.remove(first_number_pl)
number_pl = pl_list[0]
string_number_pl = str(number_pl)
int_number_pl = True
pl_print_string = "Possession lost:" + string_number_pl
print(white % pl_print_string)
elif choice in offside_words:
first_number_offside = offside_list[0]
offside_list.remove(first_number_offside)
number_offside = offside_list[0]
string_number_offside = str(number_offside)
int_number_offside = True
offside_print_string = "Offside(s):" + string_number_offside
print(white % offside_print_string)
# when the user does not know the commands a howto.txt will be popped up
elif choice == "help":
howto = "notepad.exe how.txt"
sub.Popen(howto)
# -----> * Quit Function * <-----
# if the user wants to quit he types q to begin the process
elif choice == "q":
# start print out/write on file each stat
# if it was called
if int_number_pk_goal:
# print the number of the stat
print("Penalty Kick goal(s): ", string_number_pk_goal)
# write on the file
safile.write("Penalty Kick goal(s): ")
safile.write(string_number_pk_goal)
# if it was not called
elif not int_number_pk_goal:
# print/write the time that the stat was called was None
print("Penalty Kick goal(s): 0")
safile.write("\nPenalty Kick goal(s): 0")
if int_number_pk_missed:
print("Penalty Kick(s) missed: ", string_number_pk_missed)
safile.write("\nPenalty Kick(s) missed: ")
safile.write(string_number_pk_missed)
elif not int_number_pk_missed:
print("Penalty Kick(s) missed: 0")
safile.write("\nPenalty Kick(s) missed: 0 ")
if int_number_pk_saved:
print("Penalty Kick(s) saved: ", string_number_pk_saved)
safile.write("\nPenalty Kick(s) saved: ")
safile.write(string_number_pk_saved)
elif not int_number_pk_saved:
print("Penalty Kick(s) saved: 0")
safile.write("\nPenalty Kick(s) saved: 0")
if int_number_pk:
print("Penalty Kick(s): ", string_number_pk)
safile.write("\nPenalty Kick(s): ")
safile.write(string_number_pk)
elif not int_number_pk:
print("Penalty Kick(s): 0")
safile.write("\nPenalty Kick(s): 0")
if int_number_fk_gd:
print("Free Kick(s) with Good Delivery: ", string_number_fk_gd)
safile.write("\nFree Kick(s) with Good Delivery: ")
safile.write(string_number_fk_gd)
elif not int_number_fk_gd:
print("Free Kick(s) with Good Delivery: 0")
safile.write("\nFree Kick(s) with Good Delivery: 0")
# Free kicks with poor delivery: report the recorded count, or 0 if never tallied.
if int_number_fk_pd:
    # BUGFIX: the console label previously said "Good Delivery" for the
    # poor-delivery stat (copy/paste from the fk_gd branch above).
    print("Free Kick(s) with Poor Delivery: ", string_number_fk_pd)
    safile.write("\nFree Kick(s) with Poor Delivery: ")
    safile.write(string_number_fk_pd)
elif not int_number_fk_pd:
    print("Free Kick(s) with Poor Delivery: 0")
    safile.write("\nFree Kick(s) with Poor Delivery: 0")
if int_number_fk:
print("Free Kick(s): ", string_number_fk)
safile.write("\nFree Kick(s): ")
safile.write(string_number_fk)
elif not int_number_fk:
print("Free Kick(s): 0")
safile.write("\nFree Kick(s): 0")
# Corner kicks: report good/poor delivery counts, writing 0 when never tallied.
if int_number_ck_gd:
    print("Corner Kick(s) with Good Delivery: ", string_number_ck_gd)
    safile.write("\nCorner Kick(s) with Good Delivery: ")
    safile.write(string_number_ck_gd)
elif not int_number_ck_gd:
    print("Corner Kick(s) with Good Delivery: 0")
    # BUGFIX: the "0" was missing from the file entry in this branch.
    safile.write("\nCorner Kick(s) with Good Delivery: 0")
if int_number_ck_pd:
    print("Corner Kick(s) with Poor Delivery: ", string_number_ck_pd)
    # BUGFIX: both file labels below previously said "Good Delivery" for the
    # poor-delivery stat (copy/paste from the ck_gd branch).
    safile.write("\nCorner Kick(s) with Poor Delivery: ")
    safile.write(string_number_ck_pd)
elif not int_number_ck_pd:
    print("Corner Kick(s) with Poor Delivery: 0")
    safile.write("\nCorner Kick(s) with Poor Delivery: 0")
if int_number_ck:
print("Corner Kick(s): ", string_number_ck)
safile.write("\nCorner Kick(s): ")
safile.write(string_number_ck)
elif not int_number_ck:
print("Corner Kick(s): 0")
safile.write("\nCorner Kick(s): 0")
if int_number_ti_gd:
print("Throw In(s) with Good Delivery: ", string_number_ti_gd)
safile.write("\nThrow In(s) with Good Delivery: ")
safile.write(string_number_ti_gd)
elif not int_number_ti_gd:
print("Throw In(s) with Good Delivery: 0")
safile.write("\nThrow In(s) with Good Delivery: 0")
if int_number_ti_pd:
print("Throw In(s) with Poor Delivery: ", string_number_ti_pd)
safile.write("\nThrow In(s) with Poor Delivery: ")
safile.write(string_number_ti_pd)
elif not int_number_ti_pd:
print("Throw In(s) with Poor Delivery: 0")
safile.write("\nThrow In(s) with Poor Delivery: 0")
if int_number_ti:
print("Throw In(s): ", string_number_ti)
safile.write("\nThrow In(s): ")
safile.write(string_number_ti)
elif not int_number_ti:
print("Throw In(s): 0")
safile.write("\nThrow In(s): 0")
# Crosses with good delivery: report the recorded count, or 0 if never tallied.
if int_number_crosses_gd:
    print("Cross(es) with Good Delivery: ", string_number_crosses_gd)
    safile.write("\nCross(es) with Good Delivery: ")
    safile.write(string_number_crosses_gd)
elif not int_number_crosses_gd:
    print("Cross(es) with Good Delivery: 0")
    # BUGFIX: the "0" was missing from the file entry in this branch.
    safile.write("\nCross(es) with Good Delivery: 0")
if int_number_crosses_pd:
print("Cross(es) with Poor Delivery: ", string_number_crosses_pd)
safile.write("\nCross(es) with Poor Delivery: ")
safile.write(string_number_crosses_pd)
elif not int_number_crosses_pd:
print("Cross(es) with Poor Delivery: 0")
safile.write("\nCross(es) with Poor Delivery: 0")
if int_number_crosses:
print("Cross(es): ", string_number_crosses)
safile.write("\nCross(es): ")
safile.write(string_number_crosses)
elif not int_number_crosses:
print("Cross(es): 0")
safile.write("\nCross(es): 0")
if int_number_shots_gd:
print("Shot(s) on Target: ", string_number_shots_gd)
safile.write("\nShot(s) on Target: ")
safile.write(string_number_shots_gd)
elif not int_number_shots_gd:
print("Shot(s) on Target: 0")
safile.write("\nShot(s) on Target: 0")
if int_number_shots_pd:
print("Shot(s) off Target: ", string_number_shots_pd)
safile.write("\nShot(s) off Target: ")
safile.write(string_number_shots_pd)
elif not int_number_shots_pd:
print("Shot(s) off Target: 0")
safile.write("\nShot(s) off Target: 0")
if int_number_shots:
print("Shot(s): ", string_number_shots)
safile.write("\nShot(s): ")
safile.write(string_number_shots)
elif not int_number_shots:
print("Shot(s): 0")
safile.write("\nShot(s): 0")
if int_number_headers_gd:
print("Header(s) on Target: ", string_number_headers_gd)
safile.write("\nHeader(s) on Target: ")
safile.write(string_number_headers_gd)
elif not int_number_headers_gd:
print("Header(s) on Target: 0")
safile.write("\nHeader(s) on Target: 0")
if int_number_headers_pd:
print("Header(s) off Target: ", string_number_headers_pd)
safile.write("\nHeader(s) off Target: ")
safile.write(string_number_headers_pd)
elif not int_number_headers_pd:
print("Header(s) off Target: 0")
safile.write("\nHeader(s) off Target: 0")
if int_number_headers:
print("Header(s): ", string_number_headers)
safile.write("\nHeader(s): ")
safile.write(string_number_headers)
elif not int_number_headers:
print("Header(s): 0")
safile.write("\nHeader(s): 0")
if int_number_v1_w:
print("1vs1 Won: ", string_number_v1_w)
safile.write("\n1vs1 Won: ")
safile.write(string_number_v1_w)
elif not int_number_v1_w:
print("1vs1 Won: 0")
safile.write("\n1vs1 Won: 0")
if int_number_v1_l:
print("1vs1 Lost: ", string_number_v1_l)
safile.write("\n1vs1 Lost: ")
safile.write(string_number_v1_l)
elif not int_number_v1_l:
print("1vs1 Lost: 0")
safile.write("\n1vs1 Lost: 0")
if int_number_v1:
print("1vs1: ", string_number_v1)
safile.write("\n1vs1: ")
safile.write(string_number_v1)
elif not int_number_v1:
print("1vs1: 0")
safile.write("\n1vs1: 0")
if int_number_long_passes_second_ball_attack:
print("Second Ball Long Pass Interceptions on Attack: ", string_number_long_passes_second_ball_attack)
safile.write("\nSecond Ball Long Pass Interceptions on Attack: ")
safile.write(string_number_long_passes_second_ball_attack)
elif not int_number_long_passes_second_ball_attack:
print("Second Ball Long Pass Interceptions on Attack: 0")
safile.write("\nSecond Ball Long Pass Interceptions on Attack: 0")
if int_number_long_passes_third_ball_attack:
print("Third Ball Long Pass Interceptions on Attack: ", string_number_long_passes_third_ball_attack)
safile.write("\nThird Ball Long Pass Interceptions on Attack: ")
safile.write(string_number_long_passes_third_ball_attack)
elif not int_number_long_passes_third_ball_attack:
print("Third Ball Long Pass Interceptions on Attack: 0")
safile.write("\nThird Ball Long Pass Interceptions on Attack: 0")
if int_number_long_passes_attack:
print("Long Pass Interceptions on Attack: ", string_number_long_passes_attack)
safile.write("\nLong Pass Interceptions on Attack: ")
safile.write(string_number_long_passes_attack)
elif not int_number_long_passes_attack:
print("Long Pass Interceptions on Attack: 0")
safile.write("\nLong Pass Interceptions on Attack: 0")
if int_number_long_passes_second_ball_defense:
print("Second Ball Long Pass Interceptions on Defense: ", string_number_long_passes_second_ball_defense)
safile.write("\nSecond Ball Long Pass Interceptions on Defense: ")
safile.write(string_number_long_passes_second_ball_defense)
elif not int_number_long_passes_second_ball_defense:
print("Second Ball Long Pass Interceptions on Defense: 0")
safile.write("\nSecond Ball Long Pass Interceptions on Defense: 0")
if int_number_long_passes_third_ball_defense:
print("Third Ball Long Pass Interceptions on Defense: ", string_number_long_passes_third_ball_defense)
safile.write("\nThird Ball Long Pass Interceptions on Defense: ")
safile.write(string_number_long_passes_third_ball_defense)
elif not int_number_long_passes_third_ball_defense:
print("Third Ball Long Pass Interceptions on Defense: 0")
safile.write("\nThird Ball Long Pass Interceptions on Defense: 0")
if int_number_long_passes_defense:
print("Long Pass Interceptions on Defense: ", string_number_long_passes_defense)
safile.write("\nLong Pass Interceptions on Defense: ")
safile.write(string_number_long_passes_defense)
elif not int_number_long_passes_defense:
print("Long Pass Interceptions on Defense: 0")
safile.write("\nLong Pass Interceptions on Defense: 0")
if int_number_long_passes:
print("Long Pass Interceptions: ", string_number_long_passes)
safile.write("\nLong Pass Interceptions: ")
safile.write(string_number_long_passes)
elif not int_number_long_passes:
print("Long Pass Interceptions: 0")
safile.write("\nLong Pass Interceptions: 0")
if int_number_saves:
print("Saves: ", string_number_save)
safile.write("\nSaves: ")
safile.write(string_number_save)
elif not int_number_saves:
print("\nSaves: 0")
safile.write("\nSaves: 0")
if int_number_pl_33_23:
print("Possession(s) lost on offence that came from midfield: ", string_number_pl_33_23)
safile.write("\nPossession(s) lost on offence that came from midfield: ")
safile.write(string_number_pl_33_23)
elif not int_number_pl_33_23:
print("Possession lost on offence that came from midfield: 0")
safile.write("\nPossession lost on offence that came from midfield: 0")
if int_number_pl_33_13:
print("Possession(s) lost on offence that came from defense: ", string_number_pl_33_13)
safile.write("\nPossession(s) lost on offence that came from defense: ")
safile.write(string_number_pl_33_13)
elif not int_number_pl_33_13:
print("Possession lost on offence that came from defense: 0")
safile.write("\nPossession lost on offence that came from defense: 0")
if int_number_pl_33:
print("Possession(s) lost on offence: ", string_number_pl_33)
safile.write("\nPossession(s) lost on offence: ")
safile.write(string_number_pl_33)
elif not int_number_pl_33:
print("Possession lost on offence: 0")
safile.write("\nPossession lost on offence: 0")
if int_number_pl_23_33:
print("Possession(s) lost on midfield that came from offense: ", string_number_pl_23_33)
safile.write("\nPossession(s) lost on midfield that came from offense: ")
safile.write(string_number_pl_23_33)
elif not int_number_pl_23_33:
print("Possession lost on midfield that came from offense: 0")
safile.write("\nPossession lost on midfield that came from offense: 0")
if int_number_pl_23_13:
print("Possession(s) lost on midfield that came from defense: ", string_number_pl_23_13)
safile.write("\nPossession(s) lost on midfield that came from defense: ")
safile.write(string_number_pl_23_13)
elif not int_number_pl_23_13:
print("Possession lost on midfield that came from defense: 0")
safile.write("\nPossession lost on midfield that came from defense: 0")
if int_number_pl_23:
print("Possession(s) lost on midfield: ", string_number_pl_23)
safile.write("\nPossession(s) lost on midfield: ")
safile.write(string_number_pl_23)
elif not int_number_pl_23:
print("Possession lost on midfield: 0")
safile.write("\nPossession lost on midfield: 0")
if int_number_pl_13_33:
print("Possession(s) lost on defense that came from offense: ", string_number_pl_13_33)
safile.write("\nPossession(s) lost on defense that came from offense: ")
safile.write(string_number_pl_13_33)
elif not int_number_pl_13_33:
print("Possession lost on defense that came from offense: 0")
safile.write("\nPossession lost on defense that came from offense: 0")
# Possessions lost on defense after the ball came from midfield (the live
# tally for pl_13_23 prints "defense that came from midfield").
if int_number_pl_13_23:
    # BUGFIX: labels previously said "came from offense", duplicating the
    # pl_13_33 report directly above.
    print("Possession(s) lost on defense that came from midfield: ", string_number_pl_13_23)
    safile.write("\nPossession(s) lost on defense that came from midfield: ")
    safile.write(string_number_pl_13_23)
elif not int_number_pl_13_23:
    print("Possession lost on defense that came from midfield: 0")
    safile.write("\nPossession lost on defense that came from midfield: 0")
if int_number_pl_13:
print("Possession(s) lost on defense: ", string_number_pl_13)
safile.write("\nPossession(s) lost on defense: ")
safile.write(string_number_pl_13)
elif not int_number_pl_13:
print("Possession lost on defense: 0")
safile.write("\nPossession lost on defense: 0")
if int_number_pl:
print("Possession(s) lost: ", string_number_pl)
safile.write("\nPossession(s) lost: ")
safile.write(string_number_pl)
elif not int_number_pl:
print("Possessions lost: 0")
safile.write("\nPossession lost: 0")
if int_number_offside:
print("Offside(s): ", string_number_offside)
safile.write("\nOffside(s): ")
safile.write(string_number_offside)
elif not int_number_offside:
print("Offside(s) h2: 0")
safile.write("\nOffside(s): 0 ")
# close the file
safile.close()
# break the while loop, quiting the function
break
body()
| |
#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class systembw_stats(base_resource) :
""" Statistics for bw resource.
"""
def __init__(self) :
	# "clearstats" action argument (basic/full); empty string means do not clear.
	self._clearstats = ""
	# BW client connection-pool counters.
	self._httpcltpoolinactive = 0
	self._httpcltpooloutactive = 0
	# Server-side HTTP response counters and their per-second rates.
	self._httpsvr200okresp = 0
	self._httpsvr200okresprate = 0
	self._httpsvr404notfound = 0
	self._httpsvr404notfoundrate = 0
	self._httpclterrstray = 0
	self._httpclterrstrayrate = 0
	# Time-to-first-response-packet (TTFP) distribution: LWM, bands 0-7,
	# HWM (each with a /s rate) and the observed peak RTT.
	self._httpcltttfplwm = 0
	self._httpcltttfplwmrate = 0
	self._httpcltttfp_0 = 0
	self._httpcltttfp_0rate = 0
	self._httpcltttfp_1 = 0
	self._httpcltttfp_1rate = 0
	self._httpcltttfp_2 = 0
	self._httpcltttfp_2rate = 0
	self._httpcltttfp_3 = 0
	self._httpcltttfp_3rate = 0
	self._httpcltttfp_4 = 0
	self._httpcltttfp_4rate = 0
	self._httpcltttfp_5 = 0
	self._httpcltttfp_5rate = 0
	self._httpcltttfp_6 = 0
	self._httpcltttfp_6rate = 0
	self._httpcltttfp_7 = 0
	self._httpcltttfp_7rate = 0
	self._httpcltttfphwm = 0
	self._httpcltttfphwmrate = 0
	self._httpcltttfpmax = 0
	# Time-to-last-response-packet (TTLP) distribution: same layout as TTFP.
	self._httpcltttlplwm = 0
	self._httpcltttlplwmrate = 0
	self._httpcltttlp_0 = 0
	self._httpcltttlp_0rate = 0
	self._httpcltttlp_1 = 0
	self._httpcltttlp_1rate = 0
	self._httpcltttlp_2 = 0
	self._httpcltttlp_2rate = 0
	self._httpcltttlp_3 = 0
	self._httpcltttlp_3rate = 0
	self._httpcltttlp_4 = 0
	self._httpcltttlp_4rate = 0
	self._httpcltttlp_5 = 0
	self._httpcltttlp_5rate = 0
	self._httpcltttlp_6 = 0
	self._httpcltttlp_6rate = 0
	self._httpcltttlp_7 = 0
	self._httpcltttlp_7rate = 0
	self._httpcltttlphwm = 0
	self._httpcltttlphwmrate = 0
	self._httpcltttlpmax = 0
@property
def clearstats(self) :
	"""Clear the statistics / counters.<br/>Possible values = basic, full.
	"""
	try :
		return self._clearstats
	except Exception as e:
		raise e
@clearstats.setter
def clearstats(self, clearstats) :
	"""Clear the statistics / counters.

	:param clearstats: "basic" or "full" (see the Clearstats constants below).
	"""
	try :
		self._clearstats = clearstats
	except Exception as e:
		raise e
@property
def httpcltttfp_4rate(self) :
"""Rate (/s) counter for httpcltttfp_4.
"""
try :
return self._httpcltttfp_4rate
except Exception as e:
raise e
@property
def httpcltttlp_5rate(self) :
"""Rate (/s) counter for httpcltttlp_5.
"""
try :
return self._httpcltttlp_5rate
except Exception as e:
raise e
@property
def httpcltttfp_2rate(self) :
"""Rate (/s) counter for httpcltttfp_2.
"""
try :
return self._httpcltttfp_2rate
except Exception as e:
raise e
@property
def httpcltttfp_7rate(self) :
"""Rate (/s) counter for httpcltttfp_7.
"""
try :
return self._httpcltttfp_7rate
except Exception as e:
raise e
@property
def httpcltttlphwmrate(self) :
"""Rate (/s) counter for httpcltttlphwm.
"""
try :
return self._httpcltttlphwmrate
except Exception as e:
raise e
@property
def httpcltttfp_0rate(self) :
"""Rate (/s) counter for httpcltttfp_0.
"""
try :
return self._httpcltttfp_0rate
except Exception as e:
raise e
@property
def httpcltttlplwmrate(self) :
"""Rate (/s) counter for httpcltttlplwm.
"""
try :
return self._httpcltttlplwmrate
except Exception as e:
raise e
@property
def httpcltttfp_4(self) :
"""Number of Responses Falling on Band-4 for TTFP.
"""
try :
return self._httpcltttfp_4
except Exception as e:
raise e
@property
def httpcltpooloutactive(self) :
"""No of responses Received.
"""
try :
return self._httpcltpooloutactive
except Exception as e:
raise e
@property
def httpcltttlplwm(self) :
"""Number of Responses Falling on LWM for TTLP.
"""
try :
return self._httpcltttlplwm
except Exception as e:
raise e
@property
def httpcltttlp_1(self) :
"""Number of Responses Falling on Band-1 for TTLP.
"""
try :
return self._httpcltttlp_1
except Exception as e:
raise e
@property
def httpsvr404notfoundrate(self) :
"""Rate (/s) counter for httpsvr404notfound.
"""
try :
return self._httpsvr404notfoundrate
except Exception as e:
raise e
@property
def httpcltttlp_6(self) :
"""Number of Responses Falling on Band-6 for TTLP.
"""
try :
return self._httpcltttlp_6
except Exception as e:
raise e
@property
def httpcltttlphwm(self) :
"""Number of Responses Falling on HWM for TTLP.
"""
try :
return self._httpcltttlphwm
except Exception as e:
raise e
@property
def httpcltttlp_1rate(self) :
"""Rate (/s) counter for httpcltttlp_1.
"""
try :
return self._httpcltttlp_1rate
except Exception as e:
raise e
@property
def httpcltpoolinactive(self) :
"""No of requests sent from BW client.
"""
try :
return self._httpcltpoolinactive
except Exception as e:
raise e
@property
def httpcltttlp_3(self) :
"""Number of Responses Falling on Band-3 for TTLP.
"""
try :
return self._httpcltttlp_3
except Exception as e:
raise e
@property
def httpcltttlp_5(self) :
"""Number of Responses Falling on Band-5 for TTLP.
"""
try :
return self._httpcltttlp_5
except Exception as e:
raise e
@property
def httpcltttlp_6rate(self) :
"""Rate (/s) counter for httpcltttlp_6.
"""
try :
return self._httpcltttlp_6rate
except Exception as e:
raise e
@property
def httpsvr200okresprate(self) :
"""Rate (/s) counter for httpsvr200okresp.
"""
try :
return self._httpsvr200okresprate
except Exception as e:
raise e
@property
def httpcltttfp_1rate(self) :
"""Rate (/s) counter for httpcltttfp_1.
"""
try :
return self._httpcltttfp_1rate
except Exception as e:
raise e
@property
def httpcltttfp_1(self) :
"""Number of Responses Falling on Band-1 for TTFP.
"""
try :
return self._httpcltttfp_1
except Exception as e:
raise e
@property
def httpcltttfphwmrate(self) :
"""Rate (/s) counter for httpcltttfphwm.
"""
try :
return self._httpcltttfphwmrate
except Exception as e:
raise e
@property
def httpcltttfp_6(self) :
"""Number of Responses Falling on Band-6 for TTFP.
"""
try :
return self._httpcltttfp_6
except Exception as e:
raise e
@property
def httpcltttfpmax(self) :
"""Peak RTT observed for Time to First response packet.
"""
try :
return self._httpcltttfpmax
except Exception as e:
raise e
@property
def httpcltttfp_5(self) :
"""Number of Responses Falling on Band-5 for TTFP.
"""
try :
return self._httpcltttfp_5
except Exception as e:
raise e
@property
def httpcltttlpmax(self) :
"""Peak RTT observed for Time to Last response packet.
"""
try :
return self._httpcltttlpmax
except Exception as e:
raise e
@property
def httpcltttlp_4rate(self) :
"""Rate (/s) counter for httpcltttlp_4.
"""
try :
return self._httpcltttlp_4rate
except Exception as e:
raise e
@property
def httpcltttlp_7rate(self) :
"""Rate (/s) counter for httpcltttlp_7.
"""
try :
return self._httpcltttlp_7rate
except Exception as e:
raise e
@property
def httpcltttfp_0(self) :
"""Number of Responses Falling on Band-0 for TTFP.
"""
try :
return self._httpcltttfp_0
except Exception as e:
raise e
@property
def httpcltttfp_3(self) :
"""Number of Responses Falling on Band-3 for TTFP.
"""
try :
return self._httpcltttfp_3
except Exception as e:
raise e
@property
def httpcltttfphwm(self) :
"""Number of Responses Falling on HWM for TTFP.
"""
try :
return self._httpcltttfphwm
except Exception as e:
raise e
@property
def httpclterrstray(self) :
"""Number of stray packets received from server without HTTP request.
"""
try :
return self._httpclterrstray
except Exception as e:
raise e
@property
def httpcltttlp_0rate(self) :
"""Rate (/s) counter for httpcltttlp_0.
"""
try :
return self._httpcltttlp_0rate
except Exception as e:
raise e
@property
def httpcltttfplwm(self) :
"""Number of Responses Falling on LWM for TTFP.
"""
try :
return self._httpcltttfplwm
except Exception as e:
raise e
@property
def httpcltttlp_0(self) :
"""Number of Responses Falling on Band-0 for TTLP.
"""
try :
return self._httpcltttlp_0
except Exception as e:
raise e
@property
def httpclterrstrayrate(self) :
"""Rate (/s) counter for httpclterrstray.
"""
try :
return self._httpclterrstrayrate
except Exception as e:
raise e
@property
def httpcltttfplwmrate(self) :
"""Rate (/s) counter for httpcltttfplwm.
"""
try :
return self._httpcltttfplwmrate
except Exception as e:
raise e
@property
def httpcltttlp_2(self) :
"""Number of Responses Falling on Band-2 for TTLP.
"""
try :
return self._httpcltttlp_2
except Exception as e:
raise e
@property
def httpcltttfp_2(self) :
"""Number of Responses Falling on Band-2 for TTFP.
"""
try :
return self._httpcltttfp_2
except Exception as e:
raise e
@property
def httpcltttlp_2rate(self) :
"""Rate (/s) counter for httpcltttlp_2.
"""
try :
return self._httpcltttlp_2rate
except Exception as e:
raise e
@property
def httpcltttlp_4(self) :
"""Number of Responses Falling on Band-4 for TTLP.
"""
try :
return self._httpcltttlp_4
except Exception as e:
raise e
@property
def httpcltttfp_7(self) :
"""Number of Responses Falling on Band-7 for TTFP.
"""
try :
return self._httpcltttfp_7
except Exception as e:
raise e
@property
def httpsvr200okresp(self) :
"""Number of 200 Ok response sent from the BW appliance.
"""
try :
return self._httpsvr200okresp
except Exception as e:
raise e
@property
def httpsvr404notfound(self) :
"""Number of 404 Not Found responses sent.
"""
try :
return self._httpsvr404notfound
except Exception as e:
raise e
@property
def httpcltttlp_3rate(self) :
"""Rate (/s) counter for httpcltttlp_3.
"""
try :
return self._httpcltttlp_3rate
except Exception as e:
raise e
@property
def httpcltttfp_6rate(self) :
"""Rate (/s) counter for httpcltttfp_6.
"""
try :
return self._httpcltttfp_6rate
except Exception as e:
raise e
@property
def httpcltttfp_5rate(self) :
"""Rate (/s) counter for httpcltttfp_5.
"""
try :
return self._httpcltttfp_5rate
except Exception as e:
raise e
@property
def httpcltttfp_3rate(self) :
"""Rate (/s) counter for httpcltttfp_3.
"""
try :
return self._httpcltttfp_3rate
except Exception as e:
raise e
@property
def httpcltttlp_7(self) :
"""Number of Responses Falling on Band-7 for TTLP.
"""
try :
return self._httpcltttlp_7
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
	""" converts nitro response into object and returns the object array in case of get request.

	Deserializes the raw NITRO payload into a systembw_response and returns
	its list of systembw stat objects.  Raises nitro_exception when the
	appliance reports an error: errorcode != 0 combined with either an
	"ERROR" severity or no severity field at all.
	"""
	try :
		# The wire resource name is the class name without the "_stats" suffix.
		result = service.payload_formatter.string_to_resource(systembw_response, response, self.__class__.__name__.replace('_stats',''))
		if(result.errorcode != 0) :
			# Errorcode 444 means the appliance session expired; drop it so the
			# next call re-authenticates.
			if (result.errorcode == 444) :
				service.clear_session(self)
			if result.severity :
				# Non-"ERROR" severities (e.g. warnings) are deliberately not raised.
				if (result.severity == "ERROR") :
					raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
			else :
				raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
		return result.systembw
	except Exception as e :
		raise e
def _get_object_name(self) :
	"""Return the identifier of this resource instance.

	systembw statistics are a global (unnamed) resource, so there is no
	object identifier to report.
	"""
	# `return None` cannot raise, so the original try/re-raise wrapper was a
	# no-op and is omitted.
	return None
@classmethod
def get(cls, service, name="", option_="") :
	""" Use this API to fetch the statistics of all systembw_stats resources that are configured on netscaler.

	:param service: nitro_service session used to issue the stat request.
	:param name: unused -- systembw is a global stat resource.
		NOTE(review): passing a non-empty name skips the fetch, leaving
		`response` unbound and raising UnboundLocalError at the return --
		confirm whether a named-fetch path was intended.
	:param option_: extra stat options forwarded to stat_resources.
	"""
	try :
		obj = systembw_stats()
		if not name :
			response = obj.stat_resources(service, option_)
		return response
	except Exception as e:
		raise e
class Clearstats:
	# Allowed values for the clearstats argument.
	basic = "basic"
	full = "full"
class systembw_response(base_response) :
	"""NITRO response wrapper holding a list of systembw stat objects.

	:param length: number of placeholder systembw_stats objects to
		pre-allocate for the payload formatter to populate.
	"""
	def __init__(self, length=1) :
		self.errorcode = 0
		self.message = ""
		self.severity = ""
		self.sessionid = ""
		# BUGFIX(cleanup): the original assigned ``self.systembw = []`` first
		# and immediately overwrote it here; the dead assignment is removed.
		self.systembw = [systembw_stats() for _ in range(length)]
| |
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
# This test is for the pipeline reset code
from future import standard_library
standard_library.install_aliases()
from builtins import zip
from builtins import *
import unittest
import logging
import json
import bson.json_util as bju
import attrdict as ad
import arrow
import numpy as np
# Our imports
import emission.core.get_database as edb
import emission.core.wrapper.localdate as ecwl
import emission.pipeline.reset as epr
import emission.analysis.plotting.geojson.geojson_feature_converter as gfc
# Test imports
import emission.tests.common as etc
class TestPipelineReset(unittest.TestCase):
def setUp(self):
    # Seed numpy so any randomized behavior during the pipeline runs is
    # reproducible across test invocations.
    np.random.seed(61297777)
def tearDown(self):
    # Remove everything this test inserted so later tests start from a
    # clean database.
    logging.debug("Clearing related databases")
    self.clearRelatedDb()
def clearRelatedDb(self):
    # Delete all documents belonging to the test user from the raw
    # timeseries, the analysis results and the usercache collections.
    # NOTE(review): `remove` is the legacy pymongo API (superseded by
    # delete_many); kept to match the pymongo version this project pins.
    edb.get_timeseries_db().remove({"user_id": self.testUUID})
    edb.get_analysis_timeseries_db().remove({"user_id": self.testUUID})
    edb.get_usercache_db().remove({"user_id": self.testUUID})
def compare_result(self, result, expect):
    """Assert that the geojson timeline `result` matches ground truth `expect`.

    Compares trip-level properties first, then every trip feature (places
    and section FeatureCollections), checking only fields that are stable
    across pipeline runs -- object IDs change every run and are ignored.
    """
    # This is basically a bunch of asserts to ensure that the timeline is as
    # expected. We are not using a recursive diff because things like the IDs
    # will change from run to run. Instead, I pick out a bunch of important
    # things that are highly user visible
    # Since this is deterministic, we can also include things that are not that user visible :)
    # First pass only logs the trip pairing, so that when an assertion below
    # fails the full alignment is already in the logs.
    for rt, et in zip(result, expect):
        logging.debug("Comparing %s -> %s with %s -> %s" %
                      (rt.properties.start_fmt_time, rt.properties.end_fmt_time,
                       et.properties.start_fmt_time, et.properties.end_fmt_time))
    self.assertEqual(len(result), len(expect))
    for rt, et in zip(result, expect):
        logging.debug("======= Comparing trip =========")
        logging.debug(json.dumps(rt.properties, indent=4, default=bju.default))
        logging.debug(json.dumps(et.properties, indent=4, default=bju.default))
        # Highly user visible
        self.assertEqual(rt.properties.start_ts, et.properties.start_ts)
        self.assertEqual(rt.properties.end_ts, et.properties.end_ts)
        self.assertEqual(rt.properties.start_loc, et.properties.start_loc)
        self.assertEqual(rt.properties.end_loc, et.properties.end_loc)
        # Distance is a float; two decimal places absorbs rounding noise.
        self.assertAlmostEqual(rt.properties.distance, et.properties.distance, places=2)
        self.assertEqual(len(rt.features), len(et.features))
        for rs, es in zip(rt.features, et.features):
            logging.debug("------- Comparing trip feature ---------")
            logging.debug(json.dumps(rs, indent=4, default=bju.default))
            logging.debug(json.dumps(es, indent=4, default=bju.default))
            self.assertEqual(rs.type, es.type)
            if rs.type == "Feature":
                # Plain Features are places; check enter/exit times when present.
                # The first place will not have an enter time, so we can't check it
                if 'enter_fmt_time' not in rs.properties:
                    self.assertNotIn("enter_fmt_time", es.properties)
                else:
                    self.assertEqual(rs.properties.enter_fmt_time, es.properties.enter_fmt_time)
                # Similarly, the last place will not have an exit time, so we can't check it
                if 'exit_fmt_time' not in rs.properties:
                    self.assertNotIn("exit_fmt_time", es.properties)
                else:
                    self.assertEqual(rs.properties.exit_fmt_time, es.properties.exit_fmt_time)
                self.assertEqual(rs.properties.feature_type, es.properties.feature_type)
            else:
                # Otherwise it must be a section FeatureCollection; compare the
                # embedded section's timing, mode, and array lengths (values of
                # speeds/coordinates are too noisy to compare element-wise).
                self.assertEqual(rs.type, "FeatureCollection")
                self.assertEqual(rs.features[0].properties.start_fmt_time, es.features[0].properties.start_fmt_time)
                self.assertEqual(rs.features[0].properties.end_fmt_time, es.features[0].properties.end_fmt_time)
                self.assertEqual(rs.features[0].properties.sensed_mode, es.features[0].properties.sensed_mode)
                self.assertEqual(len(rs.features[0].properties.speeds), len(es.features[0].properties.speeds))
                self.assertEqual(len(rs.features[0].geometry.coordinates), len(es.features[0].geometry.coordinates))
            logging.debug(20 * "-")
        logging.debug(20 * "=")
def testResetToStart(self):
    """
    - Load data for both days
    - Run pipelines
    - Verify that all is well
    - Reset to start
    - Verify that there is no analysis data
    - Re-run pipelines
    - Verify that all is well
    """
    # Load all data
    dataFile_1 = "emission/tests/data/real_examples/shankari_2016-07-22"
    dataFile_2 = "emission/tests/data/real_examples/shankari_2016-07-25"
    start_ld_1 = ecwl.LocalDate({'year': 2016, 'month': 7, 'day': 22})
    start_ld_2 = ecwl.LocalDate({'year': 2016, 'month': 7, 'day': 25})
    # BUGFIX: close the ground-truth files instead of leaking the handles
    # (the original passed bare open() calls to json.load); also dropped
    # the unused cacheKey_1/cacheKey_2 locals.
    with open(dataFile_1+".ground_truth") as gtf_1:
        ground_truth_1 = json.load(gtf_1, object_hook=bju.object_hook)
    with open(dataFile_2+".ground_truth") as gtf_2:
        ground_truth_2 = json.load(gtf_2, object_hook=bju.object_hook)
    # Run both pipelines
    etc.setupRealExample(self, dataFile_1)
    etc.runIntakePipeline(self.testUUID)
    with open(dataFile_2) as df_2:
        self.entries = json.load(df_2, object_hook=bju.object_hook)
    etc.setupRealExampleWithEntries(self)
    etc.runIntakePipeline(self.testUUID)
    # Check results: so far, so good
    api_result = gfc.get_geojson_for_dt(self.testUUID, start_ld_1, start_ld_1)
    self.compare_result(ad.AttrDict({'result': api_result}).result,
                        ad.AttrDict(ground_truth_1).data)
    api_result = gfc.get_geojson_for_dt(self.testUUID, start_ld_2, start_ld_2)
    self.compare_result(ad.AttrDict({'result': api_result}).result,
                        ad.AttrDict(ground_truth_2).data)
    # Reset pipeline to start
    epr.reset_user_to_start(self.testUUID, is_dry_run=False)
    # Now there are no results
    api_result = gfc.get_geojson_for_dt(self.testUUID, start_ld_1, start_ld_1)
    self.assertEqual(api_result, [])
    api_result = gfc.get_geojson_for_dt(self.testUUID, start_ld_2, start_ld_2)
    self.assertEqual(api_result, [])
    # Re-run the pipeline again
    etc.runIntakePipeline(self.testUUID)
    # Should be back to ground truth
    api_result = gfc.get_geojson_for_dt(self.testUUID, start_ld_1, start_ld_1)
    self.compare_result(ad.AttrDict({'result': api_result}).result,
                        ad.AttrDict(ground_truth_1).data)
    api_result = gfc.get_geojson_for_dt(self.testUUID, start_ld_2, start_ld_2)
    self.compare_result(ad.AttrDict({'result': api_result}).result,
                        ad.AttrDict(ground_truth_2).data)
def testResetToTsInMiddleOfPlace(self):
"""
- Load data for both days
- Run pipelines
- Verify that all is well
- Reset to a date between the two
- Verify that analysis data for the first day is unchanged
- Verify that analysis data for the second day does not exist
- Re-run pipelines
- Verify that all is well
"""
# Load all data
dataFile_1 = "emission/tests/data/real_examples/shankari_2016-07-22"
dataFile_2 = "emission/tests/data/real_examples/shankari_2016-07-25"
start_ld_1 = ecwl.LocalDate({'year': 2016, 'month': 7, 'day': 22})
start_ld_2 = ecwl.LocalDate({'year': 2016, 'month': 7, 'day': 25})
cacheKey_1 = "diary/trips-2016-07-22"
cacheKey_2 = "diary/trips-2016-07-25"
ground_truth_1 = json.load(open(dataFile_1+".ground_truth"), object_hook=bju.object_hook)
ground_truth_2 = json.load(open(dataFile_2+".ground_truth"), object_hook=bju.object_hook)
# Run both pipelines
etc.setupRealExample(self, dataFile_1)
etc.runIntakePipeline(self.testUUID)
self.entries = json.load(open(dataFile_2), object_hook = bju.object_hook)
etc.setupRealExampleWithEntries(self)
etc.runIntakePipeline(self.testUUID)
# Check results: so far, so good
api_result = gfc.get_geojson_for_dt(self.testUUID, start_ld_1, start_ld_1)
self.compare_result(ad.AttrDict({'result': api_result}).result,
ad.AttrDict(ground_truth_1).data)
api_result = gfc.get_geojson_for_dt(self.testUUID, start_ld_2, start_ld_2)
self.compare_result(ad.AttrDict({'result': api_result}).result,
ad.AttrDict(ground_truth_2).data)
# Reset pipeline to july 23.
# Note that this is actually 22nd 16:00 PDT, so this is partway
# through the 22nd
reset_ts = arrow.get("2016-07-23").timestamp
epr.reset_user_to_ts(self.testUUID, reset_ts, is_dry_run=False)
# First day is unchanged, except that the last place doesn't have
# exit data.
# TODO: Modify ground truth to capture this change
# Until then, we know that this will fail
# api_result = gfc.get_geojson_for_dt(self.testUUID, start_ld_1, start_ld_1)
# self.compare_result(ad.AttrDict({'result': api_result}).result,
# ad.AttrDict(ground_truth_1).data)
# Second day does not exist
api_result = gfc.get_geojson_for_dt(self.testUUID, start_ld_2, start_ld_2)
logging.debug(json.dumps(api_result, indent=4, default=bju.default))
self.assertEqual(api_result, [])
# Re-run the pipeline again
etc.runIntakePipeline(self.testUUID)
# Should be back to ground truth
api_result = gfc.get_geojson_for_dt(self.testUUID, start_ld_1, start_ld_1)
self.compare_result(ad.AttrDict({'result': api_result}).result,
ad.AttrDict(ground_truth_1).data)
api_result = gfc.get_geojson_for_dt(self.testUUID, start_ld_2, start_ld_2)
self.compare_result(ad.AttrDict({'result': api_result}).result,
ad.AttrDict(ground_truth_2).data)
def testResetToTsInMiddleOfTrip(self):
"""
- Load data for both days
- Run pipelines
- Verify that all is well
- Reset to a date between the two
- Verify that analysis data for the first day is unchanged
- Verify that analysis data for the second day does not exist
- Re-run pipelines
- Verify that all is well
"""
# Load all data
dataFile_1 = "emission/tests/data/real_examples/shankari_2016-07-22"
dataFile_2 = "emission/tests/data/real_examples/shankari_2016-07-25"
start_ld_1 = ecwl.LocalDate({'year': 2016, 'month': 7, 'day': 22})
start_ld_2 = ecwl.LocalDate({'year': 2016, 'month': 7, 'day': 25})
cacheKey_1 = "diary/trips-2016-07-22"
cacheKey_2 = "diary/trips-2016-07-25"
ground_truth_1 = json.load(open(dataFile_1+".ground_truth"), object_hook=bju.object_hook)
ground_truth_2 = json.load(open(dataFile_2+".ground_truth"), object_hook=bju.object_hook)
# Run both pipelines
etc.setupRealExample(self, dataFile_1)
etc.runIntakePipeline(self.testUUID)
self.entries = json.load(open(dataFile_2), object_hook = bju.object_hook)
etc.setupRealExampleWithEntries(self)
etc.runIntakePipeline(self.testUUID)
# Check results: so far, so good
api_result = gfc.get_geojson_for_dt(self.testUUID, start_ld_1, start_ld_1)
self.compare_result(ad.AttrDict({'result': api_result}).result,
ad.AttrDict(ground_truth_1).data)
api_result = gfc.get_geojson_for_dt(self.testUUID, start_ld_2, start_ld_2)
self.compare_result(ad.AttrDict({'result': api_result}).result,
ad.AttrDict(ground_truth_2).data)
# Reset pipeline to july 24.
# Note that this is actually 23nd 16:00 PDT
# This will reset in the middle of the untracked time, which is
# technically a trip, and will allow us to test the trip resetting
# code
reset_ts = arrow.get("2016-07-24").timestamp
epr.reset_user_to_ts(self.testUUID, reset_ts, is_dry_run=False)
# Second day does not exist
api_result = gfc.get_geojson_for_dt(self.testUUID, start_ld_2, start_ld_2)
logging.debug(json.dumps(api_result, indent=4, default=bju.default))
self.assertEqual(api_result, [])
# Re-run the pipeline again
etc.runIntakePipeline(self.testUUID)
# Should be back to ground truth
api_result = gfc.get_geojson_for_dt(self.testUUID, start_ld_1, start_ld_1)
self.compare_result(ad.AttrDict({'result': api_result}).result,
ad.AttrDict(ground_truth_1).data)
api_result = gfc.get_geojson_for_dt(self.testUUID, start_ld_2, start_ld_2)
self.compare_result(ad.AttrDict({'result': api_result}).result,
ad.AttrDict(ground_truth_2).data)
def testResetToFuture(self):
"""
- Load data for both days
- Run pipelines
- Reset to a date after the two
- Verify that all is well
- Re-run pipelines and ensure that there are no errors
"""
# Load all data
dataFile_1 = "emission/tests/data/real_examples/shankari_2016-07-22"
dataFile_2 = "emission/tests/data/real_examples/shankari_2016-07-25"
start_ld_1 = ecwl.LocalDate({'year': 2016, 'month': 7, 'day': 22})
start_ld_2 = ecwl.LocalDate({'year': 2016, 'month': 7, 'day': 25})
cacheKey_1 = "diary/trips-2016-07-22"
cacheKey_2 = "diary/trips-2016-07-25"
ground_truth_1 = json.load(open(dataFile_1+".ground_truth"), object_hook=bju.object_hook)
ground_truth_2 = json.load(open(dataFile_2+".ground_truth"), object_hook=bju.object_hook)
# Run both pipelines
etc.setupRealExample(self, dataFile_1)
etc.runIntakePipeline(self.testUUID)
self.entries = json.load(open(dataFile_2), object_hook = bju.object_hook)
etc.setupRealExampleWithEntries(self)
etc.runIntakePipeline(self.testUUID)
# Reset to a date well after the two days
reset_ts = arrow.get("2017-07-24").timestamp
epr.reset_user_to_ts(self.testUUID, reset_ts, is_dry_run=False)
# Data should be untouched because of early return
api_result = gfc.get_geojson_for_dt(self.testUUID, start_ld_1, start_ld_1)
self.compare_result(ad.AttrDict({'result': api_result}).result,
ad.AttrDict(ground_truth_1).data)
api_result = gfc.get_geojson_for_dt(self.testUUID, start_ld_2, start_ld_2)
self.compare_result(ad.AttrDict({'result': api_result}).result,
ad.AttrDict(ground_truth_2).data)
# Re-running the pipeline again should not affect anything
etc.runIntakePipeline(self.testUUID)
def testResetToPast(self):
"""
- Load data for both days
- Run pipelines
- Verify that all is well
- Reset to a date before both
- Verify that analysis data for the both days is removed
- Re-run pipelines
- Verify that all is well
"""
# Load all data
dataFile_1 = "emission/tests/data/real_examples/shankari_2016-07-22"
dataFile_2 = "emission/tests/data/real_examples/shankari_2016-07-25"
start_ld_1 = ecwl.LocalDate({'year': 2016, 'month': 7, 'day': 22})
start_ld_2 = ecwl.LocalDate({'year': 2016, 'month': 7, 'day': 25})
cacheKey_1 = "diary/trips-2016-07-22"
cacheKey_2 = "diary/trips-2016-07-25"
ground_truth_1 = json.load(open(dataFile_1+".ground_truth"), object_hook=bju.object_hook)
ground_truth_2 = json.load(open(dataFile_2+".ground_truth"), object_hook=bju.object_hook)
# Run both pipelines
etc.setupRealExample(self, dataFile_1)
etc.runIntakePipeline(self.testUUID)
self.entries = json.load(open(dataFile_2), object_hook = bju.object_hook)
etc.setupRealExampleWithEntries(self)
etc.runIntakePipeline(self.testUUID)
# Verify that all is well
api_result = gfc.get_geojson_for_dt(self.testUUID, start_ld_1, start_ld_1)
self.compare_result(ad.AttrDict({'result': api_result}).result,
ad.AttrDict(ground_truth_1).data)
api_result = gfc.get_geojson_for_dt(self.testUUID, start_ld_2, start_ld_2)
self.compare_result(ad.AttrDict({'result': api_result}).result,
ad.AttrDict(ground_truth_2).data)
# Reset to a date well before the two days
reset_ts = arrow.get("2015-07-24").timestamp
epr.reset_user_to_ts(self.testUUID, reset_ts, is_dry_run=False)
# Data should be completely deleted
api_result = gfc.get_geojson_for_dt(self.testUUID, start_ld_1, start_ld_1)
self.assertEqual(api_result, [])
api_result = gfc.get_geojson_for_dt(self.testUUID, start_ld_2, start_ld_2)
self.assertEqual(api_result, [])
# Re-running the pipeline again
etc.runIntakePipeline(self.testUUID)
# Should reconstruct everything
api_result = gfc.get_geojson_for_dt(self.testUUID, start_ld_1, start_ld_1)
self.compare_result(ad.AttrDict({'result': api_result}).result,
ad.AttrDict(ground_truth_1).data)
api_result = gfc.get_geojson_for_dt(self.testUUID, start_ld_2, start_ld_2)
self.compare_result(ad.AttrDict({'result': api_result}).result,
ad.AttrDict(ground_truth_2).data)
if __name__ == '__main__':
    # Configure logging before running the suite so pipeline debug output
    # (e.g. logging.debug calls in the tests) is captured.
    etc.configLogging()
    unittest.main()
| |
"""The tests for the REST sensor platform."""
import unittest
from pytest import raises
from unittest.mock import patch, Mock
import requests
from requests.exceptions import Timeout, MissingSchema, RequestException
import requests_mock
from homeassistant.exceptions import PlatformNotReady
from homeassistant.setup import setup_component
import homeassistant.components.sensor as sensor
import homeassistant.components.rest.sensor as rest
from homeassistant.helpers.config_validation import template
from tests.common import get_test_home_assistant, assert_setup_component
import pytest
class TestRestSensorSetup(unittest.TestCase):
    """Tests for setting up the REST sensor platform.

    Covers config validation, connection failures during setup, and the
    minimum/full GET and POST configurations.
    """

    def setUp(self):
        """Set up things to be run when tests are started."""
        self.hass = get_test_home_assistant()

    def tearDown(self):
        """Stop everything that was started."""
        self.hass.stop()

    def test_setup_missing_config(self):
        """Test setup with configuration missing required entries."""
        # assert_setup_component(0): no sensor should be created
        with assert_setup_component(0):
            assert setup_component(
                self.hass, sensor.DOMAIN, {"sensor": {"platform": "rest"}}
            )

    def test_setup_missing_schema(self):
        """Test setup with resource missing schema."""
        with pytest.raises(MissingSchema):
            rest.setup_platform(
                self.hass,
                {"platform": "rest", "resource": "localhost", "method": "GET"},
                None,
            )

    @patch("requests.Session.send", side_effect=requests.exceptions.ConnectionError())
    def test_setup_failed_connect(self, mock_req):
        """Test setup when connection error occurs."""
        # Use pytest.raises consistently (other tests used the bare `raises`
        # import; normalized here).
        with pytest.raises(PlatformNotReady):
            rest.setup_platform(
                self.hass,
                {"platform": "rest", "resource": "http://localhost"},
                lambda devices, update=True: None,
            )

    @patch("requests.Session.send", side_effect=Timeout())
    def test_setup_timeout(self, mock_req):
        """Test setup when connection timeout occurs."""
        with pytest.raises(PlatformNotReady):
            rest.setup_platform(
                self.hass,
                {"platform": "rest", "resource": "http://localhost"},
                lambda devices, update=True: None,
            )

    @requests_mock.Mocker()
    def test_setup_minimum(self, mock_req):
        """Test setup with minimum configuration."""
        mock_req.get("http://localhost", status_code=200)
        with assert_setup_component(1, "sensor"):
            assert setup_component(
                self.hass,
                "sensor",
                {"sensor": {"platform": "rest", "resource": "http://localhost"}},
            )
        # Normalized assertion order to actual == expected for consistency
        assert mock_req.call_count == 2

    @requests_mock.Mocker()
    def test_setup_minimum_resource_template(self, mock_req):
        """Test setup with minimum configuration (resource_template)."""
        mock_req.get("http://localhost", status_code=200)
        with assert_setup_component(1, "sensor"):
            assert setup_component(
                self.hass,
                "sensor",
                {
                    "sensor": {
                        "platform": "rest",
                        "resource_template": "http://localhost",
                    }
                },
            )
        assert mock_req.call_count == 2

    @requests_mock.Mocker()
    def test_setup_duplicate_resource(self, mock_req):
        """Test setup with duplicate resources."""
        # resource and resource_template are mutually exclusive -> 0 sensors
        mock_req.get("http://localhost", status_code=200)
        with assert_setup_component(0, "sensor"):
            assert setup_component(
                self.hass,
                "sensor",
                {
                    "sensor": {
                        "platform": "rest",
                        "resource": "http://localhost",
                        "resource_template": "http://localhost",
                    }
                },
            )

    @requests_mock.Mocker()
    def test_setup_get(self, mock_req):
        """Test setup with valid configuration."""
        mock_req.get("http://localhost", status_code=200)
        with assert_setup_component(1, "sensor"):
            assert setup_component(
                self.hass,
                "sensor",
                {
                    "sensor": {
                        "platform": "rest",
                        "resource": "http://localhost",
                        "method": "GET",
                        "value_template": "{{ value_json.key }}",
                        "name": "foo",
                        "unit_of_measurement": "MB",
                        "verify_ssl": "true",
                        "timeout": 30,
                        "authentication": "basic",
                        "username": "my username",
                        "password": "my password",
                        "headers": {"Accept": "application/json"},
                    }
                },
            )
        assert mock_req.call_count == 2

    @requests_mock.Mocker()
    def test_setup_post(self, mock_req):
        """Test setup with valid configuration."""
        mock_req.post("http://localhost", status_code=200)
        with assert_setup_component(1, "sensor"):
            assert setup_component(
                self.hass,
                "sensor",
                {
                    "sensor": {
                        "platform": "rest",
                        "resource": "http://localhost",
                        "method": "POST",
                        "value_template": "{{ value_json.key }}",
                        "payload": '{ "device": "toaster"}',
                        "name": "foo",
                        "unit_of_measurement": "MB",
                        "verify_ssl": "true",
                        "timeout": 30,
                        "authentication": "basic",
                        "username": "my username",
                        "password": "my password",
                        "headers": {"Accept": "application/json"},
                    }
                },
            )
        assert mock_req.call_count == 2
class TestRestSensor(unittest.TestCase):
    """Tests for REST sensor platform.

    RestData is replaced by a Mock whose ``update`` side effect just writes
    canned payloads onto ``self.rest.data``; the tests then exercise
    RestSensor.update() against those payloads.
    """

    def setUp(self):
        """Set up things to be run when tests are started."""
        self.hass = get_test_home_assistant()
        self.initial_state = "initial_state"
        self.rest = Mock("rest.RestData")
        self.rest.update = Mock(
            "rest.RestData.update",
            side_effect=self.update_side_effect(
                '{ "key": "' + self.initial_state + '" }'
            ),
        )
        self.name = "foo"
        self.unit_of_measurement = "MB"
        self.device_class = None
        self.value_template = template("{{ value_json.key }}")
        self.value_template.hass = self.hass
        self.force_update = False
        self.resource_template = None
        self.sensor = self._make_sensor(self.value_template, [])

    def tearDown(self):
        """Stop everything that was started."""
        self.hass.stop()

    def _make_sensor(self, value_template, json_attrs):
        """Build a RestSensor from the shared fixture config.

        Extracted to remove the 9-argument constructor call that was
        duplicated verbatim in setUp and six test methods.
        """
        return rest.RestSensor(
            self.hass,
            self.rest,
            self.name,
            self.unit_of_measurement,
            self.device_class,
            value_template,
            json_attrs,
            self.force_update,
            self.resource_template,
        )

    def update_side_effect(self, data):
        """Side effect function for mocking RestData.update()."""
        self.rest.data = data

    def test_name(self):
        """Test the name."""
        assert self.name == self.sensor.name

    def test_unit_of_measurement(self):
        """Test the unit of measurement."""
        assert self.unit_of_measurement == self.sensor.unit_of_measurement

    def test_force_update(self):
        """Test the force_update flag."""
        assert self.force_update == self.sensor.force_update

    def test_state(self):
        """Test the initial state."""
        self.sensor.update()
        assert self.initial_state == self.sensor.state

    def test_update_when_value_is_none(self):
        """Test state gets updated to unknown when sensor returns no data."""
        self.rest.update = Mock(
            "rest.RestData.update", side_effect=self.update_side_effect(None)
        )
        self.sensor.update()
        assert self.sensor.state is None
        assert not self.sensor.available

    def test_update_when_value_changed(self):
        """Test state gets updated when sensor returns a new status."""
        self.rest.update = Mock(
            "rest.RestData.update",
            side_effect=self.update_side_effect('{ "key": "updated_state" }'),
        )
        self.sensor.update()
        assert "updated_state" == self.sensor.state
        assert self.sensor.available

    def test_update_with_no_template(self):
        """Test update when there is no value template."""
        self.rest.update = Mock(
            "rest.RestData.update", side_effect=self.update_side_effect("plain_state")
        )
        self.sensor = self._make_sensor(None, [])
        self.sensor.update()
        assert "plain_state" == self.sensor.state
        assert self.sensor.available

    def test_update_with_json_attrs(self):
        """Test attributes get extracted from a JSON result."""
        self.rest.update = Mock(
            "rest.RestData.update",
            side_effect=self.update_side_effect('{ "key": "some_json_value" }'),
        )
        self.sensor = self._make_sensor(None, ["key"])
        self.sensor.update()
        assert "some_json_value" == self.sensor.device_state_attributes["key"]

    @patch("homeassistant.components.rest.sensor._LOGGER")
    def test_update_with_json_attrs_no_data(self, mock_logger):
        """Test attributes when no JSON result fetched."""
        self.rest.update = Mock(
            "rest.RestData.update", side_effect=self.update_side_effect(None)
        )
        self.sensor = self._make_sensor(None, ["key"])
        self.sensor.update()
        assert {} == self.sensor.device_state_attributes
        assert mock_logger.warning.called

    @patch("homeassistant.components.rest.sensor._LOGGER")
    def test_update_with_json_attrs_not_dict(self, mock_logger):
        """Test attributes are empty when the JSON result is not a dict."""
        self.rest.update = Mock(
            "rest.RestData.update",
            side_effect=self.update_side_effect('["list", "of", "things"]'),
        )
        self.sensor = self._make_sensor(None, ["key"])
        self.sensor.update()
        assert {} == self.sensor.device_state_attributes
        assert mock_logger.warning.called

    @patch("homeassistant.components.rest.sensor._LOGGER")
    def test_update_with_json_attrs_bad_JSON(self, mock_logger):
        """Test attributes are empty when the payload is not valid JSON."""
        self.rest.update = Mock(
            "rest.RestData.update",
            side_effect=self.update_side_effect("This is text rather than JSON data."),
        )
        self.sensor = self._make_sensor(None, ["key"])
        self.sensor.update()
        assert {} == self.sensor.device_state_attributes
        assert mock_logger.warning.called
        assert mock_logger.debug.called

    def test_update_with_json_attrs_and_template(self):
        """Test attributes and state are both extracted from a JSON result."""
        self.rest.update = Mock(
            "rest.RestData.update",
            side_effect=self.update_side_effect(
                '{ "key": "json_state_updated_value" }'
            ),
        )
        self.sensor = self._make_sensor(self.value_template, ["key"])
        self.sensor.update()
        assert "json_state_updated_value" == self.sensor.state
        # Dropped the spurious `, self.force_update` assert message from the
        # original -- it evaluated to False and carried no information.
        assert "json_state_updated_value" == self.sensor.device_state_attributes["key"]
class TestRestData(unittest.TestCase):
    """Tests for RestData."""
    def setUp(self):
        """Set up things to be run when tests are started."""
        self.method = "GET"
        self.resource = "http://localhost"
        self.verify_ssl = True
        self.timeout = 10
        # Positional args: method, resource, then three None placeholders
        # (presumably auth / payload / headers -- confirm against the
        # rest.RestData signature), followed by verify_ssl and timeout.
        self.rest = rest.RestData(
            self.method, self.resource, None, None, None, self.verify_ssl, self.timeout
        )
    @requests_mock.Mocker()
    def test_update(self, mock_req):
        """Test update."""
        # A successful GET stores the raw response body on .data
        mock_req.get("http://localhost", text="test data")
        self.rest.update()
        assert "test data" == self.rest.data
    @patch("requests.Session", side_effect=RequestException)
    def test_update_request_exception(self, mock_req):
        """Test update when a request exception occurs."""
        # A RequestException during update() leaves .data as None
        self.rest.update()
        assert self.rest.data is None
| |
"""Posterior/Prior predictive plot."""
import logging
import warnings
from numbers import Integral
import numpy as np
from ..labels import BaseLabeller
from ..sel_utils import xarray_var_iter
from ..rcparams import rcParams
from ..utils import _var_names
from .plot_utils import default_grid, filter_plotters_list, get_plotting_function
_log = logging.getLogger(__name__)
def plot_ppc(
    data,
    kind="kde",
    alpha=None,
    mean=True,
    observed=True,
    color=None,
    colors=None,
    grid=None,
    figsize=None,
    textsize=None,
    data_pairs=None,
    var_names=None,
    filter_vars=None,
    coords=None,
    flatten=None,
    flatten_pp=None,
    num_pp_samples=None,
    random_seed=None,
    jitter=None,
    animated=False,
    animation_kwargs=None,
    legend=True,
    labeller=None,
    ax=None,
    backend=None,
    backend_kwargs=None,
    group="posterior",
    show=None,
):
    """
    Plot for posterior/prior predictive checks.

    Parameters
    ----------
    data: az.InferenceData object
        :class:`arviz.InferenceData` object containing the observed and posterior/prior
        predictive data.
    kind: str
        Type of plot to display ("kde", "cumulative", or "scatter"). Defaults to `kde`.
    alpha: float
        Opacity of posterior/prior predictive density curves.
        Defaults to 0.2 for ``kind = kde`` and cumulative, for scatter defaults to 0.7.
    mean: bool
        Whether or not to plot the mean posterior/prior predictive distribution.
        Defaults to ``True``.
    observed: bool, default True
        Whether or not to plot the observed data.
    color: str
        Valid matplotlib ``color``. Defaults to ``C0``. Deprecated, use ``colors``.
    colors: list
        List with valid matplotlib colors corresponding to the posterior/prior predictive
        distribution, observed data and mean of the posterior/prior predictive distribution.
        Defaults to ["C0", "k", "C1"].
    grid : tuple
        Number of rows and columns. Defaults to None, the rows and columns are
        automatically inferred.
    figsize: tuple
        Figure size. If None, it will be defined automatically.
    textsize: float
        Text size scaling factor for labels, titles and lines. If None, it will be
        autoscaled based on ``figsize``.
    data_pairs: dict
        Dictionary containing relations between observed data and posterior/prior predictive data.
        Dictionary structure:
        - key = data var_name
        - value = posterior/prior predictive var_name
        For example, ``data_pairs = {'y' : 'y_hat'}``
        If None, it will assume that the observed data and the posterior/prior
        predictive data have the same variable name.
    var_names: list of variable names
        Variables to be plotted, if `None` all variable are plotted. Prefix the
        variables by ``~`` when you want to exclude them from the plot.
    filter_vars: {None, "like", "regex"}, optional, default=None
        If `None` (default), interpret var_names as the real variables names. If "like",
        interpret var_names as substrings of the real variables names. If "regex",
        interpret var_names as regular expressions on the real variables names. A la
        ``pandas.filter``.
    coords: dict
        Dictionary mapping dimensions to selected coordinates to be plotted.
        Dimensions without a mapping specified will include all coordinates for
        that dimension. Defaults to including all coordinates for all
        dimensions if None.
    flatten: list
        List of dimensions to flatten in ``observed_data``. Only flattens across the coordinates
        specified in the ``coords`` argument. Defaults to flattening all of the dimensions.
    flatten_pp: list
        List of dimensions to flatten in posterior_predictive/prior_predictive. Only flattens
        across the coordinates specified in the ``coords`` argument. Defaults to flattening all
        of the dimensions. Dimensions should match flatten excluding dimensions for ``data_pairs``
        parameters. If ``flatten`` is defined and ``flatten_pp`` is None, then
        ``flatten_pp = flatten``.
    num_pp_samples: int
        The number of posterior/prior predictive samples to plot. For ``kind`` = 'scatter' and
        ``animation = False`` it defaults to a maximum of 5 samples and will set jitter to 0.7
        unless defined. Otherwise it defaults to all provided samples.
    random_seed: int
        Random number generator seed passed to ``numpy.random.seed`` to allow
        reproducibility of the plot. By default, no seed will be provided
        and the plot will change each call if a random sample is specified
        by ``num_pp_samples``.
    jitter: float
        If ``kind`` is "scatter", jitter will add random uniform noise to the height
        of the ppc samples and observed data. By default 0.
    animated: bool
        Create an animation of one posterior/prior predictive sample per frame.
        Defaults to ``False``. Only works with matplotlib backend.
        To run animations inside a notebook you have to use the `nbAgg` matplotlib's backend.
        Try with `%matplotlib notebook` or `%matplotlib nbAgg`. You can switch back to the
        default matplotlib's backend with `%matplotlib inline` or `%matplotlib auto`.
        If switching back and forth between matplotlib's backend, you may need to run twice the cell
        with the animation.
        If you experience problems rendering the animation try setting
        `animation_kwargs({'blit':False}`) or changing the matplotlib's backend (e.g. to TkAgg)
        If you run the animation from a script write `ax, ani = az.plot_ppc(.)`
    animation_kwargs : dict
        Keywords passed to :class:`matplotlib.animation.FuncAnimation`. Ignored with
        bokeh backend (animation is matplotlib-only).
    legend : bool
        Add legend to figure. By default ``True``.
    labeller : labeller instance, optional
        Class providing the method ``make_pp_label`` to generate the labels in the plot titles.
        Read the :ref:`label_guide` for more details and usage examples.
    ax: numpy array-like of matplotlib axes or bokeh figures, optional
        A 2D array of locations into which to plot the densities. If not supplied, Arviz will create
        its own array of plot areas (and return it).
    backend: str, optional
        Select plotting backend {"matplotlib","bokeh"}. Default to "matplotlib".
    backend_kwargs: bool, optional
        These are kwargs specific to the backend being used, passed to
        :func:`matplotlib.pyplot.subplots` or :func:`bokeh.plotting.figure`.
        For additional documentation check the plotting method of the backend.
    group: {"prior", "posterior"}, optional
        Specifies which InferenceData group should be plotted. Defaults to 'posterior'.
        Other value can be 'prior'.
    show: bool, optional
        Call backend show function.

    Returns
    -------
    axes: matplotlib axes or bokeh figures

    See Also
    --------
    plot_bpv: Plot Bayesian p-value for observed data and Posterior/Prior predictive.
    plot_lm: Posterior predictive and mean plots for regression-like data.
    plot_ppc: plot for posterior/prior predictive checks.
    plot_ts: Plot timeseries data.

    Examples
    --------
    Plot the observed data KDE overlaid on posterior predictive KDEs.

    .. plot::
        :context: close-figs

        >>> import arviz as az
        >>> data = az.load_arviz_data('radon')
        >>> az.plot_ppc(data, data_pairs={"y":"y"})

    Plot the overlay with empirical CDFs.

    .. plot::
        :context: close-figs

        >>> az.plot_ppc(data, kind='cumulative')

    Use the ``coords`` and ``flatten`` parameters to plot selected variable dimensions
    across multiple plots. We will now modify the dimension ``obs_id`` to contain
    indicate the name of the county where the measure was taken. The change has to
    be done on both ``posterior_predictive`` and ``observed_data`` groups, which is
    why we will use :meth:`~arviz.InferenceData.map` to apply the same function to
    both groups. Afterwards, we will select the counties to be plotted with the
    ``coords`` arg.

    .. plot::
        :context: close-figs

        >>> obs_county = data.posterior["County"][data.constant_data["county_idx"]]
        >>> data = data.assign_coords(obs_id=obs_county, groups="observed_vars")
        >>> az.plot_ppc(data, coords={'obs_id': ['ANOKA', 'BELTRAMI']}, flatten=[])

    Plot the overlay using a stacked scatter plot that is particularly useful
    when the sample sizes are small.

    .. plot::
        :context: close-figs

        >>> az.plot_ppc(data, kind='scatter', flatten=[],
        >>>             coords={'obs_id': ['AITKIN', 'BELTRAMI']})

    Plot random posterior predictive sub-samples.

    .. plot::
        :context: close-figs

        >>> az.plot_ppc(data, num_pp_samples=30, random_seed=7)
    """
    # --- argument validation -------------------------------------------------
    if group not in ("posterior", "prior"):
        raise TypeError("`group` argument must be either `posterior` or `prior`")
    for groups in (f"{group}_predictive", "observed_data"):
        if not hasattr(data, groups):
            raise TypeError(f'`data` argument must have the group "{groups}" for ppcplot')
    if kind.lower() not in ("kde", "cumulative", "scatter"):
        raise TypeError("`kind` argument must be either `kde`, `cumulative`, or `scatter`")
    if colors is None:
        colors = ["C0", "k", "C1"]
    if isinstance(colors, str):
        raise TypeError("colors should be a list with 3 items.")
    if len(colors) != 3:
        raise ValueError("colors should be a list with 3 items.")
    if color is not None:
        # `color` only overrides the predictive-distribution color
        warnings.warn("color has been deprecated in favor of colors", FutureWarning)
        colors[0] = color
    if data_pairs is None:
        data_pairs = {}
    if backend is None:
        backend = rcParams["plot.backend"]
    backend = backend.lower()
    if backend == "bokeh":
        if animated:
            raise TypeError("Animation option is only supported with matplotlib backend.")

    # --- select datasets and variable names ----------------------------------
    observed_data = data.observed_data
    if group == "posterior":
        predictive_dataset = data.posterior_predictive
    elif group == "prior":
        predictive_dataset = data.prior_predictive
    if var_names is None:
        var_names = list(observed_data.data_vars)
    var_names = _var_names(var_names, observed_data, filter_vars)
    pp_var_names = [data_pairs.get(var, var) for var in var_names]
    pp_var_names = _var_names(pp_var_names, predictive_dataset, filter_vars)

    # Default both flatten lists to "all dimensions". Use .sizes consistently
    # (the sample-count computation below already uses it); Dataset.dims as a
    # dim->size mapping is deprecated in recent xarray.
    if flatten_pp is None and flatten is None:
        flatten_pp = list(predictive_dataset.sizes.keys())
    elif flatten_pp is None:
        flatten_pp = flatten
    if flatten is None:
        flatten = list(observed_data.sizes.keys())

    if coords is None:
        coords = {}
    if labeller is None:
        labeller = BaseLabeller()
    if random_seed is not None:
        np.random.seed(random_seed)

    # --- choose which predictive samples to show -----------------------------
    total_pp_samples = predictive_dataset.sizes["chain"] * predictive_dataset.sizes["draw"]
    if num_pp_samples is None:
        if kind == "scatter" and not animated:
            num_pp_samples = min(5, total_pp_samples)
        else:
            num_pp_samples = total_pp_samples
    if (
        not isinstance(num_pp_samples, Integral)
        or num_pp_samples < 1
        or num_pp_samples > total_pp_samples
    ):
        raise TypeError(
            "`num_pp_samples` must be an integer between 1 and " + f"{total_pp_samples}."
        )
    pp_sample_ix = np.random.choice(total_pp_samples, size=num_pp_samples, replace=False)

    # Translate coordinate *values* into positional indices for isel().
    # np.isin replaces the deprecated np.in1d.
    for key in coords.keys():
        coords[key] = np.where(np.isin(observed_data[key], coords[key]))[0]

    # --- build the plotter lists ---------------------------------------------
    obs_plotters = filter_plotters_list(
        list(
            xarray_var_iter(
                observed_data.isel(coords),
                skip_dims=set(flatten),
                var_names=var_names,
                combined=True,
            )
        ),
        "plot_ppc",
    )
    length_plotters = len(obs_plotters)
    # Truncate the predictive plotters to match the (possibly filtered)
    # observed plotters one-to-one.
    pp_plotters = [
        tup
        for _, tup in zip(
            range(length_plotters),
            xarray_var_iter(
                predictive_dataset.isel(coords),
                var_names=pp_var_names,
                skip_dims=set(flatten_pp),
                combined=True,
            ),
        )
    ]
    rows, cols = default_grid(length_plotters, grid=grid)

    ppcplot_kwargs = dict(
        ax=ax,
        length_plotters=length_plotters,
        rows=rows,
        cols=cols,
        figsize=figsize,
        animated=animated,
        obs_plotters=obs_plotters,
        pp_plotters=pp_plotters,
        predictive_dataset=predictive_dataset,
        pp_sample_ix=pp_sample_ix,
        kind=kind,
        alpha=alpha,
        colors=colors,
        jitter=jitter,
        textsize=textsize,
        mean=mean,
        observed=observed,
        total_pp_samples=total_pp_samples,
        legend=legend,
        labeller=labeller,
        group=group,
        animation_kwargs=animation_kwargs,
        num_pp_samples=num_pp_samples,
        backend_kwargs=backend_kwargs,
        show=show,
    )

    # TODO: Add backend kwargs
    plot = get_plotting_function("plot_ppc", "ppcplot", backend)
    axes = plot(**ppcplot_kwargs)
    return axes
| |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import functools
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core.client_options import ClientOptions
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
try:
    # Newer google-api-core exposes a method-default sentinel type; prefer it
    # so default-retry handling matches the generated surface.
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError:  # pragma: NO COVER
    # Older google-api-core without the sentinel: fall back to a plain object.
    OptionalRetry = Union[retries.Retry, object]  # type: ignore
from google.cloud.dialogflowcx_v3beta1.services.deployments import pagers
from google.cloud.dialogflowcx_v3beta1.types import deployment
from google.protobuf import timestamp_pb2 # type: ignore
from .transports.base import DeploymentsTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import DeploymentsGrpcAsyncIOTransport
from .client import DeploymentsClient
class DeploymentsAsyncClient:
    """Service for managing
    [Deployments][google.cloud.dialogflow.cx.v3beta1.Deployment].
    """
    # The async client is a thin wrapper: all endpoint/path logic is
    # delegated to the synchronous DeploymentsClient and re-exported here.
    _client: DeploymentsClient
    DEFAULT_ENDPOINT = DeploymentsClient.DEFAULT_ENDPOINT
    DEFAULT_MTLS_ENDPOINT = DeploymentsClient.DEFAULT_MTLS_ENDPOINT
    deployment_path = staticmethod(DeploymentsClient.deployment_path)
    parse_deployment_path = staticmethod(DeploymentsClient.parse_deployment_path)
    experiment_path = staticmethod(DeploymentsClient.experiment_path)
    parse_experiment_path = staticmethod(DeploymentsClient.parse_experiment_path)
    test_case_result_path = staticmethod(DeploymentsClient.test_case_result_path)
    parse_test_case_result_path = staticmethod(
        DeploymentsClient.parse_test_case_result_path
    )
    version_path = staticmethod(DeploymentsClient.version_path)
    parse_version_path = staticmethod(DeploymentsClient.parse_version_path)
    common_billing_account_path = staticmethod(
        DeploymentsClient.common_billing_account_path
    )
    parse_common_billing_account_path = staticmethod(
        DeploymentsClient.parse_common_billing_account_path
    )
    common_folder_path = staticmethod(DeploymentsClient.common_folder_path)
    parse_common_folder_path = staticmethod(DeploymentsClient.parse_common_folder_path)
    common_organization_path = staticmethod(DeploymentsClient.common_organization_path)
    parse_common_organization_path = staticmethod(
        DeploymentsClient.parse_common_organization_path
    )
    common_project_path = staticmethod(DeploymentsClient.common_project_path)
    parse_common_project_path = staticmethod(
        DeploymentsClient.parse_common_project_path
    )
    common_location_path = staticmethod(DeploymentsClient.common_location_path)
    parse_common_location_path = staticmethod(
        DeploymentsClient.parse_common_location_path
    )
    @classmethod
    def from_service_account_info(cls, info: dict, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
            info.
        Args:
            info (dict): The service account private key info.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.
        Returns:
            DeploymentsAsyncClient: The constructed client.
        """
        return DeploymentsClient.from_service_account_info.__func__(DeploymentsAsyncClient, info, *args, **kwargs)  # type: ignore
    @classmethod
    def from_service_account_file(cls, filename: str, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
            file.
        Args:
            filename (str): The path to the service account private key json
                file.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.
        Returns:
            DeploymentsAsyncClient: The constructed client.
        """
        return DeploymentsClient.from_service_account_file.__func__(DeploymentsAsyncClient, filename, *args, **kwargs)  # type: ignore
    from_service_account_json = from_service_account_file
    @classmethod
    def get_mtls_endpoint_and_cert_source(
        cls, client_options: Optional[ClientOptions] = None
    ):
        """Return the API endpoint and client cert source for mutual TLS.
        The client cert source is determined in the following order:
        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
        client cert source is None.
        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
        default client cert source exists, use the default one; otherwise the client cert
        source is None.
        The API endpoint is determined in the following order:
        (1) if `client_options.api_endpoint` is provided, use the provided one.
        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
        default mTLS endpoint; if the environment variable is "never", use the default API
        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
        use the default API endpoint.
        More details can be found at https://google.aip.dev/auth/4114.
        Args:
            client_options (google.api_core.client_options.ClientOptions): Custom options for the
                client. Only the `api_endpoint` and `client_cert_source` properties may be used
                in this method.
        Returns:
            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
                client cert source to use.
        Raises:
            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
        """
        return DeploymentsClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
    @property
    def transport(self) -> DeploymentsTransport:
        """Returns the transport used by the client instance.
        Returns:
            DeploymentsTransport: The transport used by the client instance.
        """
        return self._client.transport
    get_transport_class = functools.partial(
        type(DeploymentsClient).get_transport_class, type(DeploymentsClient)
    )
    def __init__(
        self,
        *,
        credentials: Optional[ga_credentials.Credentials] = None,
        transport: Union[str, DeploymentsTransport] = "grpc_asyncio",
        client_options: Optional[ClientOptions] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
    ) -> None:
        """Instantiates the deployments client.
        Args:
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            transport (Union[str, ~.DeploymentsTransport]): The
                transport to use. If set to None, a transport is chosen
                automatically.
            client_options (ClientOptions): Custom options for the client. It
                won't take effect if a ``transport`` instance is provided.
                (1) The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
                environment variable can also be used to override the endpoint:
                "always" (always use the default mTLS endpoint), "never" (always
                use the default regular endpoint) and "auto" (auto switch to the
                default mTLS endpoint if client certificate is present, this is
                the default value). However, the ``api_endpoint`` property takes
                precedence if provided.
                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                is "true", then the ``client_cert_source`` property can be used
                to provide client certificate for mutual TLS transport. If
                not provided, the default SSL client certificate will be used if
                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
                set, no client certificate will be used.
        Raises:
            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        self._client = DeploymentsClient(
            credentials=credentials,
            transport=transport,
            client_options=client_options,
            client_info=client_info,
        )
    async def list_deployments(
        self,
        request: Optional[Union[deployment.ListDeploymentsRequest, dict]] = None,
        *,
        parent: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Optional[float] = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> pagers.ListDeploymentsAsyncPager:
        r"""Returns the list of all deployments in the specified
        [Environment][google.cloud.dialogflow.cx.v3beta1.Environment].
        .. code-block:: python
            from google.cloud import dialogflowcx_v3beta1
            def sample_list_deployments():
                # Create a client
                client = dialogflowcx_v3beta1.DeploymentsClient()
                # Initialize request argument(s)
                request = dialogflowcx_v3beta1.ListDeploymentsRequest(
                    parent="parent_value",
                )
                # Make the request
                page_result = client.list_deployments(request=request)
                # Handle the response
                for response in page_result:
                    print(response)
        Args:
            request (Union[google.cloud.dialogflowcx_v3beta1.types.ListDeploymentsRequest, dict]):
                The request object. The request message for
                [Deployments.ListDeployments][google.cloud.dialogflow.cx.v3beta1.Deployments.ListDeployments].
            parent (:class:`str`):
                Required. The
                [Environment][google.cloud.dialogflow.cx.v3beta1.Environment]
                to list all environments for. Format:
                ``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>/environments/<Environment ID>``.
                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        Returns:
            google.cloud.dialogflowcx_v3beta1.services.deployments.pagers.ListDeploymentsAsyncPager:
                The response message for
                [Deployments.ListDeployments][google.cloud.dialogflow.cx.v3beta1.Deployments.ListDeployments].
                Iterating over this object will yield results and
                resolve additional pages automatically.
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )
        request = deployment.ListDeploymentsRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if parent is not None:
            request.parent = parent
        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.list_deployments,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )
        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
        )
        # Send the request.
        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
        # This method is paged; wrap the response in a pager, which provides
        # an `__aiter__` convenience method.
        response = pagers.ListDeploymentsAsyncPager(
            method=rpc, request=request, response=response, metadata=metadata,
        )
        # Done; return the response.
        return response
    async def get_deployment(
        self,
        request: Optional[Union[deployment.GetDeploymentRequest, dict]] = None,
        *,
        name: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Optional[float] = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> deployment.Deployment:
        r"""Retrieves the specified
        [Deployment][google.cloud.dialogflow.cx.v3beta1.Deployment].
        .. code-block:: python
            from google.cloud import dialogflowcx_v3beta1
            def sample_get_deployment():
                # Create a client
                client = dialogflowcx_v3beta1.DeploymentsClient()
                # Initialize request argument(s)
                request = dialogflowcx_v3beta1.GetDeploymentRequest(
                    name="name_value",
                )
                # Make the request
                response = client.get_deployment(request=request)
                # Handle the response
                print(response)
        Args:
            request (Union[google.cloud.dialogflowcx_v3beta1.types.GetDeploymentRequest, dict]):
                The request object. The request message for
                [Deployments.GetDeployment][google.cloud.dialogflow.cx.v3beta1.Deployments.GetDeployment].
            name (:class:`str`):
                Required. The name of the
                [Deployment][google.cloud.dialogflow.cx.v3beta1.Deployment].
                Format:
                ``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>/environments/<Environment ID>/deployments/<Deployment ID>``.
                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        Returns:
            google.cloud.dialogflowcx_v3beta1.types.Deployment:
                Represents a deployment in an
                environment. A deployment happens when a
                flow version configured to be active in
                the environment. You can configure
                running pre-deployment steps, e.g.
                running validation test cases,
                experiment auto-rollout, etc.
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )
        request = deployment.GetDeploymentRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if name is not None:
            request.name = name
        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.get_deployment,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )
        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )
        # Send the request.
        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
        # Done; return the response.
        return response
    async def __aenter__(self):
        return self
    async def __aexit__(self, exc_type, exc, tb):
        await self.transport.close()
# Attach the installed package's version to outgoing request headers; fall
# back to a generic ClientInfo when the distribution metadata is missing
# (e.g. when run from a source checkout).
# NOTE(review): this deliberately rebinds the DEFAULT_CLIENT_INFO name that
# was imported from .transports.base above.
try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution(
            "google-cloud-dialogflowcx",
        ).version,
    )
except pkg_resources.DistributionNotFound:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("DeploymentsAsyncClient",)
| |
from baselines.common import Dataset, explained_variance, fmt_row, zipsame
from baselines import logger
import baselines.common.tf_util as U
import tensorflow as tf, numpy as np
import time
from baselines.common.mpi_adam import MpiAdam
from baselines.common.mpi_moments import mpi_moments
from mpi4py import MPI
from collections import deque
def traj_segment_generator(pi, env, horizon, stochastic):
    """Roll out policy `pi` in `env` forever, yielding a dict of fixed-length
    (`horizon`) trajectory arrays each time `horizon` steps have been taken.

    Yielded keys: per-step arrays "ob", "rew", "vpred", "new" (episode-start
    flags), "ac", "prevac"; scalar "nextvpred" (bootstrap value for the step
    after the segment, zeroed at episode starts); and lists "ep_rets" /
    "ep_lens" for episodes that *completed* within this segment.
    """
    t = 0
    ac = env.action_space.sample() # not used, just so we have the datatype
    new = True # marks if we're on first timestep of an episode
    ob = env.reset()
    cur_ep_ret = 0 # return in current episode
    cur_ep_len = 0 # len of current episode
    ep_rets = [] # returns of completed episodes in this segment
    ep_lens = [] # lengths of ...
    # Initialize history arrays
    obs = np.array([ob for _ in range(horizon)])
    rews = np.zeros(horizon, 'float32')
    vpreds = np.zeros(horizon, 'float32')
    news = np.zeros(horizon, 'int32')
    acs = np.array([ac for _ in range(horizon)])
    prevacs = acs.copy()
    while True:
        prevac = ac
        ac, vpred = pi.act(stochastic, ob)
        # Slight weirdness here because we need value function at time T
        # before returning segment [0, T-1] so we get the correct
        # terminal value
        if t > 0 and t % horizon == 0:
            yield {"ob" : obs, "rew" : rews, "vpred" : vpreds, "new" : news,
                    "ac" : acs, "prevac" : prevacs, "nextvpred": vpred * (1 - new),
                    "ep_rets" : ep_rets, "ep_lens" : ep_lens}
            # Be careful!!! if you change the downstream algorithm to aggregate
            # several of these batches, then be sure to do a deepcopy
            # (the history arrays above are reused in place for the next segment).
            ep_rets = []
            ep_lens = []
        i = t % horizon
        obs[i] = ob
        vpreds[i] = vpred
        news[i] = new
        acs[i] = ac
        prevacs[i] = prevac
        ob, rew, new, _ = env.step(ac)
        rews[i] = rew
        cur_ep_ret += rew
        cur_ep_len += 1
        if new:
            # Episode finished: record its stats and reset the environment.
            ep_rets.append(cur_ep_ret)
            ep_lens.append(cur_ep_len)
            cur_ep_ret = 0
            cur_ep_len = 0
            ob = env.reset()
        t += 1
def add_vtarg_and_adv(seg, gamma, lam):
    """Annotate a trajectory segment in place with GAE(lambda) advantages
    (key "adv") and TD(lambda) value targets (key "tdlamret").
    """
    rewards = seg["rew"]
    horizon = len(rewards)
    # Append the bootstrap entries so that index t+1 is always valid; the
    # trailing 0 on "new" matches nextvpred already being zeroed on episode end.
    episode_starts = np.append(seg["new"], 0)
    values = np.append(seg["vpred"], seg["nextvpred"])
    advantages = np.empty(horizon, 'float32')
    running_gae = 0
    # Classic backward GAE recursion: A_t = delta_t + gamma*lam*A_{t+1},
    # cut off at episode boundaries via the non-terminal mask.
    for t in range(horizon - 1, -1, -1):
        nonterminal = 1 - episode_starts[t + 1]
        td_error = rewards[t] + gamma * values[t + 1] * nonterminal - values[t]
        running_gae = td_error + gamma * lam * nonterminal * running_gae
        advantages[t] = running_gae
    seg["adv"] = advantages
    seg["tdlamret"] = advantages + seg["vpred"]
def learn(env, policy_func, *,
        timesteps_per_batch, # timesteps per actor per update
        clip_param, entcoeff, # clipping parameter epsilon, entropy coeff
        optim_epochs, optim_stepsize, optim_batchsize,# optimization hypers
        gamma, lam, # advantage estimation
        max_timesteps=0, max_episodes=0, max_iters=0, max_seconds=0, # time constraint
        callback=None, # you can do anything in the callback, since it takes locals(), globals()
        schedule='constant' # annealing for stepsize parameters (epsilon and adam)
        ):
    """Train a policy on `env` with clipped-surrogate PPO.

    Builds the PPO loss graph (clipped policy surrogate, entropy bonus and
    clipped value loss), then alternates rollout collection with several
    Adam epochs over each batch until one of the time constraints fires.
    Exactly one of max_timesteps / max_episodes / max_iters / max_seconds
    must be positive.
    """
    # Setup losses and stuff
    # ----------------------------------------
    ob_space = env.observation_space
    ac_space = env.action_space
    pi = policy_func("pi", ob_space, ac_space) # Construct network for new policy
    oldpi = policy_func("oldpi", ob_space, ac_space) # Network for old policy
    atarg = tf.placeholder(dtype=tf.float32, shape=[None]) # Target advantage function (if applicable)
    ret = tf.placeholder(dtype=tf.float32, shape=[None]) # Empirical return
    lrmult = tf.placeholder(name='lrmult', dtype=tf.float32, shape=[]) # learning rate multiplier, updated with schedule
    clip_param = clip_param * lrmult # Annealed clipping parameter epsilon
    ob = U.get_placeholder_cached(name="ob")
    ac = pi.pdtype.sample_placeholder([None])
    kloldnew = oldpi.pd.kl(pi.pd)
    ent = pi.pd.entropy()
    meankl = U.mean(kloldnew)
    meanent = U.mean(ent)
    pol_entpen = (-entcoeff) * meanent
    ratio = tf.exp(pi.pd.logp(ac) - oldpi.pd.logp(ac)) # pnew / pold
    surr1 = ratio * atarg # surrogate from conservative policy iteration
    surr2 = U.clip(ratio, 1.0 - clip_param, 1.0 + clip_param) * atarg #
    pol_surr = - U.mean(tf.minimum(surr1, surr2)) # PPO's pessimistic surrogate (L^CLIP)
    vfloss1 = tf.square(pi.vpred - ret)
    vpredclipped = oldpi.vpred + tf.clip_by_value(pi.vpred - oldpi.vpred, -clip_param, clip_param)
    vfloss2 = tf.square(vpredclipped - ret)
    vf_loss = .5 * U.mean(tf.maximum(vfloss1, vfloss2)) # we do the same clipping-based trust region for the value function
    total_loss = pol_surr + pol_entpen + vf_loss
    losses = [pol_surr, pol_entpen, vf_loss, meankl, meanent]
    loss_names = ["pol_surr", "pol_entpen", "vf_loss", "kl", "ent"]
    var_list = pi.get_trainable_variables()
    # One call returns every loss plus the flat gradient of the total loss.
    lossandgrad = U.function([ob, ac, atarg, ret, lrmult], losses + [U.flatgrad(total_loss, var_list)])
    adam = MpiAdam(var_list)
    # Copies the current policy weights into the "old" policy snapshot.
    assign_old_eq_new = U.function([],[], updates=[tf.assign(oldv, newv)
        for (oldv, newv) in zipsame(oldpi.get_variables(), pi.get_variables())])
    compute_losses = U.function([ob, ac, atarg, ret, lrmult], losses)
    U.initialize()
    adam.sync()
    # Prepare for rollouts
    # ----------------------------------------
    seg_gen = traj_segment_generator(pi, env, timesteps_per_batch, stochastic=True)
    episodes_so_far = 0
    timesteps_so_far = 0
    iters_so_far = 0
    tstart = time.time()
    lenbuffer = deque(maxlen=100) # rolling buffer for episode lengths
    rewbuffer = deque(maxlen=100) # rolling buffer for episode rewards
    assert sum([max_iters>0, max_timesteps>0, max_episodes>0, max_seconds>0])==1, "Only one time constraint permitted"
    while True:
        if callback: callback(locals(), globals())
        if max_timesteps and timesteps_so_far >= max_timesteps:
            break
        elif max_episodes and episodes_so_far >= max_episodes:
            break
        elif max_iters and iters_so_far >= max_iters:
            break
        elif max_seconds and time.time() - tstart >= max_seconds:
            break
        if schedule == 'constant':
            cur_lrmult = 1.0
        elif schedule == 'linear':
            cur_lrmult = max(1.0 - float(timesteps_so_far) / max_timesteps, 0)
        else:
            raise NotImplementedError
        logger.log("********** Iteration %i ************"%iters_so_far)
        seg = seg_gen.__next__()
        add_vtarg_and_adv(seg, gamma, lam)
        # ob, ac, atarg, ret, td1ret = map(np.concatenate, (obs, acs, atargs, rets, td1rets))
        ob, ac, atarg, tdlamret = seg["ob"], seg["ac"], seg["adv"], seg["tdlamret"]
        vpredbefore = seg["vpred"] # predicted value function before update
        atarg = (atarg - atarg.mean()) / atarg.std() # standardized advantage function estimate
        d = Dataset(dict(ob=ob, ac=ac, atarg=atarg, vtarg=tdlamret), shuffle=not pi.recurrent)
        optim_batchsize = optim_batchsize or ob.shape[0]
        if hasattr(pi, "ob_rms"): pi.ob_rms.update(ob) # update running mean/std for policy
        assign_old_eq_new() # set old parameter values to new parameter values
        logger.log("Optimizing...")
        logger.log(fmt_row(13, loss_names))
        # Here we do a bunch of optimization epochs over the data
        for _ in range(optim_epochs):
            losses = [] # list of tuples, each of which gives the loss for a minibatch
            for batch in d.iterate_once(optim_batchsize):
                *newlosses, g = lossandgrad(batch["ob"], batch["ac"], batch["atarg"], batch["vtarg"], cur_lrmult)
                adam.update(g, optim_stepsize * cur_lrmult)
                losses.append(newlosses)
            logger.log(fmt_row(13, np.mean(losses, axis=0)))
        logger.log("Evaluating losses...")
        losses = []
        for batch in d.iterate_once(optim_batchsize):
            newlosses = compute_losses(batch["ob"], batch["ac"], batch["atarg"], batch["vtarg"], cur_lrmult)
            losses.append(newlosses)
        meanlosses,_,_ = mpi_moments(losses, axis=0)
        logger.log(fmt_row(13, meanlosses))
        for (lossval, name) in zipsame(meanlosses, loss_names):
            logger.record_tabular("loss_"+name, lossval)
        logger.record_tabular("ev_tdlam_before", explained_variance(vpredbefore, tdlamret))
        # Aggregate episode stats across all MPI workers before logging.
        lrlocal = (seg["ep_lens"], seg["ep_rets"]) # local values
        listoflrpairs = MPI.COMM_WORLD.allgather(lrlocal) # list of tuples
        lens, rews = map(flatten_lists, zip(*listoflrpairs))
        lenbuffer.extend(lens)
        rewbuffer.extend(rews)
        logger.record_tabular("EpLenMean", np.mean(lenbuffer))
        logger.record_tabular("EpRewMean", np.mean(rewbuffer))
        logger.record_tabular("EpThisIter", len(lens))
        episodes_so_far += len(lens)
        timesteps_so_far += sum(lens)
        iters_so_far += 1
        logger.record_tabular("EpisodesSoFar", episodes_so_far)
        logger.record_tabular("TimestepsSoFar", timesteps_so_far)
        logger.record_tabular("TimeElapsed", time.time() - tstart)
        # Only the MPI root rank writes the tabular log.
        if MPI.COMM_WORLD.Get_rank()==0:
            logger.dump_tabular()
def flatten_lists(listoflists):
    """Concatenate a list of lists into a single flat list."""
    flat = []
    for sublist in listoflists:
        flat.extend(sublist)
    return flat
| |
#=========================================================================
# pisa_beq_test.py
#=========================================================================
import pytest
import random
import pisa_encoding
from pymtl import Bits
from PisaSim import PisaSim
from pisa_inst_test_utils import *
#-------------------------------------------------------------------------
# gen_basic_test
#-------------------------------------------------------------------------
def gen_basic_test():
  """Basic branch-taken smoke test.

  r3 accumulates a control-flow signature: the instruction in the branch
  shadow would OR in 0b01, the branch target ORs in 0b10, so the final
  value 0b10 proves the (equal-operand) beq was taken.
  """
  return """
    # Use r3 to track the control flow pattern
    addiu r3, r0, 0
    mfc0 r1, mngr2proc < 2
    mfc0 r2, mngr2proc < 2
    nop
    nop
    nop
    nop
    nop
    nop
    nop
    nop
    # This branch should be taken
    beq r1, r2, label_a
    ori r3, r3, 0b01
    nop
    nop
    nop
    nop
    nop
    nop
    nop
    nop
  label_a:
    ori r3, r3, 0b10
    # Only the second bit should be set if branch was taken
    mtc0 r3, proc2mngr > 0b10
  """
#-------------------------------------------------------------------------
# gen_src0_byp_taken_test
#-------------------------------------------------------------------------
def gen_src0_byp_taken_test():
  """Bypass tests on src0 at delays 5..0 with equal operands (branch taken)."""
  return [ gen_br2_src0_byp_test( delay, "beq", val, val, True )
           for delay, val in zip( range( 5, -1, -1 ), range( 1, 7 ) ) ]
#-------------------------------------------------------------------------
# gen_src0_byp_nottaken_test
#-------------------------------------------------------------------------
def gen_src0_byp_nottaken_test():
  """Bypass tests on src0 at delays 5..0 with unequal operands (not taken)."""
  return [ gen_br2_src0_byp_test( delay, "beq", val, val + 1, False )
           for delay, val in zip( range( 5, -1, -1 ), range( 1, 7 ) ) ]
#-------------------------------------------------------------------------
# gen_src1_byp_taken_test
#-------------------------------------------------------------------------
def gen_src1_byp_taken_test():
  """Bypass tests on src1 at delays 5..0 with equal operands (branch taken)."""
  return [ gen_br2_src1_byp_test( delay, "beq", val, val, True )
           for delay, val in zip( range( 5, -1, -1 ), range( 1, 7 ) ) ]
#-------------------------------------------------------------------------
# gen_src1_byp_nottaken_test
#-------------------------------------------------------------------------
def gen_src1_byp_nottaken_test():
  """Bypass tests on src1 at delays 5..0 with unequal operands (not taken)."""
  return [ gen_br2_src1_byp_test( delay, "beq", val, val + 1, False )
           for delay, val in zip( range( 5, -1, -1 ), range( 1, 7 ) ) ]
#-------------------------------------------------------------------------
# gen_srcs_byp_taken_test
#-------------------------------------------------------------------------
def gen_srcs_byp_taken_test():
  """Bypass tests on both sources at delays 5..0 with equal operands (taken)."""
  return [ gen_br2_srcs_byp_test( delay, "beq", val, val, True )
           for delay, val in zip( range( 5, -1, -1 ), range( 1, 7 ) ) ]
#-------------------------------------------------------------------------
# gen_srcs_byp_nottaken_test
#-------------------------------------------------------------------------
def gen_srcs_byp_nottaken_test():
  """Bypass tests on both sources at delays 5..0, unequal operands (not taken)."""
  return [ gen_br2_srcs_byp_test( delay, "beq", val, val + 1, False )
           for delay, val in zip( range( 5, -1, -1 ), range( 1, 7 ) ) ]
#-------------------------------------------------------------------------
# gen_src0_eq_src1_test
#-------------------------------------------------------------------------
def gen_src0_eq_src1_test():
  """Branch where both operands come from the same register, so the
  comparison is always equal and the branch is always taken."""
  return [
    gen_br2_src0_eq_src1_test( "beq", 1, True ),
  ]
#-------------------------------------------------------------------------
# gen_value_test
#-------------------------------------------------------------------------
def gen_value_test():
  """Directed value tests: equal operands are taken, unequal are not,
  including sign-boundary and large unsigned patterns."""
  cases = [
    ( -1,         -1,         True  ),
    ( -1,          0,         False ),
    ( -1,          1,         False ),
    (  0,         -1,         False ),
    (  0,          0,         True  ),
    (  0,          1,         False ),
    (  1,         -1,         False ),
    (  1,          0,         False ),
    (  1,          1,         True  ),
    ( 0xfffffff7, 0xfffffff7, True  ),
    ( 0x7fffffff, 0x7fffffff, True  ),
    ( 0xfffffff7, 0x7fffffff, False ),
    ( 0x7fffffff, 0xfffffff7, False ),
  ]
  return [ gen_br2_value_test( "beq", src0, src1, taken )
           for src0, src1, taken in cases ]
#-------------------------------------------------------------------------
# gen_random_test
#-------------------------------------------------------------------------
def gen_random_test():
  """Randomized value tests: 25 random 32-bit operand pairs with the
  expected branch outcome derived from operand equality.

  Uses range() rather than the Python-2-only xrange() so the file also
  runs under Python 3; for 25 iterations the cost difference is nil.
  """
  asm_code = []
  for _ in range(25):
    src0  = Bits( 32, random.randint(0,0xffffffff) )
    src1  = Bits( 32, random.randint(0,0xffffffff) )
    taken = ( src0 == src1 )
    asm_code.append( gen_br2_value_test( "beq", src0.uint(), src1.uint(), taken ) )
  return asm_code
#-------------------------------------------------------------------------
# test_basic
#-------------------------------------------------------------------------
@pytest.mark.parametrize( "name,test", [
  asm_test( gen_basic_test ),
  asm_test( gen_src0_byp_taken_test ),
  asm_test( gen_src0_byp_nottaken_test ),
  asm_test( gen_src1_byp_taken_test ),
  asm_test( gen_src1_byp_nottaken_test ),
  asm_test( gen_srcs_byp_taken_test ),
  asm_test( gen_srcs_byp_nottaken_test ),
  asm_test( gen_src0_eq_src1_test ),
  asm_test( gen_value_test ),
  asm_test( gen_random_test ),
])
def test( name, test ):
  """Assemble the program produced by each generator and run it on the
  PISA functional simulator; the embedded mtc0 checks verify results."""
  sim = PisaSim( trace_en=True )
  sim.load( pisa_encoding.assemble( test() ) )
  sim.run()
| |
import copy
from CellData import CellData
__author__ = 'Sergey'
class Sudoku(object):
def __init__(self, int_input_array):
self.solved = [[0 for i in range(0, 9)] for i in range(0, 9)]
for i in range(9):
for j in range(9):
self.solved[i][j] = CellData(int_input_array[i][j])
@property
def is_solved(self):
for i in range(9):
for j in range(9):
if not self.solved[i][j].is_solved:
return False
return True
def solve(self):
for i in range(20):
self.loner()
if self.is_solved:
return
self.hidden_loner()
if self.is_solved:
return
self.open_pairs()
if self.is_solved:
return
self.hold_candidat()
if self.is_solved:
return
    def hold_candidat(self):
        """Locked-candidates solving step; not implemented yet, so a no-op."""
        # TODO: need to implement
        pass
    def open_pairs(self):
        """Apply the 'naked pair' technique to every row, column and sector:
        when two cells of a unit share the same two candidates, remove those
        two values from every other cell of that unit."""
        # row
        for i in range(9):
            pair, positions = self.find_open_pairs(self.get_row_cells(i))
            if pair is None:
                continue
            # Strip the pair's values from every cell outside the pair itself.
            for j in range(9):
                if j in positions:
                    continue
                for s in pair:
                    if s in self.solved[i][j].suggests:
                        self.solved[i][j].suggests.remove(s)
        #col
        for j in range(9):
            pair, positions = self.find_open_pairs(self.get_col_cells(j))
            if pair is None:
                continue
            for i in range(9):
                if i in positions:
                    continue
                for s in pair:
                    if s in self.solved[i][j].suggests:
                        self.solved[i][j].suggests.remove(s)
        #sect
        for i in range(0, 9, 3):
            for j in range(0, 9, 3):
                pair, positions = self.find_open_pairs(self.get_sect_cells(i, j))
                if pair is None:
                    continue
                for place in range(9):
                    if place in positions:
                        continue
                    # Translate the in-sector index back to grid coordinates.
                    (x, y) = self.get_cell_position_in_sect(i, j, place)
                    for s in pair:
                        if s in self.solved[x][y].suggests:
                            self.solved[x][y].suggests.remove(s)
def find_open_pairs(self, cells):
expected_pair = [cell for cell in cells if len(cell.suggests) == 2]
pair = []
for i in range(len(expected_pair)):
for j in range(i, len(expected_pair)- i):
result_length = len(set(expected_pair[i].suggests) - set(expected_pair[j].suggests))
if result_length == 0:
pair = expected_pair[i].suggests
if not pair:
return None, None
positions = []
for i in range(9):
if len(set(pair) - set(cells[i].suggests)) == 0 and len(cells[i].suggests) == 2:
positions.append(i)
return (pair, positions) if len(positions) == 2 else (None, None)
    def hidden_loner(self):
        """Apply the 'hidden single' technique to every row, column and
        sector, repeating whole passes until a pass makes no change.

        A hidden single is a candidate value that fits only one cell of a
        unit; that cell's candidates are collapsed to the single value and
        the cell is asked to mark itself solved.
        """
        # row.
        while True:
            is_updated = False
            for i in range(9):
                result = self.find_hidden_longer(self.get_row_cells(i))
                if result is None:
                    continue
                # Each entry is a (value, in-unit position) tuple.
                for (value, place) in result:
                    self.solved[i][place].suggests = [value, ]
                    self.solved[i][place].mark_solved()
                    is_updated = True
            # col
            for j in range(9):
                result = self.find_hidden_longer(self.get_col_cells(j))
                if result is None:
                    continue
                for (value, place) in result:
                    self.solved[place][j].suggests = [value, ]
                    self.solved[place][j].mark_solved()
                    is_updated = True
            # sect
            for i in range(0, 9, 3):
                for j in range(0, 9, 3):
                    result = self.find_hidden_longer(self.get_sect_cells(i, j))
                    if result is None:
                        continue
                    for (value, place) in result:
                        # Translate the in-sector index to grid coordinates.
                        (x, y) = self.get_cell_position_in_sect(i, j, place)
                        self.solved[x][y].suggests = [value, ]
                        self.solved[x][y].mark_solved()
                        is_updated = True
            if not is_updated:
                break
def get_cell_position_in_sect(self, i_delta, j_delta, cell_place):
"""
cell_place: 0,1,2
0,1,2,3,4,5,6,7,8 -->> 3,4,5 -->>
6,7,8
:param i_delta:
:param j_delta:
:param cell_place:
:return:
"""
i_result = 0
j_result = 0
if 0 <= cell_place < 3:
# i = 0, j = [0,1,2]
i_result = 0 + i_delta
j_result = cell_place + j_delta
elif 3 <= cell_place < 6:
# i=1, j = [0,1,2]
i_result = 1 + i_delta
j_result = cell_place + j_delta - 3
elif 6 <= cell_place <=8:
# i=2 j = [0,1,2]
i_result = 2 + i_delta
j_result = cell_place + j_delta - 6
return (i_result, j_result)
    def find_hidden_longer(self, cells):
        """Find 'hidden single' digits in one unit (row, column or section).

        A digit still missing from the unit that survives in exactly one
        cell's candidate list must go in that cell.

        :param cells: the cells of one unit (assumes exactly 9 entries)
        :return: list of ``(digit, position)`` pairs, or ``None`` when the
            unit contains no hidden single
        """
        # Work on a deep copy so the candidate pruning below does not
        # mutate the caller's cells.
        _cells = copy.deepcopy(cells)
        expected_hidden_value = [1, 2, 3, 4, 5, 6, 7, 8, 9]
        # Digits not yet placed anywhere in this unit.
        remaining_list = list(set(expected_hidden_value) - set([cell.value for cell in _cells if cell.is_solved]))
        suggested_lists = [cell.suggests for cell in _cells if not cell.is_solved]
        # Drop already-placed digits from every candidate list.
        solved_cells_value = [cell.value for cell in cells if cell.is_solved]
        for s_list in suggested_lists:
            for value in solved_cells_value:
                if value in s_list:
                    s_list.remove(value)
        for remain_digit in remaining_list:
            digits_count = len([1 for l in suggested_lists if remain_digit in l])
            # A digit appearing in more than one candidate list cannot be a
            # hidden single; erase it so only true singles survive below.
            if digits_count > 1:
                [s_list.remove(remain_digit) for s_list in suggested_lists if remain_digit in s_list]
        found_hidden_longer_list = [s[0] for s in suggested_lists if len(s) == 1]
        # No hidden single in this unit.
        if len([1 for s in suggested_lists if len(s) == 1]) == 0:
            return None
        result = []
        for hidden_longer in found_hidden_longer_list:
            for i in range(9):
                # NOTE(review): positions are looked up in the caller's
                # *unpruned* cells, not the pruned copies -- this presumes
                # each hidden digit occurs in exactly one original candidate
                # list; confirm, otherwise duplicate positions could be
                # reported for the same digit.
                if hidden_longer in cells[i].suggests:
                    result.append((hidden_longer, i))
        return result
def loner(self):
while True:
is_updated = False
for i in range(9):
for j in range(9):
if self.solved[i][j].is_solved:
continue
is_updated = is_updated or self.update_suggests(i, j)
if not is_updated:
break
def update_suggests(self, i, j):
self.solved[i][j].suggests = list(set(self.solved[i][j].suggests) - set([cell.value for cell in self.get_row_cells(i) if cell.is_solved]))
self.solved[i][j].suggests = list(set(self.solved[i][j].suggests) - set([cell.value for cell in self.get_col_cells(j) if cell.is_solved]))
self.solved[i][j].suggests = list(set(self.solved[i][j].suggests) - set([cell.value for cell in self.get_sect_cells(i, j) if cell.is_solved]))
return self.solved[i][j].mark_solved()
def get_row_cells(self, i):
cell_list = []
for j in range(9):
cell_list.append(self.solved[i][j])
return cell_list
def get_col_cells(self, j):
cell_list = []
for i in range(9):
cell_list.append(self.solved[i][j])
return cell_list
def get_sect_cells(self, i, j):
sect_list = []
i_corner = 0
j_corner = 0
if 0 < i <= 2:
i_corner = 0
elif 2 < i <= 5:
i_corner = 3
elif 5 < i <= 8:
i_corner = 6
if 0 < j <= 2:
j_corner = 0
elif 2 < j <= 5:
j_corner = 3
elif 5 < j <= 8:
j_corner = 6
for i in range(i_corner, i_corner+3):
for j in range(j_corner, j_corner+3):
sect_list.append(self.solved[i][j])
return sect_list
    def draw_sudoku(self):
        """Print the grid values, one row per line.

        Python 2 print statement; the trailing comma keeps each row's
        cells on a single line.
        """
        for i in range(9):
            for j in range(9):
                print "|" + str(self.solved[i][j].value),
            print ""
    def draw_suggested_values(self):
        """Print the grid showing candidate lists for unsolved cells,
        padding each column to a common width and ruling off every third
        row and column (Python 2 print syntax)."""
        # Render every cell: its solved value, or its remaining candidates.
        cells_str_result = [["" for i in range(9)] for i in range(9)]
        for i in range(9):
            for j in range(9):
                solve_or_suggest = str(self.solved[i][j].value) if self.solved[i][j].is_solved else str(self.solved[i][j].suggests)
                cells_str_result[i][j] = solve_or_suggest
        # Widest rendered entry per column, so the columns line up.
        coll_width = []
        for j in range(9):
            max_coll_width = 1
            for i in range(9):
                max_coll_width = len(cells_str_result[i][j]) if len(cells_str_result[i][j]) > max_coll_width else max_coll_width
            coll_width.append(max_coll_width)
        for i in range(9):
            # Horizontal rule before each band of three rows.
            if not i%3:
                for j in range(9):
                    if not j%3:
                        print "|",
                    print "|" + "-" * coll_width[j],
                print ""
            for j in range(9):
                # Extra separator before each band of three columns.
                if not j%3:
                    print "|",
                # Right-pad the entry to the column width.
                cells_str_result[i][j] += " " * (coll_width[j] - len(cells_str_result[i][j]))
                print "|"+cells_str_result[i][j],
            print ""
    def draw(self):
        """Print each row's cell objects via their repr (debugging aid)."""
        for i in range(9):
            print self.solved[i]
| |
from couchbase.exceptions import CouchbaseError
from couchbase.views.iterator import AlreadyQueriedError
from couchbase import _to_json
from couchbase._pyport import unicode
def _genprop(converter, *apipaths):
def fget(self):
d = self._json_
try:
for x in apipaths:
d = d[x]
return d
except KeyError:
return None
def fset(self, value):
value = converter(value)
d = self._json_
for x in apipaths[:-1]:
d = d.setdefault(x, {})
d[apipaths[-1]] = value
def fdel(self):
d = self._json_
try:
for x in apipaths[:-1]:
d = d[x]
del d[apipaths[-1]]
except KeyError:
pass
return property(fget, fset, fdel)
def _genprop_str(*apipaths):
    """Shorthand for a :func:`_genprop` whose values are coerced to
    ``unicode`` (Python 2 text) before storage."""
    return _genprop(unicode, *apipaths)
def _highlight(fmt):
if fmt not in ('html', 'ansi'):
raise ValueError(
'Highlight must be "html" or "ansi", got {0}'.format(fmt))
return fmt
def _assign_kwargs(self, kwargs):
for k in kwargs:
if not hasattr(self, k):
raise AttributeError(k, 'Not valid for', self.__class__.__name__)
setattr(self, k, kwargs[k])
# Sentinel value; presumably passed where a field list is expected to
# request all fields -- TODO confirm against callers outside this chunk.
ALL_FIELDS = object()
class _Facet(object):
    """Base class for a search facet definition.

    Stores the wire-format dict in ``_json_``; subclasses layer their own
    parameters on top of the mandatory ``field``.
    """
    def __init__(self, field):
        self._json_ = {'field': field}
    @property
    def encodable(self):
        """Dict representation suitable for JSON encoding."""
        return self._json_
    # Document field over which the facet aggregates.
    field = _genprop_str('field')
    def __repr__(self):
        return '{0.__class__.__name__}<{0._json_!r}>'.format(self)
class TermFacet(_Facet):
    """Facet counting the distinct values of a field.

    :param field: field to facet over
    :param limit: maximum number of terms to return (0 leaves the server
        default in place)
    """
    def __init__(self, field, limit=0):
        super(TermFacet, self).__init__(field)
        if limit:
            self.limit = limit
    # Maximum number of facet results; serialized under the 'size' key.
    limit = _genprop(int, 'size')
def _mk_range_bucket(name, n1, n2, r1, r2):
d = {}
if r1 is not None:
d[n1] = r1
if r2 is not None:
d[n2] = r2
if not d:
raise ValueError('Must specify at least one range boundary!')
d['name'] = name
return d
class DateFacet(_Facet):
    """Facet bucketing documents into named date ranges."""
    def __init__(self, field):
        super(DateFacet, self).__init__(field)
        # Initializes the 'date_ranges' entry via the property below.
        self._ranges = []
    def add_range(self, name, start=None, end=None):
        """Add a named date range; returns ``self`` for chaining."""
        self._ranges.append(_mk_range_bucket(name, 'start', 'end', start, end))
        return self
    # Backed by the 'date_ranges' key of the JSON body.
    _ranges = _genprop(list, 'date_ranges')
class NumericFacet(_Facet):
    """Facet bucketing documents into named numeric ranges."""
    def __init__(self, field):
        super(NumericFacet, self).__init__(field)
        # Initializes the 'numeric_ranges' entry via the property below.
        self._ranges = []
    def add_range(self, name, min=None, max=None):
        """Add a named numeric range; returns ``self`` for chaining."""
        self._ranges.append(_mk_range_bucket(name, 'min', 'max', min, max))
        return self
    # Backed by the 'numeric_ranges' key of the JSON body.
    _ranges = _genprop(list, 'numeric_ranges')
class _FacetDict(dict):
    """Dict of named facets that validates entries on insertion.

    Values must be :class:`_Facet` instances, and range-based facets must
    have at least one range configured before being added.
    """
    def __init__(self, *args, **kwargs):
        # Route construction through update() so the validation in
        # __setitem__ applies; the inherited dict.__init__ would bypass it.
        super(_FacetDict, self).__init__()
        self.update(*args, **kwargs)

    def __setitem__(self, key, value):
        if not isinstance(value, _Facet):
            raise ValueError('Can only add facet')
        if hasattr(value, '_ranges') and not getattr(value, '_ranges'):
            raise ValueError('{} object must have at least one range. Use '
                             'add_range'.format(value.__class__.__name__))
        super(_FacetDict, self).__setitem__(key, value)

    def update(self, *args, **kwargs):
        # BUG FIX: on dict subclasses, dict.update and the inherited
        # constructor do not call __setitem__, so facets supplied that way
        # (e.g. Params(facets={...})) were never validated.  Funnel every
        # entry through __setitem__.
        if len(args) > 1:
            raise TypeError('update expected at most 1 positional argument')
        if args:
            source = args[0]
            items = source.items() if hasattr(source, 'items') else source
            for key, value in items:
                self[key] = value
        for key, value in kwargs.items():
            self[key] = value
class Params(object):
    """Generic modifiers for a search query (paging, fields, timeout,
    highlighting, facets).

    Keyword arguments map onto the property names declared below; unknown
    names raise ``AttributeError``.
    """

    def __init__(self, **kwargs):
        self._json_ = {}
        self.facets = _FacetDict(**kwargs.pop('facets', {}))
        _assign_kwargs(self, kwargs)

    @property
    def encodable(self):
        """JSON-ready dict; facets are serialized on each access."""
        if self.facets:
            encoded_facets = {}
            for facet_name, facet in self.facets.items():
                encoded_facets[facet_name] = facet.encodable
            self._json_['facets'] = encoded_facets
        return self._json_

    limit = _genprop(int, 'size')
    skip = _genprop(int, 'from')
    explain = _genprop(bool, 'explain')
    fields = _genprop(list, 'fields')
    # Accepts seconds in the API; serialized as milliseconds on the wire.
    timeout = _genprop(lambda x: int(x * 1000), 'ctl', 'timeout')
    highlight_style = _genprop(_highlight, 'highlight', 'style')
    highlight_fields = _genprop(list, 'highlight', 'fields')
class Query(object):
    """Base class for all search query types; holds the JSON body."""
    def __init__(self):
        self._json_ = {}
    # Relative score weighting for this (sub-)query.
    boost = _genprop(float, 'boost')
    @property
    def encodable(self):
        """Dict representation suitable for JSON encoding."""
        return self._json_
class RawQuery(Query):
    """Query whose body is supplied verbatim as an already-formed dict."""
    def __init__(self, obj):
        super(RawQuery, self).__init__()
        self._json_ = obj
class _QMeta(type):
def __new__(mcs, name, bases, dict):
if '__init__' not in dict:
def initfn(self, term, **kwargs):
bases[0].__init__(self, term, **kwargs)
dict['__init__'] = initfn
return super(_QMeta, mcs).__new__(mcs, name, bases, dict)
class _SingleQuery(Query):
    """Base for queries built around a single term value.

    Subclasses set ``_TERMFIELD`` to the JSON key that carries the term.
    """
    # NOTE(review): the class-level default is a list (unhashable as a dict
    # key); every concrete subclass overrides it with a string.
    _TERMFIELD = []
    # Python 2 metaclass declaration (would be ignored on Python 3).
    __metaclass__ = _QMeta
    def __init__(self, term, **kwargs):
        super(_SingleQuery, self).__init__()
        self._json_[self._TERMFIELD] = term
        _assign_kwargs(self, kwargs)
class StringQuery(_SingleQuery):
    """Query expressed in the FTS query-string syntax."""
    _TERMFIELD = 'query'
    query = _genprop_str(_TERMFIELD)
class MatchQuery(_SingleQuery):
    """Analyzed full-text match against a single field."""
    _TERMFIELD = 'match'
    match = _genprop_str(_TERMFIELD)
    # Number of leading characters that must match exactly.
    prefix_length = _genprop(int, 'prefix_length')
    # Maximum edit distance allowed for a match.
    fuzziness = _genprop(int, 'fuzziness')
    field = _genprop_str('field')
    analyzer = _genprop_str('analyzer')
class FuzzyQuery(_SingleQuery):
    """Unanalyzed term match allowing a configurable edit distance."""
    _TERMFIELD = 'term'
    fuzziness = _genprop(int, 'fuzziness')
    prefix_length = _genprop(int, 'prefix_length')
    field = _genprop_str('field')
class MatchPhraseQuery(_SingleQuery):
    """Match an analyzed phrase (terms in order) in a field."""
    _TERMFIELD = 'match_phrase'
    match_phrase = _genprop_str(_TERMFIELD)
    field = _genprop_str('field')
    analyzer = _genprop_str('analyzer')
class PrefixQuery(_SingleQuery):
    """Match terms beginning with the given prefix."""
    _TERMFIELD = 'prefix'
    field = _genprop_str('field')
    prefix = _genprop_str(_TERMFIELD)
class RegexQuery(_SingleQuery):
    """Match terms against a regular expression (serialized as 'regexp')."""
    _TERMFIELD = 'regexp'
    field = _genprop_str('field')
    regex = _genprop_str(_TERMFIELD)
class _RangeQuery(Query):
    """Base for range queries; all parameters arrive as keywords."""
    def __init__(self, **kwargs):
        super(_RangeQuery, self).__init__()
        _assign_kwargs(self, kwargs)
class NumericRangeQuery(_RangeQuery):
    """Match numeric field values within ``range_ = (min, max)``."""
    def __init__(self, range_, **kwargs):
        # Base __init__ must run first: it creates self._json_, which the
        # min/max property setters below write into.
        super(NumericRangeQuery, self).__init__(**kwargs)
        self.min, self.max = range_
    min = _genprop(int, 'min')
    min_inclusive = _genprop(bool, 'min_inclusive')
    max = _genprop(int, 'max')
    max_inclusive = _genprop(bool, 'max_inclusive')
    field = _genprop_str('field')
class DateRangeQuery(_RangeQuery):
    """Match date field values within ``range_ = (start, end)``."""
    def __init__(self, range_, **kwargs):
        # BUG FIX: the start/end properties write into self._json_, which
        # only exists after Query.__init__ has run.  The original assigned
        # them *before* calling super(), so constructing this class always
        # raised AttributeError.  Initialize the base class first, matching
        # NumericRangeQuery.
        super(DateRangeQuery, self).__init__(**kwargs)
        self.start, self.end = range_
    start = _genprop_str('start')
    end = _genprop_str('end')
    start_inclusive = _genprop(bool, 'start_inclusive')
    end_inclusive = _genprop(bool, 'end_inclusive')
    field = _genprop_str('field')
    date_time_parser = _genprop_str('datetime_parser')
class _CompoundQuery(Query):
    """Base for queries composed of child queries.

    ``_COMPOUND_FIELDS`` maps attribute names holding child-query lists to
    the JSON keys they serialize under.
    """
    _COMPOUND_FIELDS = []

    def __init__(self, **kwargs):
        super(_CompoundQuery, self).__init__()
        _assign_kwargs(self, kwargs)

    @property
    def encodable(self):
        """JSON dict with every non-empty child list serialized in place."""
        body = self._json_.copy()
        for attr_name, json_key in self._COMPOUND_FIELDS:
            children = getattr(self, attr_name)
            if children:
                body[json_key] = [child.encodable for child in children]
        return body
class ConjunctionQuery(_CompoundQuery):
    """Match documents satisfying *all* child queries (AND)."""
    _COMPOUND_FIELDS = (('conjuncts',) * 2,)
    def __init__(self, *queries):
        super(ConjunctionQuery, self).__init__()
        self.conjuncts = list(queries)
class DisjunctionQuery(_CompoundQuery):
    """Match documents satisfying at least ``min`` child queries (OR)."""
    _COMPOUND_FIELDS = (('disjuncts',) * 2,)
    def __init__(self, *queries, **kwargs):
        super(DisjunctionQuery, self).__init__()
        # NOTE(review): the base __init__ was called without kwargs, so
        # this second call is what actually applies the caller's keywords.
        _assign_kwargs(self, kwargs)
        self.disjuncts = list(queries)
    # Minimum number of disjuncts that must match.
    min = _genprop(int, 'min')
def _bprop_wrap(name, reqtype):
def fget(self):
return self._subqueries.get(name)
def fset(self, value):
if value is None:
if name in self._subqueries:
del self._subqueries[name]
elif isinstance(value, reqtype):
self._subqueries[name] = value
elif isinstance(value, Query):
self._subqueries[name] = reqtype(value)
else:
try:
it = iter(value)
except ValueError:
raise ValueError('Value must be instance of Query')
l = []
for q in it:
if not isinstance(q, Query):
raise ValueError('Item is not a query!', q)
l.append(q)
self._subqueries = reqtype(*l)
def fdel(self):
setattr(self, name, None)
return property(fget, fset, fdel)
class BooleanQuery(Query):
    """Boolean combination of must / should / must_not clauses."""
    def __init__(self, must=None, should=None, must_not=None):
        super(BooleanQuery, self).__init__()
        self._subqueries = {}
        self.must = must
        self.should = should
        self.must_not = must_not

    # 'must' clauses are conjunctive; 'must_not'/'should' are disjunctive.
    must = _bprop_wrap('must', ConjunctionQuery)
    must_not = _bprop_wrap('must_not', DisjunctionQuery)
    should = _bprop_wrap('should', DisjunctionQuery)

    @property
    def encodable(self):
        """Serialize each populated clause into the JSON body.

        BUG FIX: removed a leftover debugging ``print`` statement that
        dumped the body to stdout on every encode.
        """
        for src, tgt in ((self.must, 'must'),
                         (self.must_not, 'must_not'),
                         (self.should, 'should')):
            if src:
                self._json_[tgt] = src.encodable
        return self._json_
class SearchError(CouchbaseError):
    """Error raised when a full-text search request fails."""
    pass
def make_search_body(index, query, params=None):
    """
    Generates a dictionary suitable for encoding as the search body.

    :param index: The index name to query
    :param query: The query itself; a plain string is wrapped in a
        :class:`StringQuery`
    :param params: Modifiers for the query
    :return: A dictionary suitable for serialization
    """
    body = {}
    if not isinstance(query, Query):
        query = StringQuery(query)
    body['query'] = query.encodable
    if params:
        body.update(params.encodable)
    # Set last so the index name always wins over anything in params.
    body['indexName'] = index
    return body
class SearchRequest(object):
    """Iterable handle over an executing full-text search request."""

    def __init__(self, body, parent, row_factory=lambda x: x):
        """
        Object representing the execution of the request on the
        server.
        .. warning::
            You should typically not call this constructor by
            yourself, rather use the :meth:`~.Bucket.fts_query`
            method (or one of its async derivatives).
        :param body: Serialized search body (see :func:`make_search_body`).
        :param parent: The parent :class:`~.couchbase.bucket.Bucket` object
        :param row_factory: Callable which accepts the raw dictionary
            of each row, and can wrap them in a customized class.
            The default is simply to return the dictionary itself.
        To actually receive results of the query, iterate over this
        object.
        """
        self._body = _to_json(body)
        self._parent = parent
        self.row_factory = row_factory
        self.errors = []
        self._mres = None
        self._do_iter = True
        # Becomes the low-level result handle once _start() runs.
        self.__raw = False
        self.__meta_received = False

    def _start(self):
        """Issue the request to the server (idempotent)."""
        if self._mres:
            return
        self._mres = self._parent._fts_query(self._body)
        self.__raw = self._mres[None]

    @property
    def raw(self):
        # Low-level result handle; False until _start() has been called.
        return self.__raw

    @property
    def meta(self):
        """
        Get metadata from the query itself. This is guaranteed to only
        return a Python dictionary.
        Note that if the query failed, the metadata might not be in JSON
        format, in which case there may be additional, non-JSON data
        which can be retrieved using the following
        ::
            raw_meta = req.raw.value
        :return: A dictionary containing the query metadata
        """
        if not self.__meta_received:
            raise RuntimeError(
                'This property only valid once all rows are received!')
        if isinstance(self.raw.value, dict):
            return self.raw.value
        return {}

    @property
    def total_hits(self):
        """Total number of documents matching the query."""
        return self.meta['total_hits']

    @property
    def took(self):
        """Server-side execution time, as reported in the metadata."""
        return self.meta['took']

    @property
    def max_score(self):
        """Highest relevance score among the hits."""
        return self.meta['max_score']

    @property
    def facets(self):
        """Facet results, if facets were requested."""
        return self.meta['facets']

    def _clear(self):
        # Drop references to the parent bucket and result handle so they
        # can be released promptly.
        del self._parent
        del self._mres

    def _handle_meta(self, value):
        """Record metadata arrival and surface any server-side errors."""
        self.__meta_received = True
        if not isinstance(value, dict):
            return
        if 'errors' in value:
            for err in value['errors']:
                # NOTE(review): message says 'N1QL' but this is an FTS
                # request -- looks like a copy-paste from the N1QL module.
                raise SearchError.pyexc('N1QL Execution failed', err)

    def _process_payload(self, rows):
        """Convert one batch of raw rows; finalize iteration when done."""
        if rows:
            return [self.row_factory(row) for row in rows]
        elif self.raw.done:
            # No rows and the request completed: consume the metadata and
            # stop iterating.
            self._handle_meta(self.raw.value)
            self._do_iter = False
            return []
        else:
            # We can only get here if another concurrent query broke out the
            # event loop before we did.
            return []

    def __iter__(self):
        # A request may only be iterated once.
        if not self._do_iter:
            raise AlreadyQueriedError()
        self._start()
        while self._do_iter:
            raw_rows = self.raw.fetch(self._mres)
            for row in self._process_payload(raw_rows):
                yield row

    def __repr__(self):
        return ('<{0.__class__.__name__} '
                'body={0._body!r} '
                'response={0.raw.value!r}>'.format(self))
| |
import geopandas as gpd
import numpy as np
import pandas as pd
import pytest
from packaging.version import Version
folium = pytest.importorskip("folium")
branca = pytest.importorskip("branca")
matplotlib = pytest.importorskip("matplotlib")
mapclassify = pytest.importorskip("mapclassify")
import matplotlib.cm as cm # noqa
import matplotlib.colors as colors # noqa
from branca.colormap import StepColormap # noqa
# True for branca newer than 0.4.2; gates the colorbar max_labels tests.
BRANCA_05 = Version(branca.__version__) > Version("0.4.2")
class TestExplore:
    def setup_method(self):
        """Load the bundled example datasets and derive the variants used
        across tests (a numeric 'range' column and a frame with NaNs)."""
        self.nybb = gpd.read_file(gpd.datasets.get_path("nybb"))
        self.world = gpd.read_file(gpd.datasets.get_path("naturalearth_lowres"))
        self.cities = gpd.read_file(gpd.datasets.get_path("naturalearth_cities"))
        self.world["range"] = range(len(self.world))
        self.missing = self.world.copy()
        # Seed so the NaN positions are stable between runs.
        np.random.seed(42)
        self.missing.loc[np.random.choice(self.missing.index, 40), "continent"] = np.nan
        self.missing.loc[np.random.choice(self.missing.index, 40), "pop_est"] = np.nan
def _fetch_map_string(self, m):
out = m._parent.render()
out_str = "".join(out.split())
return out_str
    def test_simple_pass(self):
        """Smoke test: explore() runs without error on default settings for
        polygon and point GeoDataFrames and a bare GeoSeries."""
        self.nybb.explore()
        self.world.explore()
        self.cities.explore()
        self.world.geometry.explore()
    def test_choropleth_pass(self):
        """Smoke test: a default choropleth (column=...) renders."""
        self.world.explore(column="pop_est")
    def test_map_settings_default(self):
        """Check default map settings"""
        m = self.world.explore()
        # Map centers on the data's centroid.
        assert m.location == [
            pytest.approx(-3.1774349999999956, rel=1e-6),
            pytest.approx(2.842170943040401e-14, rel=1e-6),
        ]
        assert m.options["zoom"] == 10
        assert m.options["zoomControl"] is True
        assert m.position == "relative"
        assert m.height == (100.0, "%")
        assert m.width == (100.0, "%")
        assert m.left == (0, "%")
        assert m.top == (0, "%")
        assert m.global_switches.no_touch is False
        assert m.global_switches.disable_3d is False
        # CRS-aware data gets the default OpenStreetMap tile layer.
        assert "openstreetmap" in m.to_dict()["children"].keys()
    def test_map_settings_custom(self):
        """Check custom map settings"""
        # Pixel dimensions and disabled zoom control.
        m = self.nybb.explore(
            zoom_control=False,
            width=200,
            height=200,
        )
        assert m.location == [
            pytest.approx(40.70582377450201, rel=1e-6),
            pytest.approx(-73.9778006856748, rel=1e-6),
        ]
        assert m.options["zoom"] == 10
        assert m.options["zoomControl"] is False
        assert m.height == (200.0, "px")
        assert m.width == (200.0, "px")
        # custom XYZ tiles
        m = self.nybb.explore(
            zoom_control=False,
            width=200,
            height=200,
            tiles="https://mt1.google.com/vt/lyrs=m&x={x}&y={y}&z={z}",
            attr="Google",
        )
        out_str = self._fetch_map_string(m)
        # Ampersands are serialized as \u0026 in the rendered HTML.
        s = '"https://mt1.google.com/vt/lyrs=m\\u0026x={x}\\u0026y={y}\\u0026z={z}"'
        assert s in out_str
        assert '"attribution":"Google"' in out_str
        # Explicit location overrides the computed centroid.
        m = self.nybb.explore(location=(40, 5))
        assert m.location == [40, 5]
        assert m.options["zoom"] == 10
        # Explicit zoom keeps the computed centroid.
        m = self.nybb.explore(zoom_start=8)
        assert m.location == [
            pytest.approx(40.70582377450201, rel=1e-6),
            pytest.approx(-73.9778006856748, rel=1e-6),
        ]
        assert m.options["zoom"] == 8
        # Both overrides together.
        m = self.nybb.explore(location=(40, 5), zoom_start=8)
        assert m.location == [40, 5]
        assert m.options["zoom"] == 8
    def test_simple_color(self):
        """Check color settings"""
        # single named color
        m = self.nybb.explore(color="red")
        out_str = self._fetch_map_string(m)
        assert '"fillColor":"red"' in out_str
        # list of colors (one per feature)
        colors = ["#333333", "#367324", "#95824f", "#fcaa00", "#ffcc33"]
        m2 = self.nybb.explore(color=colors)
        out_str = self._fetch_map_string(m2)
        for c in colors:
            assert f'"fillColor":"{c}"' in out_str
        # column of colors
        df = self.nybb.copy()
        df["colors"] = colors
        m3 = df.explore(color="colors")
        out_str = self._fetch_map_string(m3)
        for c in colors:
            assert f'"fillColor":"{c}"' in out_str
        # line GeoSeries
        m4 = self.nybb.boundary.explore(color="red")
        out_str = self._fetch_map_string(m4)
        assert '"fillColor":"red"' in out_str
def test_choropleth_linear(self):
"""Check choropleth colors"""
# default cmap
m = self.nybb.explore(column="Shape_Leng")
out_str = self._fetch_map_string(m)
assert 'color":"#440154"' in out_str
assert 'color":"#fde725"' in out_str
assert 'color":"#50c46a"' in out_str
assert 'color":"#481467"' in out_str
assert 'color":"#3d4e8a"' in out_str
# named cmap
m = self.nybb.explore(column="Shape_Leng", cmap="PuRd")
out_str = self._fetch_map_string(m)
assert 'color":"#f7f4f9"' in out_str
assert 'color":"#67001f"' in out_str
assert 'color":"#d31760"' in out_str
assert 'color":"#f0ecf5"' in out_str
assert 'color":"#d6bedc"' in out_str
def test_choropleth_mapclassify(self):
"""Mapclassify bins"""
# quantiles
m = self.nybb.explore(column="Shape_Leng", scheme="quantiles")
out_str = self._fetch_map_string(m)
assert 'color":"#21918c"' in out_str
assert 'color":"#3b528b"' in out_str
assert 'color":"#5ec962"' in out_str
assert 'color":"#fde725"' in out_str
assert 'color":"#440154"' in out_str
# headtail
m = self.world.explore(column="pop_est", scheme="headtailbreaks")
out_str = self._fetch_map_string(m)
assert '"fillColor":"#3b528b"' in out_str
assert '"fillColor":"#21918c"' in out_str
assert '"fillColor":"#5ec962"' in out_str
assert '"fillColor":"#fde725"' in out_str
assert '"fillColor":"#440154"' in out_str
# custom k
m = self.world.explore(column="pop_est", scheme="naturalbreaks", k=3)
out_str = self._fetch_map_string(m)
assert '"fillColor":"#21918c"' in out_str
assert '"fillColor":"#fde725"' in out_str
assert '"fillColor":"#440154"' in out_str
    def test_categorical(self):
        """Categorical maps"""
        # auto detection of a string column as categorical
        m = self.world.explore(column="continent")
        out_str = self._fetch_map_string(m)
        assert 'color":"#9467bd","continent":"Europe"' in out_str
        assert 'color":"#c49c94","continent":"NorthAmerica"' in out_str
        assert 'color":"#1f77b4","continent":"Africa"' in out_str
        assert 'color":"#98df8a","continent":"Asia"' in out_str
        assert 'color":"#ff7f0e","continent":"Antarctica"' in out_str
        assert 'color":"#9edae5","continent":"SouthAmerica"' in out_str
        assert 'color":"#7f7f7f","continent":"Oceania"' in out_str
        assert 'color":"#dbdb8d","continent":"Sevenseas(openocean)"' in out_str
        # forced categorical on a numeric column
        m = self.nybb.explore(column="BoroCode", categorical=True)
        out_str = self._fetch_map_string(m)
        assert 'color":"#9edae5"' in out_str
        assert 'color":"#c7c7c7"' in out_str
        assert 'color":"#8c564b"' in out_str
        assert 'color":"#1f77b4"' in out_str
        assert 'color":"#98df8a"' in out_str
        # pandas.Categorical dtype
        df = self.world.copy()
        df["categorical"] = pd.Categorical(df["name"])
        m = df.explore(column="categorical")
        out_str = self._fetch_map_string(m)
        # every tab20 color should appear
        for c in np.apply_along_axis(colors.to_hex, 1, cm.tab20(range(20))):
            assert f'"fillColor":"{c}"' in out_str
        # custom cmap
        m = self.nybb.explore(column="BoroName", cmap="Set1")
        out_str = self._fetch_map_string(m)
        assert 'color":"#999999"' in out_str
        assert 'color":"#a65628"' in out_str
        assert 'color":"#4daf4a"' in out_str
        assert 'color":"#e41a1c"' in out_str
        assert 'color":"#ff7f00"' in out_str
        # custom list of colors
        cmap = ["#333432", "#3b6e8c", "#bc5b4f", "#8fa37e", "#efc758"]
        m = self.nybb.explore(column="BoroName", cmap=cmap)
        out_str = self._fetch_map_string(m)
        for c in cmap:
            assert f'"fillColor":"{c}"' in out_str
        # shorter list (to make it repeat)
        cmap = ["#333432", "#3b6e8c"]
        m = self.nybb.explore(column="BoroName", cmap=cmap)
        out_str = self._fetch_map_string(m)
        for c in cmap:
            assert f'"fillColor":"{c}"' in out_str
        # invalid cmap name is rejected
        with pytest.raises(ValueError, match="'cmap' is invalid."):
            self.nybb.explore(column="BoroName", cmap="nonsense")
    def test_categories(self):
        """Explicit 'categories' fixes the color assignment order; it is
        rejected for columns that are already pd.Categorical."""
        m = self.nybb[["BoroName", "geometry"]].explore(
            column="BoroName",
            categories=["Brooklyn", "Staten Island", "Queens", "Bronx", "Manhattan"],
        )
        out_str = self._fetch_map_string(m)
        assert '"Bronx","__folium_color":"#c7c7c7"' in out_str
        assert '"Manhattan","__folium_color":"#9edae5"' in out_str
        assert '"Brooklyn","__folium_color":"#1f77b4"' in out_str
        assert '"StatenIsland","__folium_color":"#98df8a"' in out_str
        assert '"Queens","__folium_color":"#8c564b"' in out_str
        df = self.nybb.copy()
        df["categorical"] = pd.Categorical(df["BoroName"])
        # Categorical dtype already carries its categories.
        with pytest.raises(ValueError, match="Cannot specify 'categories'"):
            df.explore("categorical", categories=["Brooklyn", "Staten Island"])
    def test_column_values(self):
        """
        Check that the dataframe plot method returns same values with an
        input string (column in df), pd.Series, or np.array
        """
        column_array = np.array(self.world["pop_est"])
        m1 = self.world.explore(column="pop_est")  # column name
        m2 = self.world.explore(column=column_array)  # np.array
        m3 = self.world.explore(column=self.world["pop_est"])  # pd.Series
        assert m1.location == m2.location == m3.location
        # Tooltips/popups still list all dataframe columns for array input.
        m1_fields = self.world.explore(column=column_array, tooltip=True, popup=True)
        out1_fields_str = self._fetch_map_string(m1_fields)
        assert (
            'fields=["pop_est","continent","name","iso_a3","gdp_md_est","range"]'
            in out1_fields_str
        )
        assert (
            'aliases=["pop_est","continent","name","iso_a3","gdp_md_est","range"]'
            in out1_fields_str
        )
        # ...and for Series input.
        m2_fields = self.world.explore(
            column=self.world["pop_est"], tooltip=True, popup=True
        )
        out2_fields_str = self._fetch_map_string(m2_fields)
        assert (
            'fields=["pop_est","continent","name","iso_a3","gdp_md_est","range"]'
            in out2_fields_str
        )
        assert (
            'aliases=["pop_est","continent","name","iso_a3","gdp_md_est","range"]'
            in out2_fields_str
        )
        # GeoDataframe and the given list have different number of rows
        with pytest.raises(ValueError, match="different number of rows"):
            self.world.explore(column=np.array([1, 2, 3]))
    def test_no_crs(self):
        """Naive geometry get no tiles"""
        df = self.world.copy()
        df.crs = None
        m = df.explore()
        # Without a CRS the base tile layer is omitted.
        assert "openstreetmap" not in m.to_dict()["children"].keys()
    def test_style_kwds(self):
        """Style keywords"""
        m = self.world.explore(
            style_kwds=dict(fillOpacity=0.1, weight=0.5, fillColor="orange")
        )
        out_str = self._fetch_map_string(m)
        assert '"fillColor":"orange","fillOpacity":0.1,"weight":0.5' in out_str
        # style_kwds also applies on top of a choropleth.
        m = self.world.explore(column="pop_est", style_kwds=dict(color="black"))
        assert '"color":"black"' in self._fetch_map_string(m)
    def test_tooltip(self):
        """Tooltip/popup flags accept bool, column name(s), a count, and
        keyword dicts; check the rendered field/alias lists each time."""
        # default with no tooltip or popup
        m = self.world.explore()
        assert "GeoJsonTooltip" in str(m.to_dict())
        assert "GeoJsonPopup" not in str(m.to_dict())
        # True
        m = self.world.explore(tooltip=True, popup=True)
        assert "GeoJsonTooltip" in str(m.to_dict())
        assert "GeoJsonPopup" in str(m.to_dict())
        out_str = self._fetch_map_string(m)
        assert (
            'fields=["pop_est","continent","name","iso_a3","gdp_md_est","range"]'
            in out_str
        )
        assert (
            'aliases=["pop_est","continent","name","iso_a3","gdp_md_est","range"]'
            in out_str
        )
        # True choropleth
        m = self.world.explore(column="pop_est", tooltip=True, popup=True)
        assert "GeoJsonTooltip" in str(m.to_dict())
        assert "GeoJsonPopup" in str(m.to_dict())
        out_str = self._fetch_map_string(m)
        assert (
            'fields=["pop_est","continent","name","iso_a3","gdp_md_est","range"]'
            in out_str
        )
        assert (
            'aliases=["pop_est","continent","name","iso_a3","gdp_md_est","range"]'
            in out_str
        )
        # single column
        m = self.world.explore(tooltip="pop_est", popup="iso_a3")
        out_str = self._fetch_map_string(m)
        assert 'fields=["pop_est"]' in out_str
        assert 'aliases=["pop_est"]' in out_str
        assert 'fields=["iso_a3"]' in out_str
        assert 'aliases=["iso_a3"]' in out_str
        # list
        m = self.world.explore(
            tooltip=["pop_est", "continent"], popup=["iso_a3", "gdp_md_est"]
        )
        out_str = self._fetch_map_string(m)
        assert 'fields=["pop_est","continent"]' in out_str
        assert 'aliases=["pop_est","continent"]' in out_str
        assert 'fields=["iso_a3","gdp_md_est"' in out_str
        assert 'aliases=["iso_a3","gdp_md_est"]' in out_str
        # number (first N columns)
        m = self.world.explore(tooltip=2, popup=2)
        out_str = self._fetch_map_string(m)
        assert 'fields=["pop_est","continent"]' in out_str
        assert 'aliases=["pop_est","continent"]' in out_str
        # keywords tooltip
        m = self.world.explore(
            tooltip=True,
            popup=False,
            tooltip_kwds=dict(aliases=[0, 1, 2, 3, 4, 5], sticky=False),
        )
        out_str = self._fetch_map_string(m)
        assert (
            'fields=["pop_est","continent","name","iso_a3","gdp_md_est","range"]'
            in out_str
        )
        assert "aliases=[0,1,2,3,4,5]" in out_str
        assert '"sticky":false' in out_str
        # keywords popup
        m = self.world.explore(
            tooltip=False,
            popup=True,
            popup_kwds=dict(aliases=[0, 1, 2, 3, 4, 5]),
        )
        out_str = self._fetch_map_string(m)
        assert (
            'fields=["pop_est","continent","name","iso_a3","gdp_md_est","range"]'
            in out_str
        )
        assert "aliases=[0,1,2,3,4,5]" in out_str
        assert "<th>${aliases[i]" in out_str
        # no labels
        m = self.world.explore(
            tooltip=True,
            popup=True,
            tooltip_kwds=dict(labels=False),
            popup_kwds=dict(labels=False),
        )
        out_str = self._fetch_map_string(m)
        assert "<th>${aliases[i]" not in out_str
        # named index is restored as a visible column
        gdf = self.nybb.set_index("BoroName")
        m = gdf.explore()
        out_str = self._fetch_map_string(m)
        assert "BoroName" in out_str
def test_default_markers(self):
# check overridden default for points
m = self.cities.explore()
strings = ['"radius":2', '"fill":true', "CircleMarker(latlng,opts)"]
out_str = self._fetch_map_string(m)
for s in strings:
assert s in out_str
m = self.cities.explore(marker_kwds=dict(radius=5, fill=False))
strings = ['"radius":5', '"fill":false', "CircleMarker(latlng,opts)"]
out_str = self._fetch_map_string(m)
for s in strings:
assert s in out_str
    def test_custom_markers(self):
        """Each supported marker_type renders; unsupported ones raise."""
        # Markers
        m = self.cities.explore(
            marker_type="marker",
            marker_kwds={"icon": folium.Icon(icon="star")},
        )
        assert ""","icon":"star",""" in self._fetch_map_string(m)
        # Circle Markers
        m = self.cities.explore(marker_type="circle", marker_kwds={"fill_color": "red"})
        assert ""","fillColor":"red",""" in self._fetch_map_string(m)
        # Folium Markers (an instance instead of a string)
        m = self.cities.explore(
            marker_type=folium.Circle(
                radius=4, fill_color="orange", fill_opacity=0.4, color="black", weight=1
            ),
        )
        assert ""","color":"black",""" in self._fetch_map_string(m)
        # Circle
        m = self.cities.explore(marker_type="circle_marker", marker_kwds={"radius": 10})
        assert ""","radius":10,""" in self._fetch_map_string(m)
        # Unsupported Markers
        with pytest.raises(
            ValueError,
            match="Only 'marker', 'circle', and 'circle_marker' are supported",
        ):
            self.cities.explore(marker_type="dummy")
    def test_vmin_vmax(self):
        """vmin/vmax stretch the colormap beyond the data range."""
        df = self.world.copy()
        df["range"] = range(len(df))
        m = df.explore("range", vmin=-100, vmax=1000)
        out_str = self._fetch_map_string(m)
        assert 'case"176":return{"color":"#3b528b","fillColor":"#3b528b"' in out_str
        assert 'case"119":return{"color":"#414287","fillColor":"#414287"' in out_str
        assert 'case"3":return{"color":"#482173","fillColor":"#482173"' in out_str
        # test 0 as an explicit (falsy) lower bound
        df2 = self.nybb.copy()
        df2["values"] = df2["BoroCode"] * 10.0
        m = df2[df2["values"] >= 30].explore("values", vmin=0)
        out_str = self._fetch_map_string(m)
        assert 'case"1":return{"color":"#7ad151","fillColor":"#7ad151"' in out_str
        assert 'case"2":return{"color":"#22a884","fillColor":"#22a884"' in out_str
        # 0 as an explicit (falsy) upper bound
        df2["values_negative"] = df2["BoroCode"] * -10.0
        m = df2[df2["values_negative"] <= 30].explore("values_negative", vmax=0)
        out_str = self._fetch_map_string(m)
        assert 'case"1":return{"color":"#414487","fillColor":"#414487"' in out_str
        assert 'case"2":return{"color":"#2a788e","fillColor":"#2a788e"' in out_str
def test_missing_vals(self):
m = self.missing.explore("continent")
assert '"fillColor":null' in self._fetch_map_string(m)
m = self.missing.explore("pop_est")
assert '"fillColor":null' in self._fetch_map_string(m)
m = self.missing.explore("pop_est", missing_kwds=dict(color="red"))
assert '"fillColor":"red"' in self._fetch_map_string(m)
m = self.missing.explore("continent", missing_kwds=dict(color="red"))
assert '"fillColor":"red"' in self._fetch_map_string(m)
def test_categorical_legend(self):
m = self.world.explore("continent", legend=True)
out_str = self._fetch_map_string(m)
assert "#1f77b4'></span>Africa" in out_str
assert "#ff7f0e'></span>Antarctica" in out_str
assert "#98df8a'></span>Asia" in out_str
assert "#9467bd'></span>Europe" in out_str
assert "#c49c94'></span>NorthAmerica" in out_str
assert "#7f7f7f'></span>Oceania" in out_str
assert "#dbdb8d'></span>Sevenseas(openocean)" in out_str
assert "#9edae5'></span>SouthAmerica" in out_str
m = self.missing.explore(
"continent", legend=True, missing_kwds={"color": "red"}
)
out_str = self._fetch_map_string(m)
assert "red'></span>NaN" in out_str
    def test_colorbar(self):
        """Colorbar legend: caption, NaN entry, and scaled vs unscaled
        color segment counts."""
        m = self.world.explore("range", legend=True)
        out_str = self._fetch_map_string(m)
        assert "attr(\"id\",'legend')" in out_str
        # default caption is the column name
        assert "text('range')" in out_str
        m = self.world.explore(
            "range", legend=True, legend_kwds=dict(caption="my_caption")
        )
        out_str = self._fetch_map_string(m)
        assert "attr(\"id\",'legend')" in out_str
        assert "text('my_caption')" in out_str
        m = self.missing.explore("pop_est", legend=True, missing_kwds=dict(color="red"))
        out_str = self._fetch_map_string(m)
        assert "red'></span>NaN" in out_str
        # do not scale legend: every class gets an equal-width segment
        m = self.world.explore(
            "pop_est",
            legend=True,
            legend_kwds=dict(scale=False),
            scheme="Headtailbreaks",
        )
        out_str = self._fetch_map_string(m)
        assert out_str.count("#440154ff") == 100
        assert out_str.count("#3b528bff") == 100
        assert out_str.count("#21918cff") == 100
        assert out_str.count("#5ec962ff") == 100
        assert out_str.count("#fde725ff") == 100
        # scale legend accordingly: segment widths follow the class spans
        m = self.world.explore(
            "pop_est",
            legend=True,
            scheme="Headtailbreaks",
        )
        out_str = self._fetch_map_string(m)
        assert out_str.count("#440154ff") == 16
        assert out_str.count("#3b528bff") == 51
        assert out_str.count("#21918cff") == 133
        assert out_str.count("#5ec962ff") == 282
        assert out_str.count("#fde725ff") == 18
        # discrete cmap
        m = self.world.explore("pop_est", legend=True, cmap="Pastel2")
        out_str = self._fetch_map_string(m)
        assert out_str.count("b3e2cdff") == 63
        assert out_str.count("fdcdacff") == 62
        assert out_str.count("cbd5e8ff") == 63
        assert out_str.count("f4cae4ff") == 62
        assert out_str.count("e6f5c9ff") == 62
        assert out_str.count("fff2aeff") == 63
        assert out_str.count("f1e2ccff") == 62
        assert out_str.count("ccccccff") == 63
    @pytest.mark.skipif(not BRANCA_05, reason="requires branca >= 0.5.0")
    def test_colorbar_max_labels(self):
        """legend_kwds(max_labels=...) thins the colorbar tick labels."""
        # linear
        m = self.world.explore("pop_est", legend_kwds=dict(max_labels=3))
        out_str = self._fetch_map_string(m)
        tick_values = [140.0, 465176713.5921569, 930353287.1843138]
        for tick in tick_values:
            assert str(tick) in out_str
        # scheme: dropped labels render as empty strings
        m = self.world.explore(
            "pop_est", scheme="headtailbreaks", legend_kwds=dict(max_labels=3)
        )
        out_str = self._fetch_map_string(m)
        assert "tickValues([140,'',182567501.0,'',1330619341.0,''])" in out_str
        # short cmap
        m = self.world.explore("pop_est", legend_kwds=dict(max_labels=3), cmap="tab10")
        out_str = self._fetch_map_string(m)
        tick_values = [140.0, 551721192.4, 1103442244.8]
        for tick in tick_values:
            assert str(tick) in out_str
def test_xyzservices_providers(self):
xyzservices = pytest.importorskip("xyzservices")
m = self.nybb.explore(tiles=xyzservices.providers.CartoDB.PositronNoLabels)
out_str = self._fetch_map_string(m)
assert (
'"https://a.basemaps.cartocdn.com/light_nolabels/{z}/{x}/{y}{r}.png"'
in out_str
)
assert (
'attribution":"\\u0026copy;\\u003cahref=\\"https://www.openstreetmap.org'
in out_str
)
assert '"maxNativeZoom":20,"maxZoom":20,"minZoom":0' in out_str
def test_xyzservices_query_name(self):
pytest.importorskip("xyzservices")
m = self.nybb.explore(tiles="CartoDB Positron No Labels")
out_str = self._fetch_map_string(m)
assert (
'"https://a.basemaps.cartocdn.com/light_nolabels/{z}/{x}/{y}{r}.png"'
in out_str
)
assert (
'attribution":"\\u0026copy;\\u003cahref=\\"https://www.openstreetmap.org'
in out_str
)
assert '"maxNativeZoom":20,"maxZoom":20,"minZoom":0' in out_str
def test_linearrings(self):
rings = self.nybb.explode(index_parts=True).exterior
m = rings.explore()
out_str = self._fetch_map_string(m)
assert out_str.count("LineString") == len(rings)
def test_mapclassify_categorical_legend(self):
m = self.missing.explore(
column="pop_est",
legend=True,
scheme="naturalbreaks",
missing_kwds=dict(color="red", label="missing"),
legend_kwds=dict(colorbar=False, interval=True),
)
out_str = self._fetch_map_string(m)
strings = [
"[140.00,33986655.00]",
"(33986655.00,105350020.00]",
"(105350020.00,207353391.00]",
"(207353391.00,326625791.00]",
"(326625791.00,1379302771.00]",
"missing",
]
for s in strings:
assert s in out_str
# interval=False
m = self.missing.explore(
column="pop_est",
legend=True,
scheme="naturalbreaks",
missing_kwds=dict(color="red", label="missing"),
legend_kwds=dict(colorbar=False, interval=False),
)
out_str = self._fetch_map_string(m)
strings = [
">140.00,33986655.00",
">33986655.00,105350020.00",
">105350020.00,207353391.00",
">207353391.00,326625791.00",
">326625791.00,1379302771.00",
"missing",
]
for s in strings:
assert s in out_str
# custom labels
m = self.world.explore(
column="pop_est",
legend=True,
scheme="naturalbreaks",
k=5,
legend_kwds=dict(colorbar=False, labels=["s", "m", "l", "xl", "xxl"]),
)
out_str = self._fetch_map_string(m)
strings = [">s<", ">m<", ">l<", ">xl<", ">xxl<"]
for s in strings:
assert s in out_str
# fmt
m = self.missing.explore(
column="pop_est",
legend=True,
scheme="naturalbreaks",
missing_kwds=dict(color="red", label="missing"),
legend_kwds=dict(colorbar=False, fmt="{:.0f}"),
)
out_str = self._fetch_map_string(m)
strings = [
">140,33986655",
">33986655,105350020",
">105350020,207353391",
">207353391,326625791",
">326625791,1379302771",
"missing",
]
for s in strings:
assert s in out_str
def test_given_m(self):
"Check that geometry is mapped onto a given folium.Map"
m = folium.Map()
self.nybb.explore(m=m, tooltip=False, highlight=False)
out_str = self._fetch_map_string(m)
assert out_str.count("BoroCode") == 5
# should not change map settings
assert m.options["zoom"] == 1
def test_highlight(self):
m = self.nybb.explore(highlight=True)
out_str = self._fetch_map_string(m)
assert '"fillOpacity":0.75' in out_str
m = self.nybb.explore(
highlight=True, highlight_kwds=dict(fillOpacity=1, color="red")
)
out_str = self._fetch_map_string(m)
assert '{"color":"red","fillOpacity":1}' in out_str
def test_custom_colormaps(self):
step = StepColormap(["green", "yellow", "red"], vmin=0, vmax=100000000)
m = self.world.explore("pop_est", cmap=step, tooltip=["name"], legend=True)
strings = [
'fillColor":"#008000ff"', # Green
'"fillColor":"#ffff00ff"', # Yellow
'"fillColor":"#ff0000ff"', # Red
]
out_str = self._fetch_map_string(m)
for s in strings:
assert s in out_str
assert out_str.count("008000ff") == 306
assert out_str.count("ffff00ff") == 187
assert out_str.count("ff0000ff") == 190
# Using custom function colormap
def my_color_function(field):
"""Maps low values to green and high values to red."""
if field > 100000000:
return "#ff0000"
else:
return "#008000"
m = self.world.explore("pop_est", cmap=my_color_function, legend=False)
strings = [
'"color":"#ff0000","fillColor":"#ff0000"',
'"color":"#008000","fillColor":"#008000"',
]
for s in strings:
assert s in self._fetch_map_string(m)
# matplotlib.Colormap
cmap = colors.ListedColormap(["red", "green", "blue", "white", "black"])
m = self.nybb.explore("BoroName", cmap=cmap)
strings = [
'"fillColor":"#ff0000"', # Red
'"fillColor":"#008000"', # Green
'"fillColor":"#0000ff"', # Blue
'"fillColor":"#ffffff"', # White
'"fillColor":"#000000"', # Black
]
out_str = self._fetch_map_string(m)
for s in strings:
assert s in out_str
def test_multiple_geoseries(self):
"""
Additional GeoSeries need to be removed as they cannot be converted to GeoJSON
"""
gdf = self.nybb
gdf["boundary"] = gdf.boundary
gdf["centroid"] = gdf.centroid
gdf.explore()
def test_map_kwds(self):
def check():
out_str = self._fetch_map_string(m)
assert "zoomControl:false" in out_str
assert "dragging:false" in out_str
assert "scrollWheelZoom:false" in out_str
# check that folium and leaflet Map() parameters can be passed
m = self.world.explore(
zoom_control=False, map_kwds=dict(dragging=False, scrollWheelZoom=False)
)
check()
with pytest.raises(
ValueError, match="'zoom_control' cannot be specified in 'map_kwds'"
):
self.world.explore(
map_kwds=dict(dragging=False, scrollWheelZoom=False, zoom_control=False)
)
| |
#!/usr/bin/env python
__author__ = 'Thomas R. Lennan, Michael Meisinger'
__license__ = 'Apache 2.0'
from nose.plugins.attrib import attr
import gevent
from pyon.util.unit_test import IonUnitTestCase
from pyon.core.bootstrap import CFG
from pyon.core.exception import BadRequest, NotFound
from pyon.datastore.datastore import DatastoreManager
from pyon.ion.directory import Directory
from interface.objects import DirEntry
@attr('UNIT', group='datastore')
class TestDirectory(IonUnitTestCase):
    """Unit tests for the pyon Directory service against a fresh datastore.

    NOTE(review): uses Python 2 / nose-era idioms (self.assert_, assertEquals,
    ex.message) consistent with the rest of this codebase.
    """

    def test_directory(self):
        """Exercises register/lookup/unregister, the find_* queries and
        duplicate-entry protection. Assertions build on accumulated state,
        so statement order matters throughout."""
        # Start from a clean "resources" datastore so results are predictable.
        dsm = DatastoreManager()
        ds = dsm.get_datastore("resources", "DIRECTORY")
        ds.delete_datastore()
        ds.create_datastore()

        # Disable event publishing so the test needs no messaging infrastructure.
        self.patch_cfg('pyon.ion.directory.CFG', {'service': {'directory': {'publish_events': False}}})

        directory = Directory(datastore_manager=dsm)
        directory.start()
        #self.addCleanup(directory.dir_store.delete_datastore)

        # Snapshot of raw store objects (currently unused; kept for debugging).
        objs = directory.dir_store.list_objects()

        # The root node exists after start(); unknown paths resolve to None.
        root = directory.lookup("/DIR")
        self.assert_(root is not None)

        entry = directory.lookup("/temp")
        self.assert_(entry is None)

        # register() returns the previous attributes, or None if newly created.
        entry_old = directory.register("/", "temp")
        self.assertEquals(entry_old, None)

        # Create a node
        entry = directory.lookup("/temp")
        self.assertEquals(entry, {} )

        # The create case
        entry_old = directory.register("/temp", "entry1", foo="awesome")
        self.assertEquals(entry_old, None)
        entry_new = directory.lookup("/temp/entry1")
        self.assertEquals(entry_new, {"foo":"awesome"})

        # The update case
        entry_old = directory.register("/temp", "entry1", foo="ingenious")
        self.assertEquals(entry_old, {"foo": "awesome"})

        # The delete case
        entry_old = directory.unregister("/temp", "entry1")
        self.assertEquals(entry_old, {"foo": "ingenious"})
        entry_new = directory.lookup("/temp/entry1")
        self.assertEquals(entry_new, None)

        # Build a small tree to exercise the find_* queries below.
        # Note rid7 is deliberately registered twice in different branches.
        directory.register("/BranchA", "X", resource_id="rid1")
        directory.register("/BranchA", "Y", resource_id="rid2")
        directory.register("/BranchA", "Z", resource_id="rid3")
        directory.register("/BranchA/X", "a", resource_id="rid4")
        directory.register("/BranchA/X", "b", resource_id="rid5")
        directory.register("/BranchB", "k", resource_id="rid6")
        directory.register("/BranchB", "l", resource_id="rid7")
        directory.register("/BranchB/k", "m", resource_id="rid7")
        directory.register("/BranchB/k", "X")

        # find_by_value: attribute search scoped by parent path prefix.
        res_list = directory.find_by_value("/", attribute="resource_id", value="rid3")
        self.assertEquals(len(res_list), 1)
        self.assertEquals(res_list[0].org, "ION")
        self.assertEquals(res_list[0].parent, "/BranchA")
        self.assertEquals(res_list[0].key, "Z")

        res_list = directory.find_by_value("/", attribute="resource_id", value="rid34")
        self.assertEquals(len(res_list), 0)

        res_list = directory.find_by_value("/", attribute="resource_id", value="rid7")
        self.assertEquals(len(res_list), 2)

        res_list = directory.find_by_value("/BranchB", attribute="resource_id", value="rid7")
        self.assertEquals(len(res_list), 2)

        # The scope is a path prefix, so "/Branch" matches "/BranchB" too.
        res_list = directory.find_by_value("/Branch", attribute="resource_id", value="rid7")
        self.assertEquals(len(res_list), 2)

        res_list = directory.find_by_value("/BranchB/k", attribute="resource_id", value="rid7")
        self.assertEquals(len(res_list), 1)

        # find_child_entries: direct children by default, full subtree otherwise.
        res_list = directory.find_child_entries("/BranchB/k/m")
        self.assertEquals(len(res_list), 0)

        res_list = directory.find_child_entries("/BranchB")
        self.assertEquals(len(res_list), 2)

        res_list = directory.find_child_entries("/BranchB/k/m", direct_only=False)
        self.assertEquals(len(res_list), 0)

        res_list = directory.find_child_entries("/BranchB", direct_only=False)
        self.assertEquals(len(res_list), 4)

        # find_by_key: key lookup, optionally restricted to a parent.
        res_list = directory.find_by_key("X")
        self.assertEquals(len(res_list), 2)

        res_list = directory.find_by_key("X", parent="/BranchB")
        self.assertEquals(len(res_list), 1)

        # lookup_mult preserves request order and returns None for misses.
        entry_list = directory.lookup_mult("/BranchA", ["X", "Z"])
        self.assertEquals(len(entry_list), 2)
        self.assertEquals(entry_list[0]["resource_id"], "rid1")
        self.assertEquals(entry_list[1]["resource_id"], "rid3")

        entry_list = directory.lookup_mult("/BranchA", ["Y", "FOO"])
        self.assertEquals(len(entry_list), 2)
        self.assertEquals(entry_list[0]["resource_id"], "rid2")
        self.assertEquals(entry_list[1], None)

        # Test prevent duplicate entries
        directory.register("/some", "dupentry", foo="ingenious")
        de = directory.lookup("/some/dupentry", return_entry=True)
        # Clone the entry minus persistence metadata and try to re-create it.
        de1_attrs = de.__dict__.copy()
        del de1_attrs["_id"]
        del de1_attrs["_rev"]
        del de1_attrs["type_"]
        de1 = DirEntry(**de1_attrs)
        with self.assertRaises(BadRequest) as ex:
            de_id1,_ = directory.dir_store.create(de1)
        self.assertTrue(ex.message.startswith("DirEntry already exists"))

        res_list = directory.find_by_key("dupentry", parent="/some")
        self.assertEquals(1, len(res_list))

    def test_directory_lock(self):
        """Exercises acquire/release of directory-backed locks, lock holders
        and timeout-based expiry."""
        # Fresh datastore, same setup as test_directory.
        dsm = DatastoreManager()
        ds = dsm.get_datastore("resources", "DIRECTORY")
        ds.delete_datastore()
        ds.create_datastore()

        self.patch_cfg('pyon.ion.directory.CFG', {'service': {'directory': {'publish_events': False}}})

        directory = Directory(datastore_manager=dsm)
        directory.start()

        # First acquisition wins; a second acquisition of the same name fails.
        lock1 = directory.acquire_lock("LOCK1", lock_info=dict(process="proc1"))
        self.assertEquals(lock1, True)

        lock2 = directory.acquire_lock("LOCK1", lock_info=dict(process="proc2"))
        self.assertEquals(lock2, False)

        # Lock names may not contain path separators.
        with self.assertRaises(BadRequest):
            directory.acquire_lock("LOCK/SOME")

        with self.assertRaises(BadRequest):
            directory.release_lock("LOCK/SOME")

        # Releasing a lock that was never acquired is an error.
        with self.assertRaises(NotFound):
            directory.release_lock("LOCK2")

        directory.release_lock("LOCK1")

        lock1 = directory.acquire_lock("LOCK1", lock_info=dict(process="proc3"))
        self.assertEquals(lock1, True)

        # TEST: With lock holders
        # The same holder may re-acquire its own lock; others may not.
        lock5 = directory.acquire_lock("LOCK5", lock_holder="proc1")
        self.assertEquals(lock5, True)

        lock5 = directory.acquire_lock("LOCK5", lock_holder="proc1")
        self.assertEquals(lock5, True)

        lock5 = directory.acquire_lock("LOCK5", lock_holder="proc2")
        self.assertEquals(lock5, False)

        directory.release_lock("LOCK5")

        # TEST: Timeout
        # A lock with timeout=100 (ms) expires after the gevent sleep below.
        lock5 = directory.acquire_lock("LOCK5", lock_holder="proc1", timeout=100)
        self.assertEquals(lock5, True)

        lock5 = directory.acquire_lock("LOCK5", lock_holder="proc2")
        self.assertEquals(lock5, False)

        res = directory.is_locked("LOCK5")
        self.assertEquals(res, True)

        gevent.sleep(0.15)

        res = directory.is_locked("LOCK5")
        self.assertEquals(res, False)

        lock5 = directory.acquire_lock("LOCK5", lock_holder="proc2", timeout=100)
        self.assertEquals(lock5, True)

        gevent.sleep(0.15)

        # TEST: Holder self renew
        # The holder can renew an expired lock it previously owned.
        lock5 = directory.acquire_lock("LOCK5", lock_holder="proc2", timeout=100)
        self.assertEquals(lock5, True)

        directory.stop()
| |
from __future__ import print_function, unicode_literals
import importlib
import os
import sys
from django.apps import apps
from django.db.models.fields import NOT_PROVIDED
from django.utils import datetime_safe, six, timezone
from django.utils.six.moves import input
from .loader import MigrationLoader
class MigrationQuestioner(object):
    """
    Gives the autodetector responses to questions it might have.
    This base class has a built-in noninteractive mode, but the
    interactive subclass is what the command-line arguments will use.
    """

    def __init__(self, defaults=None, specified_apps=None, dry_run=None):
        # Canned answers keyed by question name (e.g. "ask_rename").
        self.defaults = defaults or {}
        # Apps explicitly named on the command line.
        self.specified_apps = specified_apps or set()
        self.dry_run = dry_run

    def ask_initial(self, app_label):
        "Should we create an initial migration for the app?"
        # If it was specified on the command line, definitely true
        if app_label in self.specified_apps:
            return True
        # Otherwise, we look to see if it has a migrations module
        # without any Python files in it, apart from __init__.py.
        # Apps from the new app template will have these; the python
        # file check will ensure we skip South ones.
        try:
            app_config = apps.get_app_config(app_label)
        except LookupError:  # It's a fake app.
            return self.defaults.get("ask_initial", False)
        migrations_import_path, _ = MigrationLoader.migrations_module(app_config.label)
        if migrations_import_path is None:
            # It's an application with migrations disabled.
            return self.defaults.get("ask_initial", False)
        try:
            migrations_module = importlib.import_module(migrations_import_path)
        except ImportError:
            return self.defaults.get("ask_initial", False)
        if hasattr(migrations_module, "__file__"):
            filenames = os.listdir(os.path.dirname(migrations_module.__file__))
        elif hasattr(migrations_module, "__path__"):
            # Namespace packages can span multiple dirs; don't guess.
            if len(migrations_module.__path__) > 1:
                return False
            filenames = os.listdir(list(migrations_module.__path__)[0])
        else:
            # BUGFIX: a module with neither __file__ nor __path__ (e.g. frozen
            # or dynamically created) previously fell through both branches and
            # raised NameError on `filenames` below. Treat it like an
            # undeterminable app and fall back to the configured default.
            return self.defaults.get("ask_initial", False)
        # Initial only if the migrations dir holds no real migration files.
        return not any(x.endswith(".py") for x in filenames if x != "__init__.py")

    def ask_not_null_addition(self, field_name, model_name):
        "Adding a NOT NULL field to a model"
        # None means quit
        return None

    def ask_not_null_alteration(self, field_name, model_name):
        "Changing a NULL field to NOT NULL"
        # None means quit
        return None

    def ask_rename(self, model_name, old_name, new_name, field_instance):
        "Was this field really renamed?"
        return self.defaults.get("ask_rename", False)

    def ask_rename_model(self, old_model_state, new_model_state):
        "Was this model really renamed?"
        return self.defaults.get("ask_rename_model", False)

    def ask_merge(self, app_label):
        "Do you really want to merge these migrations?"
        return self.defaults.get("ask_merge", False)

    def ask_auto_now_add_addition(self, field_name, model_name):
        "Adding an auto_now_add field to a model"
        # None means quit
        return None
class InteractiveMigrationQuestioner(MigrationQuestioner):
    """Questioner that resolves ambiguities by prompting the user on stdin."""

    def _boolean_input(self, question, default=None):
        """Ask a y/n *question*; empty input returns *default* when given."""
        result = input("%s " % question)
        if not result and default is not None:
            return default
        # Re-prompt until the answer starts with 'y' or 'n'.
        while len(result) < 1 or result[0].lower() not in "yn":
            result = input("Please answer yes or no: ")
        return result[0].lower() == "y"

    def _choice_input(self, question, choices):
        """Present numbered *choices* and return the chosen 1-based index."""
        print(question)
        for i, choice in enumerate(choices):
            print(" %s) %s" % (i + 1, choice))
        result = input("Select an option: ")
        # Loop until the input parses to an in-range integer.
        while True:
            try:
                value = int(result)
                if 0 < value <= len(choices):
                    return value
            except ValueError:
                pass
            result = input("Please select a valid option: ")

    def _ask_default(self, default=''):
        """
        Prompt for a default value.

        The ``default`` argument allows providing a custom default value (as a
        string) which will be shown to the user and used as the return value
        if the user doesn't provide any other input.
        """
        print("Please enter the default value now, as valid Python")
        if default:
            print(
                "You can accept the default '{}' by pressing 'Enter' or you "
                "can provide another value.".format(default)
            )
        print("The datetime and django.utils.timezone modules are available, so you can do e.g. timezone.now")
        print("Type 'exit' to exit this prompt")
        while True:
            if default:
                prompt = "[default: {}] >>> ".format(default)
            else:
                prompt = ">>> "
            if six.PY3:
                # Six does not correctly abstract over the fact that
                # py3 input returns a unicode string, while py2 raw_input
                # returns a bytestring.
                code = input(prompt)
            else:
                code = input(prompt).decode(sys.stdin.encoding)
            if not code and default:
                code = default
            if not code:
                print("Please enter some code, or 'exit' (with no quotes) to exit.")
            elif code == "exit":
                sys.exit(1)
            else:
                try:
                    # NOTE: eval of developer-typed input is intentional here
                    # (makemigrations runs locally); never feed untrusted data.
                    return eval(code, {}, {"datetime": datetime_safe, "timezone": timezone})
                except (SyntaxError, NameError) as e:
                    print("Invalid input: %s" % e)

    def ask_not_null_addition(self, field_name, model_name):
        "Adding a NOT NULL field to a model"
        # In a dry run there is nothing to apply, so skip the prompt.
        if not self.dry_run:
            choice = self._choice_input(
                "You are trying to add a non-nullable field '%s' to %s without a default; "
                "we can't do that (the database needs something to populate existing rows).\n"
                "Please select a fix:" % (field_name, model_name),
                [
                    ("Provide a one-off default now (will be set on all existing "
                     "rows with a null value for this column)"),
                    "Quit, and let me add a default in models.py",
                ]
            )
            if choice == 2:
                sys.exit(3)
            else:
                return self._ask_default()
        return None

    def ask_not_null_alteration(self, field_name, model_name):
        "Changing a NULL field to NOT NULL"
        if not self.dry_run:
            choice = self._choice_input(
                "You are trying to change the nullable field '%s' on %s to non-nullable "
                "without a default; we can't do that (the database needs something to "
                "populate existing rows).\n"
                "Please select a fix:" % (field_name, model_name),
                [
                    ("Provide a one-off default now (will be set on all existing "
                     "rows with a null value for this column)"),
                    ("Ignore for now, and let me handle existing rows with NULL myself "
                     "(e.g. because you added a RunPython or RunSQL operation to handle "
                     "NULL values in a previous data migration)"),
                    "Quit, and let me add a default in models.py",
                ]
            )
            # Choice 2 means "leave NULL rows alone": signal with NOT_PROVIDED.
            if choice == 2:
                return NOT_PROVIDED
            elif choice == 3:
                sys.exit(3)
            else:
                return self._ask_default()
        return None

    def ask_rename(self, model_name, old_name, new_name, field_instance):
        "Was this field really renamed?"
        msg = "Did you rename %s.%s to %s.%s (a %s)? [y/N]"
        return self._boolean_input(msg % (model_name, old_name, model_name, new_name,
                                          field_instance.__class__.__name__), False)

    def ask_rename_model(self, old_model_state, new_model_state):
        "Was this model really renamed?"
        msg = "Did you rename the %s.%s model to %s? [y/N]"
        return self._boolean_input(msg % (old_model_state.app_label, old_model_state.name,
                                          new_model_state.name), False)

    def ask_merge(self, app_label):
        """Confirm merging conflicting migration branches (defaults to no)."""
        return self._boolean_input(
            "\nMerging will only work if the operations printed above do not conflict\n" +
            "with each other (working on different fields or models)\n" +
            "Do you want to merge these migration branches? [y/N]",
            False,
        )

    def ask_auto_now_add_addition(self, field_name, model_name):
        "Adding an auto_now_add field to a model"
        if not self.dry_run:
            choice = self._choice_input(
                "You are trying to add the field '{}' with 'auto_now_add=True' "
                "to {} without a default; the database needs something to "
                "populate existing rows.\n".format(field_name, model_name),
                [
                    "Provide a one-off default now (will be set on all "
                    "existing rows)",
                    "Quit, and let me add a default in models.py",
                ]
            )
            if choice == 2:
                sys.exit(3)
            else:
                # Suggest timezone.now as the natural one-off default.
                return self._ask_default(default='timezone.now')
        return None
class NonInteractiveMigrationQuestioner(MigrationQuestioner):
    """Questioner for --noinput runs: never prompts, picks the safe answer."""

    def ask_not_null_addition(self, field_name, model_name):
        """Cannot prompt for a one-off default, so abort like a user 'quit'."""
        sys.exit(3)

    def ask_not_null_alteration(self, field_name, model_name):
        """Cannot prompt; mark the default as not provided."""
        return NOT_PROVIDED

    def ask_auto_now_add_addition(self, field_name, model_name):
        """Cannot prompt for a one-off default, so abort like a user 'quit'."""
        sys.exit(3)
| |
#!/usr/bin/env python
#
# Copyright 2013, Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can
# be found in the LICENSE file.
"""This test covers a resharding scenario of an already sharded keyspace.
We start with shards -80 and 80-. We then split 80- into 80-c0 and c0-.
This test is the main resharding test. It not only tests the regular resharding
workflow for an horizontal split, but also a lot of error cases and side
effects, like:
- migrating the traffic one cell at a time.
- migrating rdonly traffic back and forth.
- making sure we can't migrate the master until replica and rdonly are migrated.
- has a background thread to insert data during migration.
- tests a destination shard master failover while replication is running.
- tests a filtered replication source replacement while filtered replication
is running.
- tests 'vtctl SourceShardAdd' and 'vtctl SourceShardDelete'.
- makes sure the key range rules are properly enforced on masters.
"""
import threading
import time
import logging
import unittest
import base_sharding
import environment
import tablet
import utils
from vtproto import topodata_pb2
from vtdb import keyrange_constants
# Module-level tablet fixtures shared by the whole test module.
# Source keyspace starts as two shards (-80 and 80-); shard 80- is then
# split into 80-c0 and c0- (shards 2 and 3 below).
# initial shards
# range '' - 80
shard_0_master = tablet.Tablet()
shard_0_replica = tablet.Tablet()
shard_0_ny_rdonly = tablet.Tablet(cell='ny')
# range 80 - ''
shard_1_master = tablet.Tablet()
shard_1_slave1 = tablet.Tablet()
shard_1_slave2 = tablet.Tablet()
shard_1_ny_rdonly = tablet.Tablet(cell='ny')
shard_1_rdonly1 = tablet.Tablet()
# split shards
# range 80 - c0
shard_2_master = tablet.Tablet()
shard_2_replica1 = tablet.Tablet()
shard_2_replica2 = tablet.Tablet()
shard_2_rdonly1 = tablet.Tablet()
# range c0 - ''
shard_3_master = tablet.Tablet()
shard_3_replica = tablet.Tablet()
shard_3_rdonly1 = tablet.Tablet()
# All tablets, in setup/teardown order.
all_tablets = [shard_0_master, shard_0_replica, shard_0_ny_rdonly,
               shard_1_master, shard_1_slave1, shard_1_slave2,
               shard_1_ny_rdonly, shard_1_rdonly1,
               shard_2_master, shard_2_replica1, shard_2_replica2,
               shard_2_rdonly1,
               shard_3_master, shard_3_replica, shard_3_rdonly1]
def setUpModule():
  # Bring up the topology server, one MySQL per tablet and vtctld.
  # On any failure, tear everything down so no subprocesses leak.
  try:
    environment.topo_server().setup()
    setup_procs = [t.init_mysql(use_rbr=base_sharding.use_rbr)
                   for t in all_tablets]
    utils.Vtctld().start()
    # Wait for all mysqld bootstraps started above to finish.
    utils.wait_procs(setup_procs)
  except:
    tearDownModule()
    raise
def tearDownModule():
  # Shut down all mysqlds, the topo server and any leftover subprocesses,
  # then clean temp files and per-tablet directories.
  utils.required_teardown()
  if utils.options.skip_teardown:
    return

  teardown_procs = [t.teardown_mysql() for t in all_tablets]
  # Best effort: do not fail the teardown if a mysqld already exited.
  utils.wait_procs(teardown_procs, raise_on_error=False)
  environment.topo_server().teardown()
  utils.kill_sub_processes()
  utils.remove_tmp_files()
  for t in all_tablets:
    t.remove_tree()
# InsertThread will insert a value into the timestamps table, and then
# every 1/5s will update its value with the current timestamp
class InsertThread(threading.Thread):
  """Background writer used to generate traffic during shard migration.

  Inserts one row into `timestamps` at construction time, then keeps
  updating that row's time_milli column every 0.2s until `done` is set.
  NOTE(review): py2 code (`long`); time values are epoch milliseconds.
  """

  def __init__(self, tablet_obj, thread_name, thread_id, user_id,
               keyspace_id):
    threading.Thread.__init__(self)
    self.tablet = tablet_obj
    self.thread_name = thread_name
    # thread_id doubles as the primary key of the row this thread owns.
    self.thread_id = thread_id
    self.user_id = user_id
    self.keyspace_id = keyspace_id
    self.str_keyspace_id = utils.uint64_to_hex(keyspace_id)
    # Cooperative stop flag checked by run().
    self.done = False

    # Seed the row inside a transaction; the trailing comments are vtgate
    # routing annotations and must stay in the SQL text.
    self.tablet.mquery(
        'vt_test_keyspace',
        ['begin',
         'insert into timestamps(id, time_milli, custom_ksid_col) '
         'values(%d, %d, 0x%x) '
         '/* vtgate:: keyspace_id:%s */ /* user_id:%d */' %
         (self.thread_id, long(time.time() * 1000), self.keyspace_id,
          self.str_keyspace_id, self.user_id),
         'commit'],
        write=True, user='vt_app')
    # Start the update loop immediately after the seed row is committed.
    self.start()

  def run(self):
    """Update the owned row with the current timestamp until stopped."""
    try:
      while not self.done:
        self.tablet.mquery(
            'vt_test_keyspace',
            ['begin',
             'update timestamps set time_milli=%d '
             'where id=%d /* vtgate:: keyspace_id:%s */ /* user_id:%d */' %
             (long(time.time() * 1000), self.thread_id,
              self.str_keyspace_id, self.user_id),
             'commit'],
            write=True, user='vt_app')
        time.sleep(0.2)
    except Exception:  # pylint: disable=broad-except
      # Log and exit the thread; the main test will notice missing updates.
      logging.exception('InsertThread got exception.')
# MonitorLagThread will get values from a database, and compare the timestamp
# to evaluate lag. Since the qps is really low, and we send binlogs as chunks,
# the latency is pretty high (a few seconds).
class MonitorLagThread(threading.Thread):
  """Background reader that samples replication lag once per second.

  Reads the row maintained by the matching InsertThread and accumulates
  max/sum/count lag statistics until `done` is set.
  """

  def __init__(self, tablet_obj, thread_name, thread_id):
    threading.Thread.__init__(self)
    self.tablet = tablet_obj
    self.thread_name = thread_name
    # Primary key of the row to sample (same id as the InsertThread's row).
    self.thread_id = thread_id
    self.done = False
    # Lag statistics, all in milliseconds / sample counts.
    self.max_lag_ms = 0
    self.lag_sum_ms = 0
    self.sample_count = 0
    self.start()

  def run(self):
    """Sample the row once per second and record the observed lag."""
    try:
      while not self.done:
        result = self.tablet.mquery(
            'vt_test_keyspace',
            'select time_milli from timestamps where id=%d' %
            self.thread_id)
        if result:
          # Lag = now minus the last timestamp the writer managed to commit.
          lag_ms = long(time.time() * 1000) - long(result[0][0])
          logging.debug('MonitorLagThread(%s) got %d ms',
                        self.thread_name, lag_ms)
          self.sample_count += 1
          self.lag_sum_ms += lag_ms
          if lag_ms > self.max_lag_ms:
            self.max_lag_ms = lag_ms
        time.sleep(1.0)
    except Exception:  # pylint: disable=broad-except
      logging.exception('MonitorLagThread got exception.')
class TestResharding(unittest.TestCase, base_sharding.BaseShardingTest):
  # create_schema will create the same schema on the keyspace
  # then insert some values
  def _create_schema(self):
    """Apply the test schema (tables, a view and helpers) to test_keyspace."""
    # The sharding column type depends on the configured keyspace_id type.
    if base_sharding.keyspace_id_type == keyrange_constants.KIT_BYTES:
      t = 'varbinary(64)'
    else:
      t = 'bigint(20) unsigned'
    # Note that the primary key columns are not defined first on purpose to test
    # that a reordered column list is correctly used everywhere in vtworker.
    create_table_template = '''create table %s(
custom_ksid_col ''' + t + ''' not null,
msg varchar(64),
id bigint not null,
parent_id bigint not null,
primary key (parent_id, id),
index by_msg (msg)
) Engine=InnoDB'''
    create_view_template = (
        'create view %s'
        '(parent_id, id, msg, custom_ksid_col)'
        'as select parent_id, id, msg, custom_ksid_col '
        'from %s')
    # Table written by InsertThread / read by MonitorLagThread.
    create_timestamp_table = '''create table timestamps(
id int not null,
time_milli bigint(20) unsigned not null,
custom_ksid_col ''' + t + ''' not null,
primary key (id)
) Engine=InnoDB'''
    # A table outside the resharding workflow, to check it is left alone.
    create_unrelated_table = '''create table unrelated(
name varchar(64),
primary key (name)
) Engine=InnoDB'''

    utils.run_vtctl(['ApplySchema',
                     '-sql=' + create_table_template % ('resharding1'),
                     'test_keyspace'],
                    auto_log=True)
    utils.run_vtctl(['ApplySchema',
                     '-sql=' + create_table_template % ('resharding2'),
                     'test_keyspace'],
                    auto_log=True)
    utils.run_vtctl(['ApplySchema',
                     '-sql=' + create_view_template % ('view1', 'resharding1'),
                     'test_keyspace'],
                    auto_log=True)
    utils.run_vtctl(['ApplySchema',
                     '-sql=' + create_timestamp_table,
                     'test_keyspace'],
                    auto_log=True)
    utils.run_vtctl(['ApplySchema',
                     '-sql=' + create_unrelated_table,
                     'test_keyspace'],
                    auto_log=True)
def _insert_startup_values(self):
self._insert_value(shard_0_master, 'resharding1', 1, 'msg1',
0x1000000000000000)
self._insert_value(shard_1_master, 'resharding1', 2, 'msg2',
0x9000000000000000)
self._insert_value(shard_1_master, 'resharding1', 3, 'msg3',
0xD000000000000000)
def _check_startup_values(self):
# check first value is in the right shard
self._check_value(shard_2_master, 'resharding1', 2, 'msg2',
0x9000000000000000)
self._check_value(shard_2_replica1, 'resharding1', 2, 'msg2',
0x9000000000000000)
self._check_value(shard_2_replica2, 'resharding1', 2, 'msg2',
0x9000000000000000)
self._check_value(shard_2_rdonly1, 'resharding1', 2, 'msg2',
0x9000000000000000)
self._check_value(shard_3_master, 'resharding1', 2, 'msg2',
0x9000000000000000, should_be_here=False)
self._check_value(shard_3_replica, 'resharding1', 2, 'msg2',
0x9000000000000000, should_be_here=False)
self._check_value(shard_3_rdonly1, 'resharding1', 2, 'msg2',
0x9000000000000000, should_be_here=False)
# check second value is in the right shard too
self._check_value(shard_2_master, 'resharding1', 3, 'msg3',
0xD000000000000000, should_be_here=False)
self._check_value(shard_2_replica1, 'resharding1', 3, 'msg3',
0xD000000000000000, should_be_here=False)
self._check_value(shard_2_replica2, 'resharding1', 3, 'msg3',
0xD000000000000000, should_be_here=False)
self._check_value(shard_2_rdonly1, 'resharding1', 3, 'msg3',
0xD000000000000000, should_be_here=False)
self._check_value(shard_3_master, 'resharding1', 3, 'msg3',
0xD000000000000000)
self._check_value(shard_3_replica, 'resharding1', 3, 'msg3',
0xD000000000000000)
self._check_value(shard_3_rdonly1, 'resharding1', 3, 'msg3',
0xD000000000000000)
def _insert_lots(self, count, base=0):
for i in xrange(count):
self._insert_value(shard_1_master, 'resharding1', 10000 + base + i,
'msg-range1-%d' % i, 0xA000000000000000 + base + i)
self._insert_value(shard_1_master, 'resharding1', 20000 + base + i,
'msg-range2-%d' % i, 0xE000000000000000 + base + i)
def _exec_multi_shard_dmls(self):
mids = [10000001, 10000002, 10000003]
msg_ids = ['msg-id10000001', 'msg-id10000002', 'msg-id10000003']
keyspace_ids = [0x9000000000000000, 0xD000000000000000,
0xE000000000000000]
self._insert_multi_value(shard_1_master, 'resharding1', mids,
msg_ids, keyspace_ids)
mids = [10000004, 10000005]
msg_ids = ['msg-id10000004', 'msg-id10000005']
keyspace_ids = [0xD000000000000000, 0xE000000000000000]
self._insert_multi_value(shard_1_master, 'resharding1', mids,
msg_ids, keyspace_ids)
mids = [10000011, 10000012, 10000013]
msg_ids = ['msg-id10000011', 'msg-id10000012', 'msg-id10000013']
keyspace_ids = [0x9000000000000000, 0xD000000000000000, 0xE000000000000000]
self._insert_multi_value(shard_1_master, 'resharding1', mids,
msg_ids, keyspace_ids)
# This update targets two shards.
self._exec_non_annotated_update(shard_1_master, 'resharding1',
[10000011, 10000012], 'update1')
# This update targets one shard.
self._exec_non_annotated_update(shard_1_master, 'resharding1',
[10000013], 'update2')
mids = [10000014, 10000015, 10000016]
msg_ids = ['msg-id10000014', 'msg-id10000015', 'msg-id10000016']
keyspace_ids = [0x9000000000000000, 0xD000000000000000, 0xE000000000000000]
self._insert_multi_value(shard_1_master, 'resharding1', mids,
msg_ids, keyspace_ids)
# This delete targets two shards.
self._exec_non_annotated_delete(shard_1_master, 'resharding1',
[10000014, 10000015])
# This delete targets one shard.
self._exec_non_annotated_delete(shard_1_master, 'resharding1', [10000016])
def _check_multi_shard_values(self):
self._check_multi_dbs(
[shard_2_master, shard_2_replica1, shard_2_replica2],
'resharding1', 10000001, 'msg-id10000001', 0x9000000000000000)
self._check_multi_dbs(
[shard_2_master, shard_2_replica1, shard_2_replica2],
'resharding1', 10000002, 'msg-id10000002', 0xD000000000000000,
should_be_here=False)
self._check_multi_dbs(
[shard_2_master, shard_2_replica1, shard_2_replica2],
'resharding1', 10000003, 'msg-id10000003', 0xE000000000000000,
should_be_here=False)
self._check_multi_dbs(
[shard_3_master, shard_3_replica],
'resharding1', 10000001, 'msg-id10000001', 0x9000000000000000,
should_be_here=False)
self._check_multi_dbs(
[shard_3_master, shard_3_replica],
'resharding1', 10000002, 'msg-id10000002', 0xD000000000000000)
self._check_multi_dbs(
[shard_3_master, shard_3_replica],
'resharding1', 10000003, 'msg-id10000003', 0xE000000000000000)
self._check_multi_dbs(
[shard_2_master, shard_2_replica1, shard_2_replica2],
'resharding1', 10000004, 'msg-id10000004', 0xD000000000000000,
should_be_here=False)
self._check_multi_dbs(
[shard_2_master, shard_2_replica1, shard_2_replica2],
'resharding1', 10000005, 'msg-id10000005', 0xE000000000000000,
should_be_here=False)
self._check_multi_dbs(
[shard_3_master, shard_3_replica],
'resharding1', 10000004, 'msg-id10000004', 0xD000000000000000)
self._check_multi_dbs(
[shard_3_master, shard_3_replica],
'resharding1', 10000005, 'msg-id10000005', 0xE000000000000000)
self._check_multi_dbs(
[shard_2_master, shard_2_replica1, shard_2_replica2],
'resharding1', 10000011, 'update1', 0x9000000000000000)
self._check_multi_dbs(
[shard_3_master, shard_3_replica],
'resharding1', 10000012, 'update1', 0xD000000000000000)
self._check_multi_dbs(
[shard_3_master, shard_3_replica],
'resharding1', 10000013, 'update2', 0xE000000000000000)
self._check_multi_dbs(
[shard_2_master, shard_2_replica1, shard_2_replica2,
shard_3_master, shard_3_replica],
'resharding1', 10000014, 'msg-id10000014', 0x9000000000000000,
should_be_here=False)
self._check_multi_dbs(
[shard_2_master, shard_2_replica1, shard_2_replica2,
shard_3_master, shard_3_replica],
'resharding1', 10000015, 'msg-id10000015', 0xD000000000000000,
should_be_here=False)
self._check_multi_dbs(
[shard_2_master, shard_2_replica1, shard_2_replica2,
shard_3_master, shard_3_replica],
'resharding1', 10000016, 'msg-id10000016', 0xF000000000000000,
should_be_here=False)
# _check_multi_dbs checks the row in multiple dbs.
def _check_multi_dbs(self, dblist, table, mid, msg, keyspace_id,
should_be_here=True):
for db in dblist:
self._check_value(db, table, mid, msg, keyspace_id, should_be_here)
# _check_lots returns how many of the values we have, in percents.
def _check_lots(self, count, base=0):
found = 0
for i in xrange(count):
if self._is_value_present_and_correct(shard_2_replica2, 'resharding1',
10000 + base + i, 'msg-range1-%d' %
i, 0xA000000000000000 + base + i):
found += 1
if self._is_value_present_and_correct(shard_3_replica, 'resharding1',
20000 + base + i, 'msg-range2-%d' %
i, 0xE000000000000000 + base + i):
found += 1
percent = found * 100 / count / 2
logging.debug('I have %d%% of the data', percent)
return percent
def _check_lots_timeout(self, count, threshold, timeout, base=0):
while True:
value = self._check_lots(count, base=base)
if value >= threshold:
return value
timeout = utils.wait_step('waiting for %d%% of the data' % threshold,
timeout, sleep_time=1)
# _check_lots_not_present makes sure no data is in the wrong shard
def _check_lots_not_present(self, count, base=0):
for i in xrange(count):
self._check_value(shard_3_replica, 'resharding1', 10000 + base + i,
'msg-range1-%d' % i, 0xA000000000000000 + base + i,
should_be_here=False)
self._check_value(shard_2_replica2, 'resharding1', 20000 + base + i,
'msg-range2-%d' % i, 0xE000000000000000 + base + i,
should_be_here=False)
  def test_resharding(self):
    """End-to-end horizontal resharding: split shard 80- into 80-c0 and c0-.

    Phases: keyspace/tablet setup, schema + startup data, creation of the
    destination shards, repeated online/offline SplitClone copies (including
    keyspace_id-change handling), filtered-replication checks, SplitDiff
    verification, serving-type migrations (rdonly, then replica with a
    back-and-forth, then master), and teardown of the source shard.
    The statement order is load-bearing; do not reorder steps.
    """
    # we're going to reparent and swap these two
    global shard_2_master, shard_2_replica1

    # Keyspace is first created with bogus sharding info, then fixed with
    # -force to prove SetKeyspaceShardingInfo refuses a plain overwrite.
    utils.run_vtctl(['CreateKeyspace',
                     '--sharding_column_name', 'bad_column',
                     '--sharding_column_type', 'bytes',
                     'test_keyspace'])
    utils.run_vtctl(['SetKeyspaceShardingInfo', 'test_keyspace',
                     'custom_ksid_col', 'uint64'], expect_fail=True)
    utils.run_vtctl(['SetKeyspaceShardingInfo', '-force',
                     'test_keyspace',
                     'custom_ksid_col', base_sharding.keyspace_id_type])

    shard_0_master.init_tablet('replica', 'test_keyspace', '-80')
    shard_0_replica.init_tablet('replica', 'test_keyspace', '-80')
    shard_0_ny_rdonly.init_tablet('rdonly', 'test_keyspace', '-80')
    shard_1_master.init_tablet('replica', 'test_keyspace', '80-')
    shard_1_slave1.init_tablet('replica', 'test_keyspace', '80-')
    shard_1_slave2.init_tablet('replica', 'test_keyspace', '80-')
    shard_1_ny_rdonly.init_tablet('rdonly', 'test_keyspace', '80-')
    shard_1_rdonly1.init_tablet('rdonly', 'test_keyspace', '80-')

    utils.run_vtctl(['RebuildKeyspaceGraph', 'test_keyspace'], auto_log=True)
    ks = utils.run_vtctl_json(['GetSrvKeyspace', 'test_nj', 'test_keyspace'])
    self.assertEqual(ks['sharding_column_name'], 'custom_ksid_col')

    # we set full_mycnf_args to True as a test in the KIT_BYTES case
    full_mycnf_args = (base_sharding.keyspace_id_type ==
                       keyrange_constants.KIT_BYTES)

    # create databases so vttablet can start behaving somewhat normally
    for t in [shard_0_master, shard_0_replica, shard_0_ny_rdonly,
              shard_1_master, shard_1_slave1, shard_1_slave2, shard_1_ny_rdonly,
              shard_1_rdonly1]:
      t.create_db('vt_test_keyspace')
      t.start_vttablet(wait_for_state=None, full_mycnf_args=full_mycnf_args)

    # wait for the tablets (replication is not setup, they won't be healthy)
    for t in [shard_0_master, shard_0_replica, shard_0_ny_rdonly,
              shard_1_master, shard_1_slave1, shard_1_slave2, shard_1_ny_rdonly,
              shard_1_rdonly1]:
      t.wait_for_vttablet_state('NOT_SERVING')

    # reparent to make the tablets work
    utils.run_vtctl(['InitShardMaster', '-force', 'test_keyspace/-80',
                     shard_0_master.tablet_alias], auto_log=True)
    utils.run_vtctl(['InitShardMaster', '-force', 'test_keyspace/80-',
                     shard_1_master.tablet_alias], auto_log=True)

    # check the shards
    shards = utils.run_vtctl_json(['FindAllShardsInKeyspace', 'test_keyspace'])
    self.assertIn('-80', shards, 'unexpected shards: %s' % str(shards))
    self.assertIn('80-', shards, 'unexpected shards: %s' % str(shards))
    self.assertEqual(len(shards), 2, 'unexpected shards: %s' % str(shards))

    # create the tables
    self._create_schema()
    self._insert_startup_values()

    # run a health check on source replicas so they respond to discovery
    # (for binlog players) and on the source rdonlys (for workers)
    for t in [shard_0_replica, shard_1_slave1]:
      utils.run_vtctl(['RunHealthCheck', t.tablet_alias])
    for t in [shard_0_ny_rdonly, shard_1_ny_rdonly, shard_1_rdonly1]:
      utils.run_vtctl(['RunHealthCheck', t.tablet_alias])

    # create the split shards
    shard_2_master.init_tablet('replica', 'test_keyspace', '80-c0')
    shard_2_replica1.init_tablet('replica', 'test_keyspace', '80-c0')
    shard_2_replica2.init_tablet('replica', 'test_keyspace', '80-c0')
    shard_2_rdonly1.init_tablet('rdonly', 'test_keyspace', '80-c0')
    shard_3_master.init_tablet('replica', 'test_keyspace', 'c0-')
    shard_3_replica.init_tablet('replica', 'test_keyspace', 'c0-')
    shard_3_rdonly1.init_tablet('rdonly', 'test_keyspace', 'c0-')

    # start vttablet on the split shards (no db created,
    # so they're all not serving)
    shard_2_master.start_vttablet(wait_for_state=None)
    shard_3_master.start_vttablet(wait_for_state=None)
    for t in [shard_2_replica1, shard_2_replica2, shard_2_rdonly1,
              shard_3_replica, shard_3_rdonly1]:
      t.start_vttablet(wait_for_state=None)
    for t in [shard_2_master, shard_2_replica1, shard_2_replica2,
              shard_2_rdonly1,
              shard_3_master, shard_3_replica, shard_3_rdonly1]:
      t.wait_for_vttablet_state('NOT_SERVING')

    utils.run_vtctl(['InitShardMaster', '-force', 'test_keyspace/80-c0',
                     shard_2_master.tablet_alias], auto_log=True)
    utils.run_vtctl(['InitShardMaster', '-force', 'test_keyspace/c0-',
                     shard_3_master.tablet_alias], auto_log=True)

    # check the shards
    shards = utils.run_vtctl_json(['FindAllShardsInKeyspace', 'test_keyspace'])
    for s in ['-80', '80-', '80-c0', 'c0-']:
      self.assertIn(s, shards, 'unexpected shards: %s' % str(shards))
    self.assertEqual(len(shards), 4, 'unexpected shards: %s' % str(shards))

    utils.run_vtctl(['RebuildKeyspaceGraph', 'test_keyspace'],
                    auto_log=True)
    utils.check_srv_keyspace(
        'test_nj', 'test_keyspace',
        'Partitions(master): -80 80-\n'
        'Partitions(rdonly): -80 80-\n'
        'Partitions(replica): -80 80-\n',
        keyspace_id_type=base_sharding.keyspace_id_type,
        sharding_column_name='custom_ksid_col')

    # disable shard_1_slave2, so we're sure filtered replication will go
    # from shard_1_slave1
    utils.run_vtctl(['ChangeSlaveType', shard_1_slave2.tablet_alias, 'spare'])
    shard_1_slave2.wait_for_vttablet_state('NOT_SERVING')

    # we need to create the schema, and the worker will do data copying
    for keyspace_shard in ('test_keyspace/80-c0', 'test_keyspace/c0-'):
      utils.run_vtctl(['CopySchemaShard', '--exclude_tables', 'unrelated',
                       shard_1_rdonly1.tablet_alias, keyspace_shard],
                      auto_log=True)

    # Run vtworker as daemon for the following SplitClone commands.
    worker_proc, worker_port, worker_rpc_port = utils.run_vtworker_bg(
        ['--cell', 'test_nj', '--command_display_interval', '10ms'],
        auto_log=True)

    # Copy the data from the source to the destination shards.
    # --max_tps is only specified to enable the throttler and ensure that the
    # code is executed. But the intent here is not to throttle the test, hence
    # the rate limit is set very high.
    #
    # Initial clone (online).
    workerclient_proc = utils.run_vtworker_client_bg(
        ['SplitClone',
         '--offline=false',
         '--exclude_tables', 'unrelated',
         '--chunk_count', '10',
         '--min_rows_per_chunk', '1',
         '--min_healthy_rdonly_tablets', '1',
         '--max_tps', '9999',
         'test_keyspace/80-'],
        worker_rpc_port)
    utils.wait_procs([workerclient_proc])
    self.verify_reconciliation_counters(worker_port, 'Online', 'resharding1',
                                        2, 0, 0, 0)

    # Reset vtworker such that we can run the next command.
    workerclient_proc = utils.run_vtworker_client_bg(['Reset'], worker_rpc_port)
    utils.wait_procs([workerclient_proc])

    # Test the correct handling of keyspace_id changes which happen after
    # the first clone.
    # Let row 2 go to shard 3 instead of shard 2.
    shard_1_master.mquery('vt_test_keyspace',
                          'update resharding1 set'
                          ' custom_ksid_col=0xD000000000000000 WHERE id=2',
                          write=True)
    workerclient_proc = utils.run_vtworker_client_bg(
        ['SplitClone',
         '--offline=false',
         '--exclude_tables', 'unrelated',
         '--chunk_count', '10',
         '--min_rows_per_chunk', '1',
         '--min_healthy_rdonly_tablets', '1',
         '--max_tps', '9999',
         'test_keyspace/80-'],
        worker_rpc_port)
    utils.wait_procs([workerclient_proc])
    # Row 2 will be deleted from shard 2 and inserted to shard 3.
    self.verify_reconciliation_counters(worker_port, 'Online', 'resharding1',
                                        1, 0, 1, 1)
    self._check_value(shard_2_master, 'resharding1', 2, 'msg2',
                      0xD000000000000000, should_be_here=False)
    self._check_value(shard_3_master, 'resharding1', 2, 'msg2',
                      0xD000000000000000)

    # Reset vtworker such that we can run the next command.
    workerclient_proc = utils.run_vtworker_client_bg(['Reset'], worker_rpc_port)
    utils.wait_procs([workerclient_proc])

    # Move row 2 back to shard 2 from shard 3 by changing the keyspace_id again.
    shard_1_master.mquery('vt_test_keyspace',
                          'update resharding1 set'
                          ' custom_ksid_col=0x9000000000000000 WHERE id=2',
                          write=True)
    workerclient_proc = utils.run_vtworker_client_bg(
        ['SplitClone',
         '--offline=false',
         '--exclude_tables', 'unrelated',
         '--chunk_count', '10',
         '--min_rows_per_chunk', '1',
         '--min_healthy_rdonly_tablets', '1',
         '--max_tps', '9999',
         'test_keyspace/80-'],
        worker_rpc_port)
    utils.wait_procs([workerclient_proc])
    # Row 2 will be deleted from shard 3 and inserted to shard 2.
    self.verify_reconciliation_counters(worker_port, 'Online', 'resharding1',
                                        1, 0, 1, 1)
    self._check_value(shard_2_master, 'resharding1', 2, 'msg2',
                      0x9000000000000000)
    self._check_value(shard_3_master, 'resharding1', 2, 'msg2',
                      0x9000000000000000, should_be_here=False)

    # Reset vtworker such that we can run the next command.
    workerclient_proc = utils.run_vtworker_client_bg(['Reset'], worker_rpc_port)
    utils.wait_procs([workerclient_proc])

    # Modify the destination shard. SplitClone will revert the changes.
    # Delete row 2 (provokes an insert).
    shard_2_master.mquery('vt_test_keyspace',
                          'delete from resharding1 where id=2', write=True)
    # Update row 3 (provokes an update).
    shard_3_master.mquery('vt_test_keyspace',
                          "update resharding1 set msg='msg-not-3' where id=3",
                          write=True)
    # Insert row 4 and 5 (provokes a delete).
    self._insert_value(shard_3_master, 'resharding1', 4, 'msg4',
                       0xD000000000000000)
    self._insert_value(shard_3_master, 'resharding1', 5, 'msg5',
                       0xD000000000000000)

    # Final (offline) clone: reconciles the manual changes above.
    workerclient_proc = utils.run_vtworker_client_bg(
        ['SplitClone',
         '--exclude_tables', 'unrelated',
         '--chunk_count', '10',
         '--min_rows_per_chunk', '1',
         '--min_healthy_rdonly_tablets', '1',
         '--max_tps', '9999',
         'test_keyspace/80-'],
        worker_rpc_port)
    utils.wait_procs([workerclient_proc])
    # Change tablet, which was taken offline, back to rdonly.
    utils.run_vtctl(['ChangeSlaveType', shard_1_rdonly1.tablet_alias,
                     'rdonly'], auto_log=True)
    self.verify_reconciliation_counters(worker_port, 'Online', 'resharding1',
                                        1, 1, 2, 0)
    self.verify_reconciliation_counters(worker_port, 'Offline', 'resharding1',
                                        0, 0, 0, 2)
    # Terminate worker daemon because it is no longer needed.
    utils.kill_sub_process(worker_proc, soft=True)

    # TODO(alainjobart): experiment with the dontStartBinlogPlayer option

    # check the startup values are in the right place
    self._check_startup_values()

    # check the schema too
    utils.run_vtctl(['ValidateSchemaKeyspace', '--exclude_tables=unrelated',
                     'test_keyspace'], auto_log=True)

    # check the binlog players are running and exporting vars
    self.check_destination_master(shard_2_master, ['test_keyspace/80-'])
    self.check_destination_master(shard_3_master, ['test_keyspace/80-'])

    # When the binlog players/filtered replication is turned on, the query
    # service must be turned off on the destination masters.
    # The tested behavior is a safeguard to prevent that somebody can
    # accidentally modify data on the destination masters while they are not
    # migrated yet and the source shards are still the source of truth.
    shard_2_master.wait_for_vttablet_state('NOT_SERVING')
    shard_3_master.wait_for_vttablet_state('NOT_SERVING')

    # check that binlog server exported the stats vars
    self.check_binlog_server_vars(shard_1_slave1, horizontal=True)

    # Check that the throttler was enabled.
    self.check_throttler_service(shard_2_master.rpc_endpoint(),
                                 ['BinlogPlayer/0'], 9999)
    self.check_throttler_service(shard_3_master.rpc_endpoint(),
                                 ['BinlogPlayer/0'], 9999)

    # testing filtered replication: insert a bunch of data on shard 1,
    # check we get most of it after a few seconds, wait for binlog server
    # timeout, check we get all of it.
    logging.debug('Inserting lots of data on source shard')
    self._insert_lots(1000)
    logging.debug('Executing MultiValue Insert Queries')
    self._exec_multi_shard_dmls()
    logging.debug('Checking 80 percent of data is sent quickly')
    v = self._check_lots_timeout(1000, 80, 5)
    if v != 100:
      # small optimization: only do this check if we don't have all the data
      # already anyway.
      logging.debug('Checking all data goes through eventually')
      self._check_lots_timeout(1000, 100, 20)
    logging.debug('Checking no data was sent the wrong way')
    self._check_lots_not_present(1000)
    logging.debug('Checking MultiValue Insert Queries')
    self._check_multi_shard_values()
    self.check_binlog_player_vars(shard_2_master, ['test_keyspace/80-'],
                                  seconds_behind_master_max=30)
    self.check_binlog_player_vars(shard_3_master, ['test_keyspace/80-'],
                                  seconds_behind_master_max=30)
    self.check_binlog_server_vars(shard_1_slave1, horizontal=True,
                                  min_statements=1000, min_transactions=1000)

    # use vtworker to compare the data (after health-checking the destination
    # rdonly tablets so discovery works)
    utils.run_vtctl(['RunHealthCheck', shard_3_rdonly1.tablet_alias])
    logging.debug('Running vtworker SplitDiff')
    utils.run_vtworker(['-cell', 'test_nj', 'SplitDiff',
                        '--exclude_tables', 'unrelated',
                        '--min_healthy_rdonly_tablets', '1',
                        'test_keyspace/c0-'],
                       auto_log=True)
    utils.run_vtctl(['ChangeSlaveType', shard_1_rdonly1.tablet_alias, 'rdonly'],
                    auto_log=True)
    utils.run_vtctl(['ChangeSlaveType', shard_3_rdonly1.tablet_alias, 'rdonly'],
                    auto_log=True)

    utils.pause('Good time to test vtworker for diffs')

    # get status for destination master tablets, make sure we have it all
    if base_sharding.use_rbr:
      # We submitted non-annotated DMLs, that are properly routed
      # with RBR, but not with SBR. So the first shard counts
      # are smaller. In the second shard, we submitted statements
      # that affect more than one keyspace id. These will result
      # in two queries with RBR. So the count there is higher.
      self.check_running_binlog_player(shard_2_master, 4018, 2008)
      self.check_running_binlog_player(shard_3_master, 4028, 2008)
    else:
      self.check_running_binlog_player(shard_2_master, 4022, 2008)
      self.check_running_binlog_player(shard_3_master, 4024, 2008)

    # start a thread to insert data into shard_1 in the background
    # with current time, and monitor the delay
    insert_thread_1 = InsertThread(shard_1_master, 'insert_low', 1, 10000,
                                   0x9000000000000000)
    insert_thread_2 = InsertThread(shard_1_master, 'insert_high', 2, 10001,
                                   0xD000000000000000)
    monitor_thread_1 = MonitorLagThread(shard_2_replica2, 'insert_low', 1)
    monitor_thread_2 = MonitorLagThread(shard_3_replica, 'insert_high', 2)

    # tests a failover switching serving to a different replica
    utils.run_vtctl(['ChangeSlaveType', shard_1_slave2.tablet_alias, 'replica'])
    utils.run_vtctl(['ChangeSlaveType', shard_1_slave1.tablet_alias, 'spare'])
    shard_1_slave2.wait_for_vttablet_state('SERVING')
    shard_1_slave1.wait_for_vttablet_state('NOT_SERVING')
    utils.run_vtctl(['RunHealthCheck', shard_1_slave2.tablet_alias])

    # test data goes through again
    logging.debug('Inserting lots of data on source shard')
    self._insert_lots(1000, base=1000)
    logging.debug('Checking 80 percent of data was sent quickly')
    self._check_lots_timeout(1000, 80, 5, base=1000)
    self.check_binlog_server_vars(shard_1_slave2, horizontal=True,
                                  min_statements=800, min_transactions=800)

    # check we can't migrate the master just yet
    utils.run_vtctl(['MigrateServedTypes', 'test_keyspace/80-', 'master'],
                    expect_fail=True)

    # check query service is off on master 2 and master 3, as filtered
    # replication is enabled. Even health check that is enabled on
    # master 3 should not interfere (we run it to be sure).
    utils.run_vtctl(['RunHealthCheck', shard_3_master.tablet_alias],
                    auto_log=True)
    for master in [shard_2_master, shard_3_master]:
      utils.check_tablet_query_service(self, master, False, False)
      stream_health = utils.run_vtctl_json(['VtTabletStreamHealth',
                                            '-count', '1',
                                            master.tablet_alias])
      logging.debug('Got health: %s', str(stream_health))
      self.assertIn('realtime_stats', stream_health)
      self.assertNotIn('serving', stream_health)

    # check the destination master 3 is healthy, even though its query
    # service is not running (if not healthy this would exception out)
    shard_3_master.get_healthz()

    # now serve rdonly from the split shards, in test_nj only
    utils.run_vtctl(['MigrateServedTypes', '--cells=test_nj',
                     'test_keyspace/80-', 'rdonly'], auto_log=True)
    utils.check_srv_keyspace('test_nj', 'test_keyspace',
                             'Partitions(master): -80 80-\n'
                             'Partitions(rdonly): -80 80-c0 c0-\n'
                             'Partitions(replica): -80 80-\n',
                             keyspace_id_type=base_sharding.keyspace_id_type,
                             sharding_column_name='custom_ksid_col')
    utils.check_srv_keyspace('test_ny', 'test_keyspace',
                             'Partitions(master): -80 80-\n'
                             'Partitions(rdonly): -80 80-\n'
                             'Partitions(replica): -80 80-\n',
                             keyspace_id_type=base_sharding.keyspace_id_type,
                             sharding_column_name='custom_ksid_col')
    utils.check_tablet_query_service(self, shard_0_ny_rdonly, True, False)
    utils.check_tablet_query_service(self, shard_1_ny_rdonly, True, False)
    utils.check_tablet_query_service(self, shard_1_rdonly1, False, True)

    # now serve rdonly from the split shards, everywhere
    utils.run_vtctl(['MigrateServedTypes', 'test_keyspace/80-', 'rdonly'],
                    auto_log=True)
    utils.check_srv_keyspace('test_nj', 'test_keyspace',
                             'Partitions(master): -80 80-\n'
                             'Partitions(rdonly): -80 80-c0 c0-\n'
                             'Partitions(replica): -80 80-\n',
                             keyspace_id_type=base_sharding.keyspace_id_type,
                             sharding_column_name='custom_ksid_col')
    utils.check_srv_keyspace('test_ny', 'test_keyspace',
                             'Partitions(master): -80 80-\n'
                             'Partitions(rdonly): -80 80-c0 c0-\n'
                             'Partitions(replica): -80 80-\n',
                             keyspace_id_type=base_sharding.keyspace_id_type,
                             sharding_column_name='custom_ksid_col')
    utils.check_tablet_query_service(self, shard_0_ny_rdonly, True, False)
    utils.check_tablet_query_service(self, shard_1_ny_rdonly, False, True)
    utils.check_tablet_query_service(self, shard_1_rdonly1, False, True)

    # then serve replica from the split shards
    destination_shards = ['test_keyspace/80-c0', 'test_keyspace/c0-']

    utils.run_vtctl(['MigrateServedTypes', 'test_keyspace/80-', 'replica'],
                    auto_log=True)
    utils.check_srv_keyspace('test_nj', 'test_keyspace',
                             'Partitions(master): -80 80-\n'
                             'Partitions(rdonly): -80 80-c0 c0-\n'
                             'Partitions(replica): -80 80-c0 c0-\n',
                             keyspace_id_type=base_sharding.keyspace_id_type,
                             sharding_column_name='custom_ksid_col')
    utils.check_tablet_query_service(self, shard_1_slave2, False, True)

    # move replica back and forth
    utils.run_vtctl(
        ['MigrateServedTypes', '-reverse', 'test_keyspace/80-', 'replica'],
        auto_log=True)
    # After a backwards migration, queryservice should be enabled on
    # source and disabled on destinations
    utils.check_tablet_query_service(self, shard_1_slave2, True, False)
    # Destination tablets would have query service disabled for other
    # reasons than the migration, so check the shard record instead of
    # the tablets directly.
    utils.check_shard_query_services(self, destination_shards,
                                     topodata_pb2.REPLICA, False)
    utils.check_srv_keyspace('test_nj', 'test_keyspace',
                             'Partitions(master): -80 80-\n'
                             'Partitions(rdonly): -80 80-c0 c0-\n'
                             'Partitions(replica): -80 80-\n',
                             keyspace_id_type=base_sharding.keyspace_id_type,
                             sharding_column_name='custom_ksid_col')

    utils.run_vtctl(['MigrateServedTypes', 'test_keyspace/80-', 'replica'],
                    auto_log=True)
    # After a forwards migration, queryservice should be disabled on
    # source and enabled on destinations
    utils.check_tablet_query_service(self, shard_1_slave2, False, True)
    # Destination tablets would have query service disabled for other
    # reasons than the migration, so check the shard record instead of
    # the tablets directly
    utils.check_shard_query_services(self, destination_shards,
                                     topodata_pb2.REPLICA, True)
    utils.check_srv_keyspace('test_nj', 'test_keyspace',
                             'Partitions(master): -80 80-\n'
                             'Partitions(rdonly): -80 80-c0 c0-\n'
                             'Partitions(replica): -80 80-c0 c0-\n',
                             keyspace_id_type=base_sharding.keyspace_id_type,
                             sharding_column_name='custom_ksid_col')

    # reparent shard_2 to shard_2_replica1, then insert more data and
    # see it flow through still
    utils.run_vtctl(['PlannedReparentShard',
                     '-keyspace_shard', 'test_keyspace/80-c0',
                     '-new_master', shard_2_replica1.tablet_alias])

    # update our test variables to point at the new master
    shard_2_master, shard_2_replica1 = shard_2_replica1, shard_2_master

    logging.debug('Inserting lots of data on source shard after reparenting')
    self._insert_lots(3000, base=2000)
    logging.debug('Checking 80 percent of data was sent fairly quickly')
    self._check_lots_timeout(3000, 80, 10, base=2000)

    # use vtworker to compare the data again
    logging.debug('Running vtworker SplitDiff')
    utils.run_vtworker(['-cell', 'test_nj', 'SplitDiff',
                        '--exclude_tables', 'unrelated',
                        '--min_healthy_rdonly_tablets', '1',
                        'test_keyspace/c0-'],
                       auto_log=True)
    utils.run_vtctl(['ChangeSlaveType', shard_1_rdonly1.tablet_alias, 'rdonly'],
                    auto_log=True)
    utils.run_vtctl(['ChangeSlaveType', shard_3_rdonly1.tablet_alias, 'rdonly'],
                    auto_log=True)

    # going to migrate the master now, check the delays
    monitor_thread_1.done = True
    monitor_thread_2.done = True
    insert_thread_1.done = True
    insert_thread_2.done = True
    logging.debug('DELAY 1: %s max_lag=%d ms avg_lag=%d ms',
                  monitor_thread_1.thread_name,
                  monitor_thread_1.max_lag_ms,
                  monitor_thread_1.lag_sum_ms / monitor_thread_1.sample_count)
    logging.debug('DELAY 2: %s max_lag=%d ms avg_lag=%d ms',
                  monitor_thread_2.thread_name,
                  monitor_thread_2.max_lag_ms,
                  monitor_thread_2.lag_sum_ms / monitor_thread_2.sample_count)

    # mock with the SourceShard records to test 'vtctl SourceShardDelete'
    # and 'vtctl SourceShardAdd'
    utils.run_vtctl(['SourceShardDelete', 'test_keyspace/c0-', '0'],
                    auto_log=True)
    utils.run_vtctl(['SourceShardAdd', '--key_range=80-',
                     'test_keyspace/c0-', '0', 'test_keyspace/80-'],
                    auto_log=True)

    # then serve master from the split shards, make sure the source master's
    # query service is now turned off
    utils.run_vtctl(['MigrateServedTypes', 'test_keyspace/80-', 'master'],
                    auto_log=True)
    utils.check_srv_keyspace('test_nj', 'test_keyspace',
                             'Partitions(master): -80 80-c0 c0-\n'
                             'Partitions(rdonly): -80 80-c0 c0-\n'
                             'Partitions(replica): -80 80-c0 c0-\n',
                             keyspace_id_type=base_sharding.keyspace_id_type,
                             sharding_column_name='custom_ksid_col')
    utils.check_tablet_query_service(self, shard_1_master, False, True)

    # check the binlog players are gone now
    self.check_no_binlog_player(shard_2_master)
    self.check_no_binlog_player(shard_3_master)

    # delete the original tablets in the original shard
    tablet.kill_tablets([shard_1_master, shard_1_slave1, shard_1_slave2,
                         shard_1_ny_rdonly, shard_1_rdonly1])
    for t in [shard_1_slave1, shard_1_slave2, shard_1_ny_rdonly,
              shard_1_rdonly1]:
      utils.run_vtctl(['DeleteTablet', t.tablet_alias], auto_log=True)
    utils.run_vtctl(['DeleteTablet', '-allow_master',
                     shard_1_master.tablet_alias], auto_log=True)

    # rebuild the serving graph, all mentions of the old shards shoud be gone
    utils.run_vtctl(
        ['RebuildKeyspaceGraph', 'test_keyspace'], auto_log=True)

    # test RemoveShardCell
    utils.run_vtctl(
        ['RemoveShardCell', 'test_keyspace/-80', 'test_nj'], auto_log=True,
        expect_fail=True)
    utils.run_vtctl(
        ['RemoveShardCell', 'test_keyspace/80-', 'test_nj'], auto_log=True)
    utils.run_vtctl(
        ['RemoveShardCell', 'test_keyspace/80-', 'test_ny'], auto_log=True)
    shard = utils.run_vtctl_json(['GetShard', 'test_keyspace/80-'])
    self.assertNotIn('cells', shard)

    # delete the original shard
    utils.run_vtctl(['DeleteShard', 'test_keyspace/80-'], auto_log=True)

    # make sure we can't delete the destination shard now that it's serving
    _, stderr = utils.run_vtctl(['DeleteShard', 'test_keyspace/80-c0'],
                                expect_fail=True)
    self.assertIn('is still serving, cannot delete it', stderr)

    # kill everything
    tablet.kill_tablets([shard_0_master, shard_0_replica, shard_0_ny_rdonly,
                         shard_2_master, shard_2_replica1, shard_2_replica2,
                         shard_2_rdonly1,
                         shard_3_master, shard_3_replica, shard_3_rdonly1])
# Entry point: delegate argument parsing and test execution to the
# vitess test-utils main wrapper.
if __name__ == '__main__':
  utils.main()
| |
# Copyright 2017 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Receive push events for new builds and upload rows to BigQuery."""
from __future__ import print_function
import argparse
import json
import os
import pprint
import socket
import sys
import traceback
import time
import multiprocessing.pool
try:
from google.cloud import bigquery
from google.cloud import pubsub
import google.cloud.exceptions
except ImportError:
print('WARNING: unable to load google cloud (test environment?)')
traceback.print_exc()
import model
import make_db
import make_json
def process_changes(results):
    """Split GCS change events into trivial acks and builds to further process.

    Args:
        results: iterable of (ack_id, message) pairs from a pubsub pull.
    Returns:
        (acks, todo): message ids safe to acknowledge immediately, and
        (ack_id, job, build) tuples for finished builds worth downloading.
    """
    trivial_acks = []   # pubsub message ids to acknowledge right away
    to_fetch = []       # (ack_id, job, build) of builds to grab
    for ack_id, message in results:
        attrs = message.attributes
        # Only newly-written objects matter; everything else is noise.
        if attrs['eventType'] != 'OBJECT_FINALIZE':
            trivial_acks.append(ack_id)
            continue
        obj = attrs['objectId']
        # A build is complete exactly when its finished.json lands.
        if not obj.endswith('/finished.json'):
            trivial_acks.append(ack_id)
            continue
        job, build = obj[:-len('/finished.json')].rsplit('/', 1)
        job = 'gs://%s/%s' % (attrs['bucketId'], job)
        to_fetch.append((ack_id, job, build))
    return trivial_acks, to_fetch
def get_started_finished(gcs_client, db, todo):
    """Download started/finished.json from build dirs in todo.

    Args:
        gcs_client: client exposing get_started_finished(job, build)
            -> (build_dir, started dict or None, finished dict or None).
        db: build database; receives insert_build() calls and a final commit().
        todo: iterable of (ack_id, job, build) tuples.
    Returns:
        (acks, build_dirs): ack ids of fully-handled messages, and the
        build directories that had a finished.json.
    """
    acks = []
    build_dirs = []
    pool = multiprocessing.pool.ThreadPool(16)

    def fetch(work):
        # NOTE: the original used a tuple-parameter lambda
        # (`lambda (ack_id, job, build): ...`), which is Python-2-only
        # syntax (removed by PEP 3113); a plain function works on both.
        ack_id, job, build = work
        return ack_id, gcs_client.get_started_finished(job, build)

    try:
        for ack_id, (build_dir, started, finished) in pool.imap_unordered(
                fetch, todo):
            if finished:
                if not db.insert_build(build_dir, started, finished):
                    print('already present??')
                start = time.localtime(started.get('timestamp', 0) if started else 0)
                print(build_dir, bool(started), bool(finished),
                      time.strftime('%F %T %Z', start),
                      finished and finished.get('result'))
                build_dirs.append(build_dir)
                acks.append(ack_id)
            else:
                # Don't ack: the message will be redelivered and retried.
                print('finished.json missing?', build_dir, started, finished)
    finally:
        pool.close()
    db.commit()
    return acks, build_dirs
def row_to_mapping(row, schema):
    """Convert a dictionary to a list for bigquery.Table.insert_data.

    Silly. See https://github.com/GoogleCloudPlatform/google-cloud-python/issues/3396

    Missing fields become None, except REPEATED fields which become [].
    """
    mapped = []
    for field in schema:
        empty = [] if field.mode == 'REPEATED' else None
        mapped.append(row.get(field.name, empty))
    return mapped
def retry(func, *args, **kwargs):
    """Run a function with arguments, retrying on server errors.

    Makes up to 20 attempts with exponential backoff (1.4**attempt
    seconds between tries), then one final attempt whose exception, if
    any, propagates to the caller.
    """
    # pylint: disable=no-member
    # range (not xrange) so this also runs under Python 3; the list of
    # 20 ints built on Python 2 is negligible.
    for attempt in range(20):
        try:
            return func(*args, **kwargs)
        except (socket.error, google.cloud.exceptions.ServerError):
            # retry with exponential backoff
            traceback.print_exc()
            time.sleep(1.4 ** attempt)
    return func(*args, **kwargs)  # one last attempt
def insert_data(table, rows_iter):
    """Upload rows from rows_iter into bigquery table table.

    rows_iter should return a series of (row_id, row dictionary) tuples.
    The row dictionary must match the table's schema.
    Returns the set of row_ids that were emitted: inserted rows plus rows
    skipped because they were too large to upload.
    """
    emitted = set()
    rows = []
    row_ids = []
    for row_id, row in rows_iter:
        # Mark the row emitted even if we skip it below, so over-long
        # rows are recorded and not re-processed forever.
        emitted.add(row_id)
        if len(json.dumps(row)) > 1e6:
            print('ERROR: row too long', row['path'])
            continue
        row = row_to_mapping(row, table.schema)
        rows.append(row)
        row_ids.append(row_id)
    if not rows:  # nothing to insert
        # BUGFIX: was `return []`, which dropped the ids of over-long rows
        # added to `emitted` above (and returned a list where the success
        # path returns a set). Return `emitted` for consistency.
        return emitted
    def insert(table, rows, row_ids):
        """Insert rows with row_ids into table, retrying as necessary."""
        errors = retry(table.insert_data, rows, row_ids, skip_invalid_rows=True)
        if not errors:
            print('Loaded {} builds into {}'.format(len(rows), table.name))
        else:
            print('Errors:')
            pprint.pprint(errors)
            pprint.pprint(table.schema)
    if len(json.dumps(rows)) > 10e6:
        # Streaming inserts are capped in request size; fall back to
        # one-row-at-a-time uploads for oversized batches.
        print('WARNING: too big for one insert, doing stupid slow version')
        for row, row_id in zip(rows, row_ids):
            insert(table, [row], [row_id])
    else:
        insert(table, rows, row_ids)
    return emitted
def main(db, sub, tables, client_class=make_db.GCSClient, stop=None):
    """Pull GCS change notifications forever and stream new builds to BigQuery.

    Args:
        db: build database (model.Database).
        sub: pubsub subscription to pull change events from.
        tables: {name: (bigquery.Table, incremental table name)} or falsy.
        client_class: factory for the GCS client (injectable for tests).
        stop: zero-arg callable; the loop exits once it returns True.
    """
    # pylint: disable=too-many-locals
    gcs_client = client_class('', {})
    if stop is None:
        stop = lambda: False

    results = [0] * 1000  # don't sleep on first loop
    while not stop():
        print()
        if len(results) < 10 and client_class is make_db.GCSClient:
            time.sleep(5)  # slow down!
        print('====', time.strftime("%F %T %Z"), '=' * 40)
        results = retry(sub.pull, max_messages=1000)
        start = time.time()
        # Drain whatever else is immediately available for up to 7 seconds
        # so we process changes in bigger, cheaper batches.
        while time.time() < start + 7:
            results_more = sub.pull(max_messages=1000, return_immediately=True)
            if not results_more:
                break
            results += results_more
        print('PULLED', len(results))

        acks, todo = process_changes(results)

        if acks:
            print('ACK irrelevant', len(acks))
            # range (not xrange) for Python 2/3 compatibility.
            for n in range(0, len(acks), 1000):
                retry(sub.acknowledge, acks[n: n + 1000])

        if todo:
            print('EXTEND-ACK ', len(todo))
            # give 3 minutes to grab build details
            retry(sub.modify_ack_deadline, [i for i, _j, _b in todo], 60 * 3)
            acks, build_dirs = get_started_finished(gcs_client, db, todo)

            # notify pubsub queue that we've handled the finished.json messages
            if acks:
                print('ACK "finished.json"', len(acks))
                retry(sub.acknowledge, acks)

            # grab junit files for new builds
            make_db.download_junit(db, 16, client_class)

            # stream new rows to tables
            if build_dirs and tables:
                # .values() (not .itervalues()) works on Python 2 and 3;
                # the tables dict is tiny, so the py2 list copy is free.
                for table, incremental_table in tables.values():
                    builds = db.get_builds_from_paths(build_dirs,
                                                      incremental_table)
                    emitted = insert_data(table, make_json.make_rows(db, builds))
                    db.insert_emitted(emitted, incremental_table)
def load_sub(poll):
    """Return the PubSub subscription specified by the /-separated input.

    Args:
        poll: string of the form "project/topic/subscription".
    """
    project, topic, subscription = poll.split('/')
    client = pubsub.Client(project)
    return client.topic(topic).subscription(subscription)
def load_schema(schemafield):
    """Construct the expected BigQuery schema from files on disk.

    Only used for new tables.

    Args:
        schemafield: constructor for schema fields, normally
            bigquery.schema.SchemaField.
    Returns:
        list of schemafield objects mirroring schema.json.
    """
    basedir = os.path.dirname(__file__)
    # Use a context manager so the file handle is closed deterministically
    # (the original bare json.load(open(...)) leaked it until GC).
    with open(os.path.join(basedir, 'schema.json')) as schema_file:
        schema_json = json.load(schema_file)

    def make_field(spec):
        # The JSON uses the REST key "type"; the constructor wants
        # "field_type". Nested RECORD fields are built recursively.
        spec['field_type'] = spec.pop('type')
        if 'fields' in spec:
            spec['fields'] = [make_field(f) for f in spec['fields']]
        return schemafield(**spec)

    return [make_field(s) for s in schema_json]
def load_tables(dataset, tablespecs):
    """Construct a dictionary of BigQuery tables given the input tablespec.

    Args:
        dataset: bigquery.Dataset
        tablespecs: list of strings of "NAME:DAYS", e.g. ["day:1"]
    Returns:
        {name: (bigquery.Table, incremental table name)}
    """
    project, dataset_name = dataset.split(':')
    bq_dataset = bigquery.Client(project).dataset(dataset_name)

    tables = {}
    for spec in tablespecs:
        name, days = spec.split(':')
        table = bq_dataset.table(name)
        try:
            table.reload()
        except google.cloud.exceptions.NotFound:  # pylint: disable=no-member
            # Brand-new table: apply the on-disk schema and create it.
            table.schema = load_schema(bigquery.schema.SchemaField)
            table.create()
        tables[name] = (table, make_json.get_table(float(days)))
    return tables
class StopWhen(object):
    """A simple object that returns True once when the given hour begins.

    Also returns True whenever a file named 'stop' exists in the working
    directory, providing a manual kill switch.
    """

    def __init__(self, target, clock=lambda: time.localtime().tm_hour):
        self.clock = clock
        self.last = self.clock()
        self.target = target

    def __call__(self):
        # Manual override: a 'stop' file forces an immediate shutdown.
        if os.path.exists('stop'):
            return True
        now = self.clock()
        previous, self.last = self.last, now
        # Fire only on the transition into the target hour, not while
        # sitting inside it.
        return now != previous and now == self.target
def get_options(argv):
    """Parse command line arguments into an argparse namespace."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--poll', required=True,
        help='Follow GCS changes from project/topic/subscription')
    parser.add_argument(
        '--dataset',
        help='BigQuery dataset (e.g. k8s-gubernator:build)')
    parser.add_argument(
        '--tables', nargs='+', default=[],
        help='Upload rows to table:days [e.g. --tables day:1 week:7 all:0]')
    parser.add_argument(
        '--stop_at', type=int,
        help='Terminate when this hour (0-23) rolls around (in local time).')
    return parser.parse_args(argv)
if __name__ == '__main__':
    # Entry point: open the local build database, follow the PubSub
    # subscription, and stream rows into the configured BigQuery tables
    # until the --stop_at hour arrives (or a 'stop' file appears).
    OPTIONS = get_options(sys.argv[1:])
    main(model.Database('build.db'),
         load_sub(OPTIONS.poll),
         load_tables(OPTIONS.dataset, OPTIONS.tables),
         stop=StopWhen(OPTIONS.stop_at))
| |
import time
import pytest
import uqbar.strings
import supriya.assets.synthdefs
import supriya.nonrealtime
import supriya.patterns
import supriya.ugens
from supriya import Parameter, SynthDefBuilder
# Module-level fixture: three sequential notes (1s/2s/3s at 440/660/880 Hz)
# routed through a private audio bus with a 0.25 second release tail.
pbus_01 = supriya.patterns.Pbus(
    pattern=supriya.patterns.Pbind(
        amplitude=1.0,
        duration=supriya.patterns.Pseq([1.0, 2.0, 3.0], 1),
        frequency=supriya.patterns.Pseq([440, 660, 880], 1),
    ),
    release_time=0.25,
)
# Module-level fixture: a monophonic pattern (one reused synth, Pmono) of
# three 0.75s notes, routed through a Pbus with its default release time.
pbus_02 = supriya.patterns.Pbus(
    supriya.patterns.Pmono(
        amplitude=1.0,
        duration=0.75,
        frequency=supriya.patterns.Pseq([222, 333, 444], 1),
    )
)
def test___iter___01():
    """Iterating pbus_01 yields bus/group/link setup, three notes, teardown."""
    events = list(pbus_01)
    assert pytest.helpers.get_objects_as_string(
        events, replace_uuids=True
    ) == uqbar.strings.normalize(
        """
        CompositeEvent(
            events=(
                BusEvent(
                    calculation_rate=CalculationRate.AUDIO,
                    channel_count=2,
                    uuid=UUID('A'),
                    ),
                GroupEvent(
                    uuid=UUID('B'),
                    ),
                SynthEvent(
                    add_action=AddAction.ADD_AFTER,
                    amplitude=1.0,
                    fade_time=0.25,
                    in_=UUID('A'),
                    synthdef=<SynthDef: system_link_audio_2>,
                    target_node=UUID('B'),
                    uuid=UUID('C'),
                    ),
                ),
            )
        NoteEvent(
            amplitude=1.0,
            delta=1.0,
            duration=1.0,
            frequency=440,
            out=UUID('A'),
            target_node=UUID('B'),
            uuid=UUID('D'),
            )
        NoteEvent(
            amplitude=1.0,
            delta=2.0,
            duration=2.0,
            frequency=660,
            out=UUID('A'),
            target_node=UUID('B'),
            uuid=UUID('E'),
            )
        NoteEvent(
            amplitude=1.0,
            delta=3.0,
            duration=3.0,
            frequency=880,
            out=UUID('A'),
            target_node=UUID('B'),
            uuid=UUID('F'),
            )
        CompositeEvent(
            events=(
                SynthEvent(
                    is_stop=True,
                    uuid=UUID('C'),
                    ),
                NullEvent(
                    delta=0.25,
                    ),
                GroupEvent(
                    is_stop=True,
                    uuid=UUID('B'),
                    ),
                BusEvent(
                    calculation_rate=None,
                    channel_count=None,
                    is_stop=True,
                    uuid=UUID('A'),
                    ),
                ),
            is_stop=True,
            )
        """
    )
def test___iter___02():
    """Pmono notes share uuid 'D'; non-final notes carry is_stop=False."""
    events = list(pbus_02)
    assert pytest.helpers.get_objects_as_string(
        events, replace_uuids=True
    ) == uqbar.strings.normalize(
        """
        CompositeEvent(
            events=(
                BusEvent(
                    calculation_rate=CalculationRate.AUDIO,
                    channel_count=2,
                    uuid=UUID('A'),
                    ),
                GroupEvent(
                    uuid=UUID('B'),
                    ),
                SynthEvent(
                    add_action=AddAction.ADD_AFTER,
                    amplitude=1.0,
                    fade_time=0.25,
                    in_=UUID('A'),
                    synthdef=<SynthDef: system_link_audio_2>,
                    target_node=UUID('B'),
                    uuid=UUID('C'),
                    ),
                ),
            )
        NoteEvent(
            amplitude=1.0,
            delta=0.75,
            duration=0.75,
            frequency=222,
            is_stop=False,
            out=UUID('A'),
            target_node=UUID('B'),
            uuid=UUID('D'),
            )
        NoteEvent(
            amplitude=1.0,
            delta=0.75,
            duration=0.75,
            frequency=333,
            is_stop=False,
            out=UUID('A'),
            target_node=UUID('B'),
            uuid=UUID('D'),
            )
        NoteEvent(
            amplitude=1.0,
            delta=0.75,
            duration=0.75,
            frequency=444,
            out=UUID('A'),
            target_node=UUID('B'),
            uuid=UUID('D'),
            )
        CompositeEvent(
            events=(
                SynthEvent(
                    is_stop=True,
                    uuid=UUID('C'),
                    ),
                NullEvent(
                    delta=0.25,
                    ),
                GroupEvent(
                    is_stop=True,
                    uuid=UUID('B'),
                    ),
                BusEvent(
                    calculation_rate=None,
                    channel_count=None,
                    is_stop=True,
                    uuid=UUID('A'),
                    ),
                ),
            is_stop=True,
            )
        """
    )
def test_send_01a():
    """With one iteration consumed, send emits only setup then teardown."""
    events = pytest.helpers.setup_pattern_send(pbus_01, iterations=1)
    assert pytest.helpers.get_objects_as_string(
        events, replace_uuids=True
    ) == uqbar.strings.normalize(
        """
        CompositeEvent(
            events=(
                BusEvent(
                    calculation_rate=CalculationRate.AUDIO,
                    channel_count=2,
                    uuid=UUID('A'),
                    ),
                GroupEvent(
                    uuid=UUID('B'),
                    ),
                SynthEvent(
                    add_action=AddAction.ADD_AFTER,
                    amplitude=1.0,
                    fade_time=0.25,
                    in_=UUID('A'),
                    synthdef=<SynthDef: system_link_audio_2>,
                    target_node=UUID('B'),
                    uuid=UUID('C'),
                    ),
                ),
            )
        CompositeEvent(
            events=(
                SynthEvent(
                    is_stop=True,
                    uuid=UUID('C'),
                    ),
                NullEvent(
                    delta=0.25,
                    ),
                GroupEvent(
                    is_stop=True,
                    uuid=UUID('B'),
                    ),
                BusEvent(
                    calculation_rate=None,
                    channel_count=None,
                    is_stop=True,
                    uuid=UUID('A'),
                    ),
                ),
            is_stop=True,
            )
        """
    )
def test_send_01b():
    """Two iterations: setup, the first note (440 Hz), then teardown."""
    events = pytest.helpers.setup_pattern_send(pbus_01, iterations=2)
    assert pytest.helpers.get_objects_as_string(
        events, replace_uuids=True
    ) == uqbar.strings.normalize(
        """
        CompositeEvent(
            events=(
                BusEvent(
                    calculation_rate=CalculationRate.AUDIO,
                    channel_count=2,
                    uuid=UUID('A'),
                    ),
                GroupEvent(
                    uuid=UUID('B'),
                    ),
                SynthEvent(
                    add_action=AddAction.ADD_AFTER,
                    amplitude=1.0,
                    fade_time=0.25,
                    in_=UUID('A'),
                    synthdef=<SynthDef: system_link_audio_2>,
                    target_node=UUID('B'),
                    uuid=UUID('C'),
                    ),
                ),
            )
        NoteEvent(
            amplitude=1.0,
            delta=1.0,
            duration=1.0,
            frequency=440,
            out=UUID('A'),
            target_node=UUID('B'),
            uuid=UUID('D'),
            )
        CompositeEvent(
            events=(
                SynthEvent(
                    is_stop=True,
                    uuid=UUID('C'),
                    ),
                NullEvent(
                    delta=0.25,
                    ),
                GroupEvent(
                    is_stop=True,
                    uuid=UUID('B'),
                    ),
                BusEvent(
                    calculation_rate=None,
                    channel_count=None,
                    is_stop=True,
                    uuid=UUID('A'),
                    ),
                ),
            is_stop=True,
            )
        """
    )
def test_send_02a():
    """One iteration of the Pmono pattern: setup then teardown, no notes."""
    events = pytest.helpers.setup_pattern_send(pbus_02, iterations=1)
    assert pytest.helpers.get_objects_as_string(
        events, replace_uuids=True
    ) == uqbar.strings.normalize(
        """
        CompositeEvent(
            events=(
                BusEvent(
                    calculation_rate=CalculationRate.AUDIO,
                    channel_count=2,
                    uuid=UUID('A'),
                    ),
                GroupEvent(
                    uuid=UUID('B'),
                    ),
                SynthEvent(
                    add_action=AddAction.ADD_AFTER,
                    amplitude=1.0,
                    fade_time=0.25,
                    in_=UUID('A'),
                    synthdef=<SynthDef: system_link_audio_2>,
                    target_node=UUID('B'),
                    uuid=UUID('C'),
                    ),
                ),
            )
        CompositeEvent(
            events=(
                SynthEvent(
                    is_stop=True,
                    uuid=UUID('C'),
                    ),
                NullEvent(
                    delta=0.25,
                    ),
                GroupEvent(
                    is_stop=True,
                    uuid=UUID('B'),
                    ),
                BusEvent(
                    calculation_rate=None,
                    channel_count=None,
                    is_stop=True,
                    uuid=UUID('A'),
                    ),
                ),
            is_stop=True,
            )
        """
    )
def test_send_02b():
    """Two iterations of Pmono: setup, first note (222 Hz), teardown."""
    events = pytest.helpers.setup_pattern_send(pbus_02, iterations=2)
    assert pytest.helpers.get_objects_as_string(
        events, replace_uuids=True
    ) == uqbar.strings.normalize(
        """
        CompositeEvent(
            events=(
                BusEvent(
                    calculation_rate=CalculationRate.AUDIO,
                    channel_count=2,
                    uuid=UUID('A'),
                    ),
                GroupEvent(
                    uuid=UUID('B'),
                    ),
                SynthEvent(
                    add_action=AddAction.ADD_AFTER,
                    amplitude=1.0,
                    fade_time=0.25,
                    in_=UUID('A'),
                    synthdef=<SynthDef: system_link_audio_2>,
                    target_node=UUID('B'),
                    uuid=UUID('C'),
                    ),
                ),
            )
        NoteEvent(
            amplitude=1.0,
            delta=0.75,
            duration=0.75,
            frequency=222,
            is_stop=False,
            out=UUID('A'),
            target_node=UUID('B'),
            uuid=UUID('D'),
            )
        CompositeEvent(
            events=(
                SynthEvent(
                    is_stop=True,
                    uuid=UUID('C'),
                    ),
                NullEvent(
                    delta=0.25,
                    ),
                GroupEvent(
                    is_stop=True,
                    uuid=UUID('B'),
                    ),
                BusEvent(
                    calculation_rate=None,
                    channel_count=None,
                    is_stop=True,
                    uuid=UUID('A'),
                    ),
                ),
            is_stop=True,
            )
        """
    )
def test_manual_incommunicado():
    """Offline (incommunicado) playback of pbus_01 produces the expected
    timestamped OSC bundles and inter-event deltas."""
    lists, deltas = pytest.helpers.manual_incommunicado(pbus_01)
    assert lists == [
        [
            10,
            [
                ["/g_new", 1000, 0, 1],
                [
                    "/s_new",
                    "system_link_audio_2",
                    1001,
                    3,
                    1000,
                    "fade_time",
                    0.25,
                    "in_",
                    0,
                ],
                [
                    "/s_new",
                    "default",
                    1002,
                    0,
                    1000,
                    "amplitude",
                    1.0,
                    "frequency",
                    440,
                    "out",
                    0,
                ],
            ],
        ],
        [
            11.0,
            [
                ["/n_set", 1002, "gate", 0],
                [
                    "/s_new",
                    "default",
                    1003,
                    0,
                    1000,
                    "amplitude",
                    1.0,
                    "frequency",
                    660,
                    "out",
                    0,
                ],
            ],
        ],
        [
            13.0,
            [
                ["/n_set", 1003, "gate", 0],
                [
                    "/s_new",
                    "default",
                    1004,
                    0,
                    1000,
                    "amplitude",
                    1.0,
                    "frequency",
                    880,
                    "out",
                    0,
                ],
            ],
        ],
        [16.0, [["/n_set", 1004, "gate", 0], ["/n_free", 1001]]],
        [16.25, [["/n_free", 1000]]],
    ]
    # Deltas mirror the note durations plus the 0.25s release tail.
    assert deltas == [1.0, 2.0, 3.0, 0.25, None]
def test_manual_communicado_pbind_01(server):
    """Step pbus_01 against a live server, checking the node tree after each
    manual step and after each synth's gate release terminates it."""
    player = supriya.patterns.RealtimeEventPlayer(pbus_01, server=server)
    # Initial State
    server_state = str(server.query_remote_nodes(include_controls=True))
    assert server_state == uqbar.strings.normalize(
        r"""
        NODE TREE 0 group
            1 group
        """
    )
    # Step 1
    player(0, 0)
    server.sync()
    server_state = str(server.query_remote_nodes(include_controls=True))
    assert server_state == uqbar.strings.normalize(
        r"""
        NODE TREE 0 group
            1 group
                1000 group
                    1002 default
                        out: 16.0, amplitude: 1.0, frequency: 440.0, gate: 1.0, pan: 0.5
                1001 system_link_audio_2
                    done_action: 2.0, fade_time: 0.25, gate: 1.0, in_: 16.0, out: 0.0
        """
    )
    # Step 2
    player(0, 0)
    server.sync()
    server_state = str(server.query_remote_nodes(include_controls=True))
    assert server_state == uqbar.strings.normalize(
        r"""
        NODE TREE 0 group
            1 group
                1000 group
                    1003 default
                        out: 16.0, amplitude: 1.0, frequency: 660.0, gate: 1.0, pan: 0.5
                    1002 default
                        out: 16.0, amplitude: 1.0, frequency: 440.0, gate: 0.0, pan: 0.5
                1001 system_link_audio_2
                    done_action: 2.0, fade_time: 0.25, gate: 1.0, in_: 16.0, out: 0.0
        """
    )
    # Wait for termination
    time.sleep(0.5)
    server_state = str(server.query_remote_nodes(include_controls=True))
    assert server_state == uqbar.strings.normalize(
        r"""
        NODE TREE 0 group
            1 group
                1000 group
                    1003 default
                        out: 16.0, amplitude: 1.0, frequency: 660.0, gate: 1.0, pan: 0.5
                1001 system_link_audio_2
                    done_action: 2.0, fade_time: 0.25, gate: 1.0, in_: 16.0, out: 0.0
        """
    )
    # Step 3
    player(0, 0)
    server.sync()
    server_state = str(server.query_remote_nodes(include_controls=True))
    assert server_state == uqbar.strings.normalize(
        r"""
        NODE TREE 0 group
            1 group
                1000 group
                    1004 default
                        out: 16.0, amplitude: 1.0, frequency: 880.0, gate: 1.0, pan: 0.5
                    1003 default
                        out: 16.0, amplitude: 1.0, frequency: 660.0, gate: 0.0, pan: 0.5
                1001 system_link_audio_2
                    done_action: 2.0, fade_time: 0.25, gate: 1.0, in_: 16.0, out: 0.0
        """
    )
    # Wait for termination
    time.sleep(0.5)
    server_state = str(server.query_remote_nodes(include_controls=True))
    assert server_state == uqbar.strings.normalize(
        r"""
        NODE TREE 0 group
            1 group
                1000 group
                    1004 default
                        out: 16.0, amplitude: 1.0, frequency: 880.0, gate: 1.0, pan: 0.5
                1001 system_link_audio_2
                    done_action: 2.0, fade_time: 0.25, gate: 1.0, in_: 16.0, out: 0.0
        """
    )
    # Step 4
    player(0, 0)
    server.sync()
    server_state = str(server.query_remote_nodes(include_controls=True))
    assert server_state == uqbar.strings.normalize(
        r"""
        NODE TREE 0 group
            1 group
                1000 group
                    1004 default
                        out: 16.0, amplitude: 1.0, frequency: 880.0, gate: 0.0, pan: 0.5
        """
    )
    # Wait for termination
    time.sleep(0.5)
    server_state = str(server.query_remote_nodes(include_controls=True))
    assert server_state == uqbar.strings.normalize(
        r"""
        NODE TREE 0 group
            1 group
                1000 group
        """
    )
    # Step 4
    player(0, 0)
    server.sync()
    server_state = str(server.query_remote_nodes(include_controls=True))
    assert server_state == uqbar.strings.normalize(
        r"""
        NODE TREE 0 group
            1 group
        """
    )
def test_nonrealtime_01a():
    """Inscribing pbus_01 into an NRT session yields the full OSC timeline."""
    session = supriya.nonrealtime.Session()
    with session.at(0):
        final_offset = session.inscribe(pbus_01)
    d_recv_commands = pytest.helpers.build_d_recv_commands(
        [supriya.assets.synthdefs.system_link_audio_2, supriya.assets.synthdefs.default]
    )
    assert session.to_lists() == [
        [
            0.0,
            [
                *d_recv_commands,
                ["/g_new", 1000, 0, 0],
                [
                    "/s_new",
                    "38a2c79fc9d58d06e361337163a4e80f",
                    1001,
                    3,
                    1000,
                    "fade_time",
                    0.25,
                    "in_",
                    16,
                ],
                [
                    "/s_new",
                    "da0982184cc8fa54cf9d288a0fe1f6ca",
                    1002,
                    0,
                    1000,
                    "amplitude",
                    1.0,
                    "frequency",
                    440,
                    "out",
                    16,
                ],
            ],
        ],
        [
            1.0,
            [
                [
                    "/s_new",
                    "da0982184cc8fa54cf9d288a0fe1f6ca",
                    1003,
                    0,
                    1000,
                    "amplitude",
                    1.0,
                    "frequency",
                    660,
                    "out",
                    16,
                ],
                ["/n_set", 1002, "gate", 0],
            ],
        ],
        [
            3.0,
            [
                [
                    "/s_new",
                    "da0982184cc8fa54cf9d288a0fe1f6ca",
                    1004,
                    0,
                    1000,
                    "amplitude",
                    1.0,
                    "frequency",
                    880,
                    "out",
                    16,
                ],
                ["/n_set", 1003, "gate", 0],
            ],
        ],
        [6.0, [["/n_set", 1001, "gate", 0], ["/n_set", 1004, "gate", 0]]],
        [6.25, [["/n_free", 1000], [0]]],
    ]
    # Total duration: 1 + 2 + 3 seconds of notes plus the 0.25s release tail.
    assert final_offset == 6.25
def test_nonrealtime_01b():
    """A duration=3 cap truncates the pattern after the second note."""
    session = supriya.nonrealtime.Session()
    with session.at(0):
        final_offset = session.inscribe(pbus_01, duration=3)
    d_recv_commands = pytest.helpers.build_d_recv_commands(
        [supriya.assets.synthdefs.system_link_audio_2, supriya.assets.synthdefs.default]
    )
    assert session.to_lists() == [
        [
            0.0,
            [
                *d_recv_commands,
                ["/g_new", 1000, 0, 0],
                [
                    "/s_new",
                    "38a2c79fc9d58d06e361337163a4e80f",
                    1001,
                    3,
                    1000,
                    "fade_time",
                    0.25,
                    "in_",
                    16,
                ],
                [
                    "/s_new",
                    "da0982184cc8fa54cf9d288a0fe1f6ca",
                    1002,
                    0,
                    1000,
                    "amplitude",
                    1.0,
                    "frequency",
                    440,
                    "out",
                    16,
                ],
            ],
        ],
        [
            1.0,
            [
                [
                    "/s_new",
                    "da0982184cc8fa54cf9d288a0fe1f6ca",
                    1003,
                    0,
                    1000,
                    "amplitude",
                    1.0,
                    "frequency",
                    660,
                    "out",
                    16,
                ],
                ["/n_set", 1002, "gate", 0],
            ],
        ],
        [3.0, [["/n_set", 1001, "gate", 0], ["/n_set", 1003, "gate", 0]]],
        [3.25, [["/n_free", 1000], [0]]],
    ]
    assert final_offset == 3.25
def test_nonrealtime_01c():
    """A duration=2 cap truncates the pattern after the first note."""
    session = supriya.nonrealtime.Session()
    with session.at(0):
        final_offset = session.inscribe(pbus_01, duration=2)
    d_recv_commands = pytest.helpers.build_d_recv_commands(
        [supriya.assets.synthdefs.system_link_audio_2, supriya.assets.synthdefs.default]
    )
    assert session.to_lists() == [
        [
            0.0,
            [
                *d_recv_commands,
                ["/g_new", 1000, 0, 0],
                [
                    "/s_new",
                    "38a2c79fc9d58d06e361337163a4e80f",
                    1001,
                    3,
                    1000,
                    "fade_time",
                    0.25,
                    "in_",
                    16,
                ],
                [
                    "/s_new",
                    "da0982184cc8fa54cf9d288a0fe1f6ca",
                    1002,
                    0,
                    1000,
                    "amplitude",
                    1.0,
                    "frequency",
                    440,
                    "out",
                    16,
                ],
            ],
        ],
        [1.0, [["/n_set", 1001, "gate", 0], ["/n_set", 1002, "gate", 0]]],
        [1.25, [["/n_free", 1000], [0]]],
    ]
    assert final_offset == 1.25
def test_nonrealtime_releasetime():
    """Pbus release_time controls the link synth's fade_time and the extra
    second of session timeline after the last note frees."""
    with SynthDefBuilder(out=Parameter(parameter_rate="SCALAR", value=0)) as builder:
        # NOTE(review): trailing comma makes this expression a 1-tuple; the
        # Line UGen is still registered with the builder, so it is harmless.
        supriya.ugens.Line.kr(duration=2),
        supriya.ugens.Out.ar(bus=builder["out"], source=supriya.ugens.DC.ar(1))
    dc_synthdef = builder.build()
    pattern = supriya.patterns.Pbus(
        supriya.patterns.Pbind(delta=1, duration=1, synthdef=dc_synthdef),
        release_time=1,
    )
    # Session(0, 1): presumably (input, output) channel counts — confirm.
    session = supriya.nonrealtime.Session(0, 1)
    with session.at(0):
        session.inscribe(pattern, duration=1)
    d_recv_commands = pytest.helpers.build_d_recv_commands(
        [supriya.assets.synthdefs.system_link_audio_1, dc_synthdef]
    )
    assert session.to_lists() == [
        [
            0.0,
            [
                *d_recv_commands,
                ["/g_new", 1000, 0, 0],
                [
                    "/s_new",
                    supriya.assets.synthdefs.system_link_audio_1.anonymous_name,
                    1001,
                    3,
                    1000,
                    "fade_time",
                    1.0,
                    "in_",
                    1,
                ],
                ["/s_new", dc_synthdef.anonymous_name, 1002, 0, 1000, "out", 1],
            ],
        ],
        [1.0, [["/n_free", 1002], ["/n_set", 1001, "gate", 0]]],
        [2.0, [["/n_free", 1000], [0]]],
    ]
| |
"""
Serve web page and handle web sockets using Tornado.
"""
import json
import time
import asyncio
import socket
import mimetypes
import traceback
import threading
from urllib.parse import urlparse
# from concurrent.futures import ThreadPoolExecutor
import tornado
from tornado import gen, netutil
from tornado.web import Application, RequestHandler
from tornado.ioloop import IOLoop
from tornado.websocket import WebSocketHandler, WebSocketClosedError
from tornado.httpserver import HTTPServer
from tornado.platform.asyncio import AsyncIOMainLoop
from ._app import manager
from ._session import get_page
from ._server import AbstractServer
from ._assetstore import assets
from ._clientcore import serializer
from . import logger
from .. import config
# Fail fast at import time on unsupported Tornado versions.
if tornado.version_info < (4, ):
    raise RuntimeError('Flexx requires Tornado v4.0 or higher.')
# todo: generalize -> Make Tornado mnore of an implementation detail.
# So we can use e.g. https://github.com/aaugustin/websockets
# todo: threading, or even multi-process
#executor = ThreadPoolExecutor(4)
IMPORT_TIME = time.time()
def is_main_thread():
    """ Get whether this is the main thread.

    Uses the public ``threading.main_thread()`` API (Python 3.4+) instead of
    isinstance-checking against the private ``threading._MainThread`` class,
    which is an implementation detail and not guaranteed across versions.
    """
    return threading.current_thread() is threading.main_thread()
class TornadoServer(AbstractServer):
    """ Flexx Server implemented in Tornado.
    """

    def __init__(self, *args, **kwargs):
        # Created lazily in _open(); exposed via the app/server properties.
        self._app = None
        self._server = None
        super().__init__(*args, **kwargs)

    def _open(self, host, port, **kwargs):
        """ Create the Tornado Application + HTTPServer and start listening.

        ``port`` may be an int, a string (hashed to a port), or falsy
        (probe a repeatable range, then let Tornado pick).
        """
        # Note: does not get called if host is False. That way we can
        # run Flexx in e.g. JLab's application.
        # Hook Tornado up with asyncio. Flexx' BaseServer makes sure
        # that the correct asyncio event loop is current (for this thread).
        # http://www.tornadoweb.org/en/stable/asyncio.html
        # todo: Since Tornado v5.0 asyncio is autom used, deprecating AsyncIOMainLoop
        self._io_loop = AsyncIOMainLoop()
        # I am sorry for this hack, but Tornado wont work otherwise :(
        # I wonder how long it will take before this will bite me back. I guess
        # we will be alright as long as there is no other Tornado stuff going on.
        if hasattr(IOLoop, "_current"):
            IOLoop._current.instance = None
        else:
            IOLoop.current().instance = None
        self._io_loop.make_current()
        # handle ssl, wether from configuration or given args
        if config.ssl_certfile:
            if 'ssl_options' not in kwargs:
                kwargs['ssl_options'] = {}
            if 'certfile' not in kwargs['ssl_options']:
                kwargs['ssl_options']['certfile'] = config.ssl_certfile
        if config.ssl_keyfile:
            if 'ssl_options' not in kwargs:
                kwargs['ssl_options'] = {}
            if 'keyfile' not in kwargs['ssl_options']:
                kwargs['ssl_options']['keyfile'] = config.ssl_keyfile
        if config.tornado_debug:
            app_kwargs = dict(debug=True)
        else:
            app_kwargs = dict()
        # Create tornado application
        self._app = Application([(r"/flexx/ws/(.*)", WSHandler),
                                 (r"/flexx/(.*)", MainHandler),
                                 (r"/(.*)", AppHandler), ], **app_kwargs)
        self._app._io_loop = self._io_loop
        # Create tornado server, bound to our own ioloop
        if tornado.version_info < (5, ):
            kwargs['io_loop'] = self._io_loop
        self._server = HTTPServer(self._app, **kwargs)
        # Start server (find free port number if port not given)
        if port:
            # Turn port into int, use hashed port number if a string was given
            try:
                port = int(port)
            except ValueError:
                port = port_hash(port)
            self._server.listen(port, host)
        else:
            # Try N ports in a repeatable range (easier, browser history, etc.)
            prefered_port = port_hash('Flexx')
            for i in range(8):
                port = prefered_port + i
                try:
                    self._server.listen(port, host)
                    break
                except OSError:
                    pass  # address already in use
            else:
                # Ok, let Tornado figure out a port
                [sock] = netutil.bind_sockets(None, host, family=socket.AF_INET)
                self._server.add_sockets([sock])
                port = sock.getsockname()[1]
        # Notify address, so its easy to e.g. copy and paste in the browser
        self._serving = self._app._flexx_serving = host, port
        proto = 'http'
        if 'ssl_options' in kwargs:
            proto = 'https'
        # This string 'Serving apps at' is our 'ready' signal and is tested for.
        logger.info('Serving apps at %s://%s:%i/' % (proto, host, port))

    def _close(self):
        # Stop accepting new connections; existing ones are not torn down here.
        self._server.stop()

    @property
    def app(self):
        """ The Tornado Application object being used."""
        return self._app

    @property
    def server(self):
        """ The Tornado HttpServer object being used."""
        return self._server

    @property
    def protocol(self):
        """ Get a string representing served protocol."""
        if self._server.ssl_options is not None:
            return 'https'
        return 'http'
def port_hash(name):
    """ Deterministically map a string to a port between 49152 and 65535.

    That range (2**14 possibilities) is the range for dynamic and/or
    private ports (ephemeral ports) specified by iana.org.
    """
    factor = 0xd2d84a61
    accum = 0
    for char in name:
        accum += (accum >> 3) + ord(char) * factor
    accum += (accum >> 3) + len(name) * factor
    return 49152 + accum % 2 ** 14
class FlexxHandler(RequestHandler):
    """ Base class for Flexx' Tornado request handlers.
    """

    def initialize(self, **kwargs):
        # kwargs == dict set as third arg in url spec
        pass

    def write_error(self, status_code, **kwargs):
        # Render a friendlier error page; optionally include the traceback
        # when config.browser_stacktrace is enabled.
        if status_code == 404:  # does not work?
            self.write('flexx.ui wants you to connect to root (404)')
        else:
            if config.browser_stacktrace:
                msg = 'Flexx.ui encountered an error: <br /><br />'
                try:  # try providing a useful message; tough luck if this fails
                    # NOTE: local name shadows the `type` builtin (scoped here).
                    type, value, tb = kwargs['exc_info']
                    tb_str = ''.join(traceback.format_tb(tb))
                    msg += '<pre>%s\n%s</pre>' % (tb_str, str(value))
                except Exception:
                    pass
                self.write(msg)
            super().write_error(status_code, **kwargs)

    def on_finish(self):
        # Hook for subclasses; nothing to clean up at this level.
        pass
class AppHandler(FlexxHandler):
    """ Handler for http requests to get apps.
    """

    @gen.coroutine
    def get(self, full_path):
        """ Resolve ``full_path`` to an app name (or the index) and serve it.
        """
        logger.debug('Incoming request at %r' % full_path)
        ok_app_names = '__main__', '__default__', '__index__'
        parts = [p for p in full_path.split('/') if p]
        # Try getting regular app name
        # Note: invalid part[0] can mean its a path relative to the main app
        app_name = None
        path = '/'.join(parts)
        if parts:
            if path.lower() == 'flexx':  # reserved, redirect to other handler
                return self.redirect('/flexx/')
            if parts[0] in ok_app_names or manager.has_app_name(parts[0]):
                app_name = parts[0]
                path = '/'.join(parts[1:])
        # If it does not look like an app, it might be that the request is for
        # the main app. The main app can have sub-paths, but lets try to filter
        # out cases that might make Flexx unnecessarily instantiate an app.
        # In particular "favicon.ico" that browsers request by default (#385).
        if app_name is None:
            if len(parts) == 1 and '.' in full_path:
                return self.redirect('/flexx/data/' + full_path)
            # If we did not return ... assume this is the default app
            app_name = '__main__'
        # Try harder to produce an app
        if app_name == '__main__':
            app_name = manager.has_app_name('__main__')
        elif '/' not in full_path:
            return self.redirect('/%s/' % app_name)  # ensure slash behind name
        # Maybe the user wants an index? Otherwise error.
        if not app_name:
            if not parts:
                app_name = '__index__'
            else:
                name = parts[0] if parts else '__main__'
                return self.write('No app "%s" is currently hosted.' % name)
        # We now have:
        # * app_name: name of the app, must be a valid identifier, names
        #   with underscores are reserved for special things like assets,
        #   commands, etc.
        # * path: part (possibly with slashes) after app_name
        if app_name == '__index__':
            self._get_index(app_name, path)  # Index page
        else:
            self._get_app(app_name, path)  # An actual app!

    def _get_index(self, app_name, path):
        # Write a bare HTML list of the currently hosted apps.
        if path:
            return self.redirect('/flexx/__index__')
        all_apps = ['<li><a href="%s/">%s</a></li>' % (name, name) for name in
                    manager.get_app_names()]
        the_list = '<ul>%s</ul>' % ''.join(all_apps) if all_apps else 'no apps'
        self.write('Index of available apps: ' + the_list)

    def _get_app(self, app_name, path):
        """ Serve the page for one app, creating or reusing a session. """
        # Allow serving data/assets relative to app so that data can use
        # relative paths just like exported apps.
        if path.startswith(('flexx/data/', 'flexx/assets/')):
            return self.redirect('/' + path)
        # Get case-corrected app name if the app is known
        correct_app_name = manager.has_app_name(app_name)
        # Error or redirect if app name is not right
        if not correct_app_name:
            return self.write('No app "%s" is currently hosted.' % app_name)
        if correct_app_name != app_name:
            return self.redirect('/%s/%s' % (correct_app_name, path))
        # Should we bind this app instance to a pre-created session?
        session_id = self.get_argument('session_id', '')
        if session_id:
            # If session_id matches a pending app, use that session
            session = manager.get_session_by_id(session_id)
            if session and session.status == session.STATUS.PENDING:
                self.write(get_page(session).encode())
            else:
                self.redirect('/%s/' % app_name)  # redirect for normal serve
        else:
            # Create session - websocket will connect to it via session_id
            session = manager.create_session(app_name, request=self.request)
            self.write(get_page(session).encode())
class MainHandler(RequestHandler):
    """ Handler for assets, commands, etc. Basically, everything for
    which the path is clear.
    """

    def _guess_mime_type(self, fname):
        """ Set the mimetype if we can guess it from the filename.
        """
        guess = mimetypes.guess_type(fname)[0]
        if guess:
            self.set_header("Content-Type", guess)

    @gen.coroutine
    def get(self, full_path):
        """ Dispatch on the first path component: assets/assetview/data,
        info, or cmd.
        """
        logger.debug('Incoming request at %s' % full_path)
        # Analyze path to derive components
        # Note: invalid app name can mean its a path relative to the main app
        parts = [p for p in full_path.split('/') if p]
        if not parts:
            return self.write('Root url for flexx: assets, assetview, data, cmd')
        selector = parts[0]
        path = '/'.join(parts[1:])
        if selector in ('assets', 'assetview', 'data'):
            self._get_asset(selector, path)  # JS, CSS, or data
        elif selector == 'info':
            self._get_info(selector, path)
        elif selector == 'cmd':
            self._get_cmd(selector, path)  # Execute (or ignore) command
        else:
            return self.write('Invalid url path "%s".' % full_path)

    def _get_asset(self, selector, path):
        """ Serve an asset, a line-numbered view of an asset, or data. """
        # Get session id and filename
        session_id, _, filename = path.partition('/')
        session_id = '' if session_id == 'shared' else session_id
        # Get asset provider: store or session
        asset_provider = assets
        if session_id and selector != 'data':
            return self.write('Only supports shared assets, not %s' % filename)
        elif session_id:
            asset_provider = manager.get_session_by_id(session_id)
        # Checks
        if asset_provider is None:
            return self.write('Invalid session %r' % session_id)
        if not filename:
            return self.write('Root dir for %s/%s' % (selector, path))
        if selector == 'assets':
            # If colon: request for a view of an asset at a certain line
            if '.js:' in filename or '.css:' in filename or filename[0] == ':':
                fname, where = filename.split(':')[:2]
                return self.redirect('/flexx/assetview/%s/%s#L%s' %
                                     (session_id or 'shared',
                                      fname.replace('/:', ':'), where))
            # Retrieve asset
            try:
                res = asset_provider.get_asset(filename)
            except KeyError:
                self.write('Could not load asset %r' % filename)
            else:
                self._guess_mime_type(filename)
                self.write(res.to_string())
        elif selector == 'assetview':
            # Retrieve asset
            try:
                res = asset_provider.get_asset(filename)
            except KeyError:
                return self.write('Could not load asset %r' % filename)
            else:
                res = res.to_string()
            # Build HTML page
            style = ('pre {display:block; width: 100%; padding:0; margin:0;} '
                     'a {text-decoration: none; color: #000; background: #ddd;} '
                     ':target {background:#ada;} ')
            lines = ['<html><head><style>%s</style></head><body>' % style]
            for i, line in enumerate(res.splitlines()):
                # Escape HTML-special characters so the asset source displays
                # verbatim. (The previous identity mapping '&'->'&' etc. was a
                # no-op, letting asset content be interpreted as markup.)
                table = {ord('&'): '&amp;', ord('<'): '&lt;', ord('>'): '&gt;'}
                line = line.translate(table).replace('\t', ' ')
                # '&nbsp;' keeps the right-justified line numbers aligned
                # (a plain space would collapse in HTML).
                lines.append('<pre id="L%i"><a href="#L%i">%s</a> %s</pre>' %
                             (i+1, i+1,
                              str(i+1).rjust(4).replace(' ', '&nbsp;'), line))
            lines.append('</body></html>')
            return self.write('\n'.join(lines))
        elif selector == 'data':
            # todo: can/do we async write in case the data is large?
            # Retrieve data
            res = asset_provider.get_data(filename)
            if res is None:
                return self.send_error(404)
            else:
                self._guess_mime_type(filename)  # so that images show up
                return self.write(res)
        else:
            raise RuntimeError('Invalid asset type %r' % selector)

    def _get_info(self, selector, info):
        """ Provide some rudimentary information about the server.
        Note that this is publicly accessible.
        """
        runtime = time.time() - IMPORT_TIME
        napps = len(manager.get_app_names())
        nsessions = sum([len(manager.get_connections(x))
                         for x in manager.get_app_names()])
        info = []
        info.append('Runtime: %1.1f s' % runtime)
        info.append('Number of apps: %i' % napps)
        info.append('Number of sessions: %i' % nsessions)
        info = '\n'.join(['<li>%s</li>' % i for i in info])
        self.write('<ul>' + info + '</ul>')

    def _get_cmd(self, selector, path):
        """ Allow control of the server using http, but only from localhost!
        """
        # Crude origin gate: only honor commands issued to localhost.
        if not self.request.host.startswith('localhost:'):
            self.write('403')
            return
        if not path:
            self.write('No command given')
        elif path == 'info':
            info = dict(address=self.application._flexx_serving,
                        app_names=manager.get_app_names(),
                        nsessions=sum([len(manager.get_connections(x))
                                       for x in manager.get_app_names()]),
                        )
            self.write(json.dumps(info))
        elif path == 'stop':
            asyncio.get_event_loop().stop()
            # loop = IOLoop.current()
            # loop.add_callback(loop.stop)
            self.write("Stopping event loop.")
        else:
            self.write('unknown command %r' % path)
class MessageCounter:
    """ Simple class to count incoming messages and periodically log
    the number of messages per second.
    """

    def __init__(self):
        self._collect_interval = 0.2  # period over which to collect messages
        self._notify_interval = 3.0  # period on which to log the mps
        self._window_interval = 4.0  # size of sliding window
        self._mps = [(time.time(), 0)]  # tuples of (time, count)
        self._collect_count = 0
        self._collect_stoptime = 0
        self._stop = False
        # Kick off the periodic logging loop (reschedules itself via asyncio).
        self._notify()

    def trigger(self):
        """ Record one message: bump the current bucket, or flush it and
        start a new one when the collect interval has elapsed. """
        t = time.time()
        if t < self._collect_stoptime:
            self._collect_count += 1
        else:
            self._mps.append((self._collect_stoptime, self._collect_count))
            self._collect_count = 1
            self._collect_stoptime = t + self._collect_interval

    def _notify(self):
        # Drop buckets older than the sliding window, then log the rate.
        mintime = time.time() - self._window_interval
        self._mps = [x for x in self._mps if x[0] > mintime]
        if self._mps:
            n = sum([x[1] for x in self._mps])
            T = self._mps[-1][0] - self._mps[0][0] + self._collect_interval
        else:
            n, T = 0, self._collect_interval
        logger.debug('Websocket messages per second: %1.1f' % (n / T))
        if not self._stop:
            loop = asyncio.get_event_loop()
            loop.call_later(self._notify_interval, self._notify)

    def stop(self):
        """ Stop the periodic logging (takes effect at the next callback). """
        self._stop = True
class WSHandler(WebSocketHandler):
""" Handler for websocket.
"""
# https://tools.ietf.org/html/rfc6455#section-7.4.1
known_reasons = {1000: 'client done',
1001: 'client closed',
1002: 'protocol error',
1003: 'could not accept data',
}
# --- callbacks
def open(self, path=None):
""" Called when a new connection is made.
"""
if not hasattr(self, 'close_code'): # old version of Tornado?
self.close_code, self.close_reason = None, None
self._session = None
self._mps_counter = MessageCounter()
# Don't collect messages to send them more efficiently, just send asap
# self.set_nodelay(True)
if isinstance(path, bytes):
path = path.decode()
self.app_name = path.strip('/')
logger.debug('New websocket connection %s' % path)
if manager.has_app_name(self.app_name):
self.application._io_loop.spawn_callback(self.pinger1)
else:
self.close(1003, "Could not associate socket with an app.")
# todo: @gen.coroutine?
def on_message(self, message):
""" Called when a new message is received from JS.
This handles one message per event loop iteration.
We now have a very basic protocol for receiving messages,
we should at some point define a real formalized protocol.
"""
self._mps_counter.trigger()
try:
command = serializer.decode(message)
except Exception as err:
err.skip_tb = 1
logger.exception(err)
self._pongtime = time.time()
if self._session is None:
if command[0] == 'HI_FLEXX':
session_id = command[1]
try:
self._session = manager.connect_client(self, self.app_name,
session_id,
cookies=self.cookies)
except Exception as err:
self.close(1003, "Could not launch app: %r" % err)
raise
else:
try:
self._session._receive_command(command)
except Exception as err:
err.skip_tb = 1
logger.exception(err)
def on_close(self):
""" Called when the connection is closed.
"""
self.close_code = code = self.close_code or 0
reason = self.close_reason or self.known_reasons.get(code, '')
logger.debug('Websocket closed: %s (%i)' % (reason, code))
self._mps_counter.stop()
if self._session is not None:
manager.disconnect_client(self._session)
self._session = None # Allow cleaning up
@gen.coroutine
def pinger1(self):
""" Check for timeouts. This helps remove lingering false connections.
This uses the websocket's native ping-ping mechanism. On the
browser side, pongs work even if JS is busy. On the Python side
we perform a check whether we were really waiting or whether Python
was too busy to detect the pong.
"""
self._pongtime = time.time()
self._pingtime = pingtime = 0
while self.close_code is None:
dt = config.ws_timeout
# Ping, but don't spam
if pingtime <= self._pongtime:
self.ping(b'x')
pingtime = self._pingtime = time.time()
iters_since_ping = 0
yield gen.sleep(dt / 5)
# Check pong status
iters_since_ping += 1
if iters_since_ping < 5:
pass # we might have missed the pong
elif time.time() - self._pongtime > dt:
# Delay is so big that connection probably dropped.
# Note that a browser sends a pong even if JS is busy
logger.warning('Closing connection due to lack of pong')
self.close(1000, 'Conection timed out (no pong).')
return
def on_pong(self, data):
""" Implement the ws's on_pong() method. Called when our ping
is returned by the browser.
"""
self._pongtime = time.time()
# --- methods
def write_command(self, cmd):
assert isinstance(cmd, tuple) and len(cmd) >= 1
bb = serializer.encode(cmd)
try:
self.write_message(bb, binary=True)
except WebSocketClosedError:
self.close(1000, 'closed by client')
def close(self, *args):
try:
super().close(*args)
except TypeError:
super().close() # older Tornado
def close_this(self):
""" Call this to close the websocket
"""
self.close(1000, 'closed by server')
def check_origin(self, origin):
    """ Handle cross-domain access; override default same origin policy.
    """
    # http://www.tornadoweb.org/en/stable/_modules/tornado/websocket.html
    # WebSocketHandler.check_origin
    serving_host = self.request.headers.get("Host")
    serving_hostname, _, serving_port = serving_host.partition(':')
    connecting_host = urlparse(origin).netloc
    connecting_hostname, _, connecting_port = connecting_host.partition(':')

    # NOTE(review): both sides default to port '80' when no explicit port is
    # present; https/wss origins (implicit 443) will compare as '80' -- confirm
    # this is the intended behavior.
    serving_port = serving_port or '80'
    connecting_port = connecting_port or '80'

    if serving_hostname == 'localhost':
        return True  # Safe
    elif serving_host == connecting_host:
        return True  # Passed most strict test, hooray!
    elif serving_hostname == '0.0.0.0' and serving_port == connecting_port:
        return True  # host on all addressses; best we can do is check port
    elif connecting_host in config.host_whitelist:
        return True
    else:
        logger.warning('Connection refused from %s' % origin)
        return False
| |
# Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
from datetime import datetime
from hashlib import sha1
from os import getcwd, makedirs, walk
from os.path import getmtime, isdir, isfile, join
from shutil import rmtree
from time import time
import click
from platformio import app, exception, telemetry, util
from platformio.commands.lib import lib_install as cmd_lib_install
from platformio.commands.platforms import \
platforms_install as cmd_platforms_install
from platformio.libmanager import LibraryManager
from platformio.platforms.base import PlatformFactory
@click.command("run", short_help="Process project environments")
@click.option("--environment", "-e", multiple=True, metavar="<environment>")
@click.option("--target", "-t", multiple=True, metavar="<target>")
@click.option("--upload-port", metavar="<upload port>")
@click.option("--project-dir", "-d", default=getcwd,
              type=click.Path(exists=True, file_okay=False, dir_okay=True,
                              writable=True, resolve_path=True))
@click.option("--verbose", "-v", count=True, default=3)
@click.option("--disable-auto-clean", is_flag=True)
@click.pass_context
def cli(ctx, environment, target, upload_port,  # pylint: disable=R0913,R0914
        project_dir, verbose, disable_auto_clean):
    """Process every (or the selected) `[env:...]` section of platformio.ini,
    running each through an EnvironmentProcessor. Raises ReturnErrorCode if
    any environment fails.
    """
    with util.cd(project_dir):
        config = util.get_project_config()

        if not config.sections():
            raise exception.ProjectEnvsNotAvailable()

        # Environment sections are named "env:<name>"; strip the prefix
        # to validate the user-supplied -e names.
        unknown = set(environment) - set([s[4:] for s in config.sections()])
        if unknown:
            raise exception.UnknownEnvNames(", ".join(unknown))

        # clean obsolete .pioenvs dir
        if not disable_auto_clean:
            _clean_pioenvs_dir()

        results = []
        for section in config.sections():
            # skip main configuration section
            if section == "platformio":
                continue

            if not section.startswith("env:"):
                raise exception.InvalidEnvName(section)

            envname = section[4:]
            if environment and envname not in environment:
                # echo("Skipped %s environment" % style(envname, fg="yellow"))
                continue

            # Blank line between consecutive environment reports.
            if results:
                click.echo()

            options = {}
            for k, v in config.items(section):
                options[k] = v

            ep = EnvironmentProcessor(
                ctx, envname, options, target, upload_port, verbose)
            results.append(ep.process())

        if not all(results):
            raise exception.ReturnErrorCode()
class EnvironmentProcessor(object):
    """Processes a single `[env:...]` section: validates its options,
    derives build variables/targets, and runs the platform build.
    """

    # Obsolete option names (upper-case) -> current replacements.
    RENAMED_OPTIONS = {
        "INSTALL_LIBS": "LIB_INSTALL",
        "IGNORE_LIBS": "LIB_IGNORE",
        "USE_LIBS": "LIB_USE",
        "LDF_CYCLIC": "LIB_DFCYCLIC",
        "SRCBUILD_FLAGS": "SRC_BUILD_FLAGS"
    }

    def __init__(self, cmd_ctx, name, options,  # pylint: disable=R0913
                 targets, upload_port, verbose):
        self.cmd_ctx = cmd_ctx
        self.name = name
        self.options = self._validate_options(options)
        self.targets = targets
        self.upload_port = upload_port
        self.verbose_level = int(verbose)

    def process(self):
        """Run the environment and print a SUCCESS/ERROR summary banner.

        Returns:
            bool: True when the underlying build returned code 0.
        """
        terminal_width, _ = click.get_terminal_size()
        start_time = time()

        click.echo("[%s] Processing %s (%s)" % (
            datetime.now().strftime("%c"),
            click.style(self.name, fg="cyan", bold=True),
            # .items() (not .iteritems()) so this also works on Python 3
            ", ".join(["%s: %s" % (k, v) for k, v in self.options.items()])
        ))
        click.secho("-" * terminal_width, bold=True)

        result = self._run()
        is_error = result['returncode'] != 0
        summary_text = " Took %.2f seconds " % (time() - start_time)
        # Floor division: "=" * float raises TypeError on Python 3
        # (Python 2 int / int already floored, so behavior is unchanged there).
        half_line = "=" * ((terminal_width - len(summary_text) - 10) // 2)
        click.echo("%s [%s]%s%s" % (
            half_line,
            (click.style(" ERROR ", fg="red", bold=True)
             if is_error else click.style("SUCCESS", fg="green", bold=True)),
            summary_text,
            half_line
        ), err=is_error)
        return not is_error

    def _validate_options(self, options):
        """Return a copy of `options` with deprecated keys renamed,
        warning the user about each one.
        """
        result = {}
        for k, v in options.items():
            _k = k.upper()
            # process obsolete options
            if _k in self.RENAMED_OPTIONS:
                click.secho(
                    "Warning! `%s` option is deprecated and will be "
                    "removed in the next release! Please use "
                    "`%s` instead." % (
                        k, self.RENAMED_OPTIONS[_k].lower()),
                    fg="yellow"
                )
                k = self.RENAMED_OPTIONS[_k].lower()
            result[k] = v
        return result

    def _get_build_variables(self):
        """Build the KEY=VALUE list passed to the platform build system."""
        variables = ["PIOENV=" + self.name]
        if self.upload_port:
            variables.append("UPLOAD_PORT=%s" % self.upload_port)
        for k, v in self.options.items():
            k = k.upper()
            # "targets" is handled separately; a CLI upload port overrides
            # the one from the config file.
            if k == "TARGETS" or (k == "UPLOAD_PORT" and self.upload_port):
                continue
            variables.append("%s=%s" % (k, v))
        return variables

    def _get_build_targets(self):
        """CLI targets win over the config's `targets` option."""
        targets = []
        if self.targets:
            targets = [t for t in self.targets]
        elif "targets" in self.options:
            targets = self.options['targets'].split()
        return targets

    def _run(self):
        """Install dependencies if needed and run the platform build.

        Raises:
            exception.UndefinedEnvPlatform: when the env has no `platform`.
        """
        if "platform" not in self.options:
            raise exception.UndefinedEnvPlatform(self.name)

        platform = self.options['platform']
        build_vars = self._get_build_variables()
        build_targets = self._get_build_targets()

        telemetry.on_run_environment(self.options, build_targets)

        # install platform and libs dependencies
        _autoinstall_platform(self.cmd_ctx, platform, build_targets)
        if "lib_install" in self.options:
            _autoinstall_libs(self.cmd_ctx, self.options['lib_install'])

        p = PlatformFactory.newPlatform(platform)
        return p.run(build_vars, build_targets, self.verbose_level)
def _autoinstall_platform(ctx, platform, targets):
    """Install the platform (and, for upload targets, its uploader tools)
    unless everything required is already present. May prompt the user
    when interactive prompts are enabled.
    """
    installed_platforms = PlatformFactory.get_platforms(installed=True).keys()
    cmd_options = {}

    p = PlatformFactory.newPlatform(platform)

    if "uploadlazy" in targets:
        upload_tools = p.pkg_aliases_to_names(["uploader"])

        # platform without uploaders
        if not upload_tools and platform in installed_platforms:
            return
        # uploaders are already installed
        if set(upload_tools) <= set(p.get_installed_packages()):
            return

        # Only the uploader packages are needed for a lazy upload.
        cmd_options['skip_default_package'] = True
        if upload_tools:
            cmd_options['with_package'] = ["uploader"]
    elif (platform in installed_platforms and
          set(p.get_default_packages()) <= set(p.get_installed_packages())):
        # Platform and all of its default packages are present.
        return

    if (not app.get_setting("enable_prompts") or
            click.confirm("The platform '%s' has not been installed yet. "
                          "Would you like to install it now?" % platform)):
        ctx.invoke(cmd_platforms_install, platforms=[platform], **cmd_options)
def _autoinstall_libs(ctx, libids_list):
    """Install any libraries from the comma-separated ID list `libids_list`
    that are not installed yet. May prompt the user when interactive
    prompts are enabled.
    """
    require_libs = [int(l.strip()) for l in libids_list.split(",")]
    installed_libs = [
        l['id'] for l in LibraryManager().get_installed().values()
    ]
    not_intalled_libs = set(require_libs) - set(installed_libs)

    # Nothing requested, or everything already present.
    if not require_libs or not not_intalled_libs:
        return

    if (not app.get_setting("enable_prompts") or
            click.confirm(
                "The libraries with IDs '%s' have not been installed yet. "
                "Would you like to install them now?" %
                ", ".join([str(i) for i in not_intalled_libs])
            )):
        ctx.invoke(cmd_lib_install, libid=not_intalled_libs)
def _clean_pioenvs_dir():
    """Remove the .pioenvs build directory when the project configuration
    or the src/lib file structure has changed since the last build, then
    recreate it and persist the current structure hash.
    """
    pioenvs_dir = util.get_pioenvs_dir()
    structhash_file = join(pioenvs_dir, "structure.hash")
    proj_hash = calculate_project_hash()

    # if project's config is modified
    if (isdir(pioenvs_dir) and
            getmtime(join(util.get_project_dir(), "platformio.ini")) >
            getmtime(pioenvs_dir)):
        rmtree(pioenvs_dir)

    # check project structure
    if isdir(pioenvs_dir) and isfile(structhash_file):
        with open(structhash_file) as f:
            # Hash matches -> file structure unchanged, keep the build dir.
            if f.read() == proj_hash:
                return
        rmtree(pioenvs_dir)

    if not isdir(pioenvs_dir):
        makedirs(pioenvs_dir)

    with open(structhash_file, "w") as f:
        f.write(proj_hash)
def calculate_project_hash():
    """Return a SHA1 fingerprint of the project's src/lib file paths.

    The hash covers file *paths* only (not contents), skipping anything
    under .git/.svn. Returns "" when no files are found.
    """
    structure = []
    for d in (util.get_projectsrc_dir(), util.get_projectlib_dir()):
        if not isdir(d):
            continue
        for root, _, files in walk(d):
            for f in files:
                path = join(root, f)
                if not any(s in path for s in (".git", ".svn")):
                    structure.append(path)
    # sha1() requires a bytes object on Python 3; .encode() is a no-op
    # for ASCII str on Python 2, so behavior there is unchanged.
    return sha1(",".join(sorted(structure)).encode("utf-8")).hexdigest() \
        if structure else ""
| |
# encoding: UTF-8
import sys
from time import sleep
from vnib import IbApi
########################################################################
# NOTE(review): Python 2 module (print statements, raw_input below) -- keep
# the syntax as-is until the vnib bindings themselves are ported.
class TestApi(IbApi):
    # Manual smoke-test subclass of the IB API wrapper: every callback simply
    # prints its own name and arguments so a developer can watch the event
    # stream from a live TWS/Gateway connection. (The class-body print fires
    # once at class-creation time.)
    print sys._getframe().f_code.co_name

    #----------------------------------------------------------------------
    def __init__(self):
        """Constructor"""
        super(TestApi, self).__init__()

    #----------------------------------------------------------------------
    def nextValidId(self, orderId):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def currentTime(self, time):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def connectAck(self):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def error(self, id_, errorCode, errorString):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def accountSummary(self, reqId, account, tag, value, curency):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def accountSummaryEnd(self, reqId):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def tickPrice(self, tickerId, field, price, canAutoExecute):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def tickSize(self, tickerId, field, size):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def tickOptionComputation(self, tickerId, tickType, impliedVol, delta, optPrice, pvDividend, gamma, vega, theta, undPrice):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def tickGeneric(self, tickerId, tickType, value):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def tickString(self, tickerId, tickType, value):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def tickEFP(self, tickerId, tickType, basisPoints, formattedBasisPoints, totalDividends, holdDays, futureLastTradeDate, dividendImpact, dividendsToLastTradeDate):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def orderStatus(self, orderId, status, filled, remaining, avgFillPrice, permId, parentId, lastFillPrice, clientId, whyHeld):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def openOrder(self, orderId, contract, order, orderState):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def openOrderEnd(self):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def winError(self, str_, lastError):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def connectionClosed(self):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def updateAccountValue(self, key, val, currency, accountName):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def updatePortfolio(self, contract, position, marketPrice, marketValue, averageCost, unrealizedPNL, realizedPNL, accountName):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def updateAccountTime(self, timeStamp):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def accountDownloadEnd(self, accountName):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def contractDetails(self, reqId, contractDetails):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def bondContractDetails(self, reqId, contractDetails):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def contractDetailsEnd(self, reqId):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def execDetails(self, reqId, contract, execution):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def execDetailsEnd(self, reqId):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def updateMktDepth(self, id_, position, operation, side, price, size):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def updateMktDepthL2(self, id_, position, marketMaker, operation, side, price, size):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def updateNewsBulletin(self, msgId, msgType, newsMessage, originExch):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def managedAccounts(self, accountsList):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def receiveFA(self, pFaDataType, cxml):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def historicalData(self, reqId, date, open_, high, low, close, volume, barCount, WAP, hasGaps):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def scannerParameters(self, xml):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def scannerData(self, reqId, rank, contractDetails, distance, benchmark, projection, legsStr):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def scannerDataEnd(self, reqId):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def realtimeBar(self, reqId, time, open_, high, low, close, volume, wap, count):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def fundamentalData(self, reqId, data):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def deltaNeutralValidation(self, reqId, underComp):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def tickSnapshotEnd(self, reqId):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def marketDataType(self, reqId, marketDataType):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def commissionReport(self, commissionReport):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def position(self, account, contract, position, avgCost):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def positionEnd(self):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def verifyMessageAPI(self, apiData):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def verifyCompleted(self, isSuccessful, errorText):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def displayGroupList(self, reqId, groups):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def displayGroupUpdated(self, reqId, contractInfo):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def verifyAndAuthMessageAPI(self, apiData, xyzChallange):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def verifyAndAuthCompleted(self, isSuccessful, errorText):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def positionMulti(self, reqId, account, modelCode, contract, pos, avgCost):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def positionMultiEnd(self, reqId):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def accountUpdateMulti(self, reqId, account, modelCode, key, value, currency):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def accountUpdateMultiEnd(self, reqId):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def securityDefinitionOptionalParameter(self, reqId, exchange, underlyingConId, tradingClass, multiplier, expirations, strikes):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def securityDefinitionOptionalParameterEnd(self, reqId):
        print sys._getframe().f_code.co_name
        print locals()

    #----------------------------------------------------------------------
    def softDollarTiers(self, reqId, tiers):
        print sys._getframe().f_code.co_name
        print locals()
if __name__ == '__main__':
    # Manual smoke test: connect to a locally running TWS/IB Gateway
    # (7497 is the default paper-trading port), request the server time
    # and an account summary, then wait for a keypress so the async
    # callbacks have a chance to print.
    api = TestApi()
    n = api.eConnect('127.0.0.1', 7497, 123, False)
    print n

    #t = api.TwsConnectionTime()
    #print t

    #
    sleep(1)
    print 'req time'
    api.reqCurrentTime()

    #
    sleep(1)
    api.reqAccountSummary(9001, "All", "AccountType")

    #print 'disconnect'
    #api.eDisconnect()

    raw_input()
| |
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
"""
Transactions are defined as collection of classes, a Bean represents collection of Document
objects for a transaction with main and children.
Group actions like save, etc are performed on doclists
"""
import webnotes
from webnotes import _, msgprint
from webnotes.utils import cint, cstr, flt
from webnotes.model.doc import Document
# Optional per-installation hook: if the app ships startup.bean_handlers,
# its on_method() is called around every Bean method (see notify() below).
try:
    from startup.bean_handlers import on_method
except ImportError:
    on_method = None

# Raised when a save/submit/cancel would make an illegal docstatus jump.
class DocstatusTransitionError(webnotes.ValidationError): pass
# Raised when the session user lacks the permission for the attempted action.
class BeanPermissionError(webnotes.ValidationError): pass
# Raised when the stored document changed after this copy was loaded.
class TimestampMismatchError(webnotes.ValidationError): pass
class Bean:
    """
    Collection of Documents with one parent and multiple children

    Wraps a `doclist` (parent Document followed by its child-table rows) and
    implements the transaction lifecycle: save, submit, cancel and
    update_after_submit, including permission, timestamp, link and
    mandatory-field checks.
    """
    def __init__(self, dt=None, dn=None):
        # Lazily-created controller object (see make_controller).
        self.obj = None
        # Flags callers may set to relax individual checks.
        self.ignore_permissions = False
        self.ignore_children_type = []
        self.ignore_links = False
        self.ignore_validate = False
        self.ignore_fields = False
        self.ignore_mandatory = False

        # Single doctypes are addressed as Bean(dt): name equals doctype.
        if isinstance(dt, basestring) and not dn:
            dn = dt
        if dt and dn:
            self.load_from_db(dt, dn)
        elif isinstance(dt, list):
            self.set_doclist(dt)
        elif isinstance(dt, dict):
            self.set_doclist([dt])

    def load_from_db(self, dt=None, dn=None):
        """
        Load doclist from dt
        """
        from webnotes.model.doc import getchildren

        if not dt: dt = self.doc.doctype
        if not dn: dn = self.doc.name

        doc = Document(dt, dn)

        # get all children types
        tablefields = webnotes.model.meta.get_table_fields(dt)

        # load chilren
        doclist = webnotes.doclist([doc,])
        for t in tablefields:
            doclist += getchildren(doc.name, t[0], t[1], dt)

        self.set_doclist(doclist)

        # Single doctypes store all values as strings; coerce them back
        # to their declared field types.
        if dt == dn:
            self.convert_type(self.doc)

    def __iter__(self):
        # Iterating a Bean iterates its doclist.
        return self.doclist.__iter__()

    @property
    def meta(self):
        # Cached DocType metadata for the parent document.
        if not hasattr(self, "_meta"):
            self._meta = webnotes.get_doctype(self.doc.doctype)
        return self._meta

    def from_compressed(self, data, docname):
        """Load the doclist from its compressed (tabular) wire format."""
        from webnotes.model.utils import expand
        self.set_doclist(expand(data))

    def set_doclist(self, doclist):
        """Normalize `doclist` (dicts become Documents) and re-point
        self.doc / the controller at it."""
        for i, d in enumerate(doclist):
            if isinstance(d, dict):
                doclist[i] = Document(fielddata=d)

        self.doclist = webnotes.doclist(doclist)
        self.doc = self.doclist[0]
        if self.obj:
            # keep an already-created controller in sync
            self.obj.doclist = self.doclist
            self.obj.doc = self.doc

    def make_controller(self):
        """Create (or refresh) the server-side controller for this doclist."""
        if self.obj:
            # update doclist before running any method
            self.obj.doclist = self.doclist
            return self.obj

        self.obj = webnotes.get_obj(doc=self.doc, doclist=self.doclist)
        self.obj.bean = self
        self.controller = self.obj
        return self.obj

    def get_controller(self):
        # Alias kept for readability at call sites.
        return self.make_controller()

    def to_dict(self):
        """Return the doclist as a list of plain field dicts."""
        return [d.fields for d in self.doclist]

    def check_if_latest(self, method="save"):
        """Raise TimestampMismatchError if the stored copy was modified
        after this one was loaded; also validates the docstatus transition
        implied by `method` (for non-single doctypes)."""
        from webnotes.model.meta import is_single

        conflict = False
        if not cint(self.doc.fields.get('__islocal')):
            if is_single(self.doc.doctype):
                modified = webnotes.conn.get_value(self.doc.doctype, self.doc.name, "modified")
                if isinstance(modified, list):
                    modified = modified[0]
                if cstr(modified) and cstr(modified) != cstr(self.doc.modified):
                    conflict = True
            else:
                # "for update" row-locks the record until commit.
                # NOTE(review): doctype/name are interpolated directly into the
                # SQL string -- assumes they are sanitized upstream; confirm.
                tmp = webnotes.conn.sql("""select modified, docstatus from `tab%s`
                    where name="%s" for update"""
                    % (self.doc.doctype, self.doc.name), as_dict=True)

                if not tmp:
                    webnotes.msgprint("""This record does not exist. Please refresh.""", raise_exception=1)

                modified = cstr(tmp[0].modified)
                if modified and modified != cstr(self.doc.modified):
                    conflict = True

                self.check_docstatus_transition(tmp[0].docstatus, method)

        if conflict:
            webnotes.msgprint(_("Error: Document has been modified after you have opened it") \
            + (" (%s, %s). " % (modified, self.doc.modified)) \
            + _("Please refresh to get the latest document."), raise_exception=TimestampMismatchError)

    def check_docstatus_transition(self, db_docstatus, method):
        """Allow only the [from, to] docstatus pair that `method` implies
        (0=Draft, 1=Submitted, 2=Cancelled); raise otherwise."""
        valid = {
            "save": [0,0],
            "submit": [0,1],
            "cancel": [1,2],
            "update_after_submit": [1,1]
        }

        labels = {
            0: _("Draft"),
            1: _("Submitted"),
            2: _("Cancelled")
        }

        if not hasattr(self, "to_docstatus"):
            self.to_docstatus = 0

        # "runserverobj" is a generic method call, exempt from the check.
        if method != "runserverobj" and [db_docstatus, self.to_docstatus] != valid[method]:
            webnotes.msgprint(_("Cannot change from") + ": " + labels[db_docstatus] + " > " + \
                labels[self.to_docstatus], raise_exception=DocstatusTransitionError)

    def check_links(self):
        """Validate all Link-field values across the doclist; abort the save
        when any target record is missing."""
        if self.ignore_links:
            return
        ref, err_list = {}, []
        for d in self.doclist:
            # build the link map once per doctype and reuse it
            if not ref.get(d.doctype):
                ref[d.doctype] = d.make_link_list()

            err_list += d.validate_links(ref[d.doctype])

        if err_list:
            webnotes.msgprint("""[Link Validation] Could not find the following values: %s.
            Please correct and resave. Document Not Saved.""" % ', '.join(err_list), raise_exception=1)

    def update_timestamps_and_docstatus(self):
        """Stamp owner/creation on new rows, modified/modified_by on all rows,
        and propagate self.to_docstatus to every row."""
        from webnotes.utils import now
        ts = now()
        user = webnotes.__dict__.get('session', {}).get('user') or 'Administrator'

        for d in self.doclist:
            if self.doc.fields.get('__islocal'):
                if not d.owner:
                    d.owner = user
                if not d.creation:
                    d.creation = ts

            d.modified_by = user
            d.modified = ts
            if d.docstatus != 2 and self.to_docstatus >= int(d.docstatus): # don't update deleted
                d.docstatus = self.to_docstatus

    def prepare_for_save(self, method):
        """Run all pre-write checks/bookkeeping shared by save/submit/cancel."""
        self.check_if_latest(method)
        self.update_timestamps_and_docstatus()
        self.update_parent_info()

        if self.doc.fields.get("__islocal"):
            # set name before validate
            self.doc.set_new_name(self.get_controller())
            self.run_method('before_insert')

        if method != "cancel":
            self.extract_images_from_text_editor()
            self.check_links()

    def update_parent_info(self):
        """Stamp parent/parenttype on child rows, validate their parentfield,
        and (re)assign row indexes per parentfield."""
        idx_map = {}
        is_local = cint(self.doc.fields.get("__islocal"))

        if not webnotes.flags.in_import:
            parentfields = [d.fieldname for d in self.meta.get({"doctype": "DocField", "fieldtype": "Table"})]

        for i, d in enumerate(self.doclist[1:]):
            if d.parentfield:
                if not webnotes.flags.in_import:
                    if not d.parentfield in parentfields:
                        webnotes.msgprint("Bad parentfield %s" % d.parentfield,
                            raise_exception=True)
                d.parenttype = self.doc.doctype
                d.parent = self.doc.name
            if not d.idx:
                # next index within this parentfield (1-based)
                d.idx = idx_map.setdefault(d.parentfield, 0) + 1
            else:
                d.idx = cint(d.idx)
            if is_local:
                # if parent is new, all children should be new
                d.fields["__islocal"] = 1
                d.name = None

            idx_map[d.parentfield] = d.idx

    def run_method(self, method, *args, **kwargs):
        """Invoke `method` (and its custom_* companion, if any) on the
        controller, fire the global notify hook, and resync the doclist."""
        self.make_controller()

        if hasattr(self.controller, method):
            getattr(self.controller, method)(*args, **kwargs)
        if hasattr(self.controller, 'custom_' + method):
            getattr(self.controller, 'custom_' + method)(*args, **kwargs)

        notify(self, method)

        self.set_doclist(self.controller.doclist)

    def get_method(self, method):
        """Return the controller's bound method, or None if absent."""
        self.make_controller()
        return getattr(self.controller, method, None)

    def insert(self):
        """Mark the document as new, apply defaults, and save it."""
        self.doc.fields["__islocal"] = 1

        self.set_defaults()

        if webnotes.flags.in_test:
            # test records get a recognizable naming-series prefix
            if self.meta.get_field("naming_series"):
                self.doc.naming_series = "_T-" + self.doc.doctype + "-"

        return self.save()

    def insert_or_update(self):
        """Save if the record already exists, otherwise insert it."""
        if self.doc.name and webnotes.conn.exists(self.doc.doctype, self.doc.name):
            return self.save()
        else:
            return self.insert()

    def set_defaults(self):
        """Overlay each row's fields on a fresh default document of the same
        doctype (skipped entirely during data import)."""
        if webnotes.flags.in_import:
            return

        new_docs = {}
        new_doclist = []

        for d in self.doclist:
            if not d.doctype in new_docs:
                new_docs[d.doctype] = webnotes.new_doc(d.doctype)

            newd = webnotes.doc(new_docs[d.doctype].fields.copy())
            newd.fields.update(d.fields)
            new_doclist.append(newd)

        self.set_doclist(new_doclist)

    def has_read_perm(self):
        """Whether the session user may read this document."""
        return webnotes.has_permission(self.doc.doctype, "read", self.doc)

    def save(self, check_links=1):
        """Full save cycle: permission check, prepare, validate, mandatory
        check, then write parent + children and fire on_update/after_insert.
        Returns self for chaining."""
        perm_to_check = "write"
        if self.doc.fields.get("__islocal"):
            perm_to_check = "create"
            if not self.doc.owner:
                self.doc.owner = webnotes.session.user

        if self.ignore_permissions or webnotes.has_permission(self.doc.doctype, perm_to_check, self.doc):
            self.to_docstatus = 0
            self.prepare_for_save("save")
            if not self.ignore_validate:
                self.run_method('validate')
            if not self.ignore_mandatory:
                self.check_mandatory()
            self.save_main()
            self.save_children()
            self.run_method('on_update')
            if perm_to_check=="create":
                self.run_method("after_insert")
        else:
            self.no_permission_to(_(perm_to_check.title()))

        return self

    def submit(self):
        """Save with docstatus 0 -> 1 and fire on_submit. Returns self."""
        if self.ignore_permissions or webnotes.has_permission(self.doc.doctype, "submit", self.doc):
            self.to_docstatus = 1
            self.prepare_for_save("submit")
            self.run_method('validate')
            self.check_mandatory()
            self.save_main()
            self.save_children()
            self.run_method('on_update')
            self.run_method('on_submit')
        else:
            self.no_permission_to(_("Submit"))

        return self

    def cancel(self):
        """Save with docstatus 1 -> 2, fire on_cancel, and verify nothing
        still links to this document. Returns self."""
        if self.ignore_permissions or webnotes.has_permission(self.doc.doctype, "cancel", self.doc):
            self.to_docstatus = 2
            self.prepare_for_save("cancel")
            self.run_method('before_cancel')
            self.save_main()
            self.save_children()
            self.run_method('on_cancel')
            self.check_no_back_links_exist()
        else:
            self.no_permission_to(_("Cancel"))

        return self

    def update_after_submit(self):
        """Write changes to an already-submitted document (docstatus stays 1).
        Returns self."""
        if self.doc.docstatus != 1:
            webnotes.msgprint("Only to called after submit", raise_exception=1)
        if self.ignore_permissions or webnotes.has_permission(self.doc.doctype, "write", self.doc):
            self.to_docstatus = 1
            self.prepare_for_save("update_after_submit")
            self.run_method('before_update_after_submit')
            self.save_main()
            self.save_children()
            self.run_method('on_update_after_submit')
        else:
            self.no_permission_to(_("Update"))

        return self

    def save_main(self):
        """Persist the parent document; on a duplicate-name collision, print
        a helpful message (noting cancellation, if any) and re-raise."""
        try:
            self.doc.save(check_links = False, ignore_fields = self.ignore_fields)
        except NameError, e:
            webnotes.msgprint('%s "%s" already exists' % (self.doc.doctype, self.doc.name))

            # prompt if cancelled
            if webnotes.conn.get_value(self.doc.doctype, self.doc.name, 'docstatus')==2:
                webnotes.msgprint('[%s "%s" has been cancelled]' % (self.doc.doctype, self.doc.name))
            webnotes.errprint(webnotes.utils.getTraceback())
            raise

    def save_children(self):
        """Persist all child rows, then delete any database rows of the same
        child doctypes that are no longer part of this doclist."""
        child_map = {}

        for d in self.doclist[1:]:
            if d.fields.get("parent") or d.fields.get("parentfield"):
                d.parent = self.doc.name # rename if reqd
                d.parenttype = self.doc.doctype

                d.save(check_links=False, ignore_fields = self.ignore_fields)

            child_map.setdefault(d.doctype, []).append(d.name)

        # delete all children in database that are not in the child_map

        # get all children types
        tablefields = webnotes.model.meta.get_table_fields(self.doc.doctype)

        for dt in tablefields:
            if dt[0] not in self.ignore_children_type:
                cnames = child_map.get(dt[0]) or []
                if cnames:
                    webnotes.conn.sql("""delete from `tab%s` where parent=%s and parenttype=%s and
                        name not in (%s)""" % (dt[0], '%s', '%s', ','.join(['%s'] * len(cnames))),
                            tuple([self.doc.name, self.doc.doctype] + cnames))
                else:
                    webnotes.conn.sql("""delete from `tab%s` where parent=%s and parenttype=%s""" \
                        % (dt[0], '%s', '%s'), (self.doc.name, self.doc.doctype))

    def delete(self):
        """Delete this document (delegates to webnotes.delete_doc)."""
        webnotes.delete_doc(self.doc.doctype, self.doc.name)

    def no_permission_to(self, ptype):
        """Raise BeanPermissionError naming the missing permission type."""
        webnotes.msgprint(("%s (%s): " % (self.doc.name, _(self.doc.doctype))) + \
            _("No Permission to ") + ptype, raise_exception=BeanPermissionError)

    def check_no_back_links_exist(self):
        """On cancel: abort if other documents still link to this one."""
        from webnotes.model.utils import check_if_doc_is_linked
        check_if_doc_is_linked(self.doc.doctype, self.doc.name, method="Cancel")

    def check_mandatory(self):
        """Collect all required fields that are empty (including empty child
        tables) and raise MandatoryError listing them."""
        missing = []
        for doc in self.doclist:
            for df in self.meta:
                if df.doctype=="DocField" and df.reqd and df.parent==doc.doctype and df.fieldname!="naming_series":
                    msg = ""
                    if df.fieldtype == "Table":
                        # a required table must have at least one row
                        if not self.doclist.get({"parentfield": df.fieldname}):
                            msg = _("Error") + ": " + _("Data missing in table") + ": " + _(df.label)

                    elif doc.fields.get(df.fieldname) is None:
                        msg = _("Error") + ": "
                        if doc.parentfield:
                            msg += _("Row") + (" # %s: " % (doc.idx,))

                        msg += _("Value missing for") + ": " + _(df.label)

                    if msg:
                        missing.append([msg, df.fieldname])

        if missing:
            for msg, fieldname in missing:
                msgprint(msg)

            raise webnotes.MandatoryError, ", ".join([fieldname for msg, fieldname in missing])

    def convert_type(self, doc):
        """For single doctypes (values stored as strings), cast Int/Check and
        Float/Currency fields back to numbers; normalize docstatus to int."""
        if doc.doctype==doc.name and doc.doctype!="DocType":
            for df in self.meta.get({"doctype": "DocField", "parent": doc.doctype}):
                if df.fieldtype in ("Int", "Check"):
                    doc.fields[df.fieldname] = cint(doc.fields.get(df.fieldname))
                elif df.fieldtype in ("Float", "Currency"):
                    doc.fields[df.fieldname] = flt(doc.fields.get(df.fieldname))

            doc.docstatus = cint(doc.docstatus)

    def extract_images_from_text_editor(self):
        """Move inline base64 images in Text Editor fields out to files."""
        from webnotes.utils.file_manager import extract_images_from_html
        if self.doc.doctype != "DocType":
            for df in self.meta.get({"doctype": "DocField", "parent": self.doc.doctype, "fieldtype":"Text Editor"}):
                extract_images_from_html(self.doc, df.fieldname)
def clone(source_wrapper):
    """ make a clone of a document"""
    # Accept either a Bean or a raw doclist.
    if isinstance(source_wrapper, list):
        source_wrapper = Bean(source_wrapper)

    new_wrapper = Bean(source_wrapper.doclist.copy())

    # A clone is an independent document, not an amendment of the source.
    if new_wrapper.doc.fields.get("amended_from"):
        new_wrapper.doc.fields["amended_from"] = None

    if new_wrapper.doc.fields.get("amendment_date"):
        new_wrapper.doc.fields["amendment_date"] = None

    # Reset identity on every row so the copy is saved as a new draft.
    for d in new_wrapper.doclist:
        d.fields.update({
            "name": None,
            "__islocal": 1,
            "docstatus": 0,
        })

    return new_wrapper
def notify(bean, method):
    """Fire the optional startup.bean_handlers.on_method hook (if installed)
    after each Bean method runs."""
    if on_method:
        on_method(bean, method)
# for bc
def getlist(doclist, parentfield):
    """Return the child rows of `doclist` filed under `parentfield`.

    Backwards-compatibility shim that delegates to webnotes.model.utils.
    """
    from webnotes.model import utils as model_utils
    return model_utils.getlist(doclist, parentfield)
def copy_doclist(doclist, no_copy=None):
	"""
	Make a copy of the doclist.

	:param doclist: the doclist to copy
	:param no_copy: optional list of fieldnames to exclude from the copy
	"""
	# The original used a mutable default argument (no_copy=[]), which is
	# created once and shared across all calls; use None as the sentinel
	# and build a fresh list per call instead.
	if no_copy is None:
		no_copy = []
	import webnotes.model.utils
	return webnotes.model.utils.copy_doclist(doclist, no_copy)
| |
# -*- coding: utf-8 -*-
from __future__ import print_function, division
import ctypes
from inspect import signature
import PyDAQmx
import numpy as np
import six
# Map numpy dtype codes (as they appear in ``dtype.descr``) to the suffix of
# the matching DAQmx read/write function name, e.g. '<f8' -> ...AnalogF64.
dataTypeConversions = {
    '<f8': 'F64',
    '<i2': 'I16',
    '<i4': 'I32',
    '<u2': 'U16',
    '<u4': 'U32',
    '|u1': 'U8',
}
def init():
    """Create the module-level NIDAQ singleton (an instance of _NIDAQ)."""
    global NIDAQ
    NIDAQ = _NIDAQ()
class _NIDAQ:
    """Singleton wrapper around the PyDAQmx driver bindings.

    One instance is created at import time by :func:`init` and published as
    the module-level ``NIDAQ`` object; constructing a second instance
    raises.  Unknown attribute lookups fall through to PyDAQmx (see
    ``__getattr__``): constants are returned directly, and driver functions
    are routed through :meth:`call`, which fills in trailing output/buffer
    arguments automatically.
    """
    NIDAQ_CREATED = False

    def __init__(self):
        if _NIDAQ.NIDAQ_CREATED:
            raise Exception("Will not create another nidaq instance--use the pre-existing NIDAQ object.")
        self.devices = {}
        # cached tasks used for scalar AO/AI operations
        # (this shaves a few ms from the cost of reading/writing scalars)
        self._scalarTasks = {}
        # :TODO: initialize the driver
        _NIDAQ.NIDAQ_CREATED = True

    def __repr__(self):
        return "<niDAQmx driver wrapper>"

    def listDevices(self):
        """Return the names of all DAQ devices known to the system."""
        return self.GetSysDevNames().split(", ")

    def __getattr__(self, attr):
        # Forward unknown attributes to PyDAQmx: callables are wrapped so
        # they go through self.call(); constants are returned directly.
        if hasattr(PyDAQmx, attr):
            if callable(getattr(PyDAQmx, attr)):
                return lambda *args: self.call(attr, *args)
            else:
                return getattr(PyDAQmx, attr)
        else:
            raise NameError("{} not found among DAQmx constants or functions".format(attr))

    def call(self, func, *args):
        """Invoke ``PyDAQmx.<func>``, supplying trailing output arguments.

        Three cases, judged from the wrapped function's signature:
        * functions taking (data, bufferSize) return a string -- call once
          with a null buffer to learn the required size, then again with a
          real buffer and return the decoded value;
        * functions with more parameters than the supplied args get a
          ctypes output argument appended, and its value is returned;
        * otherwise the call is passed straight through.
        """
        fn = getattr(PyDAQmx, func)
        sig = signature(fn)
        if "bufferSize" in sig.parameters:
            # First call with a null buffer reports the required size.
            buffSize = fn(data=None, bufferSize=0, *args)
            ret = ctypes.create_string_buffer(b"\0" * buffSize)
            if "reserved" in sig.parameters and len(args) < len(sig.parameters):
                args += (None,)
            fn(*args, data=ret, bufferSize=buffSize)
            return ret.value.decode("utf-8")
        elif len(args) < len(sig.parameters):
            # Assume 1 missing arg, which is the pointer to the useful return value
            # Assumptions are generally bad things...
            cfuncInfo = PyDAQmx.function_dict["DAQmx" + func]
            dataType = cfuncInfo["arg_type"][-1]
            ret = dataType._type_()
            if "data" in sig.parameters or "isTaskDone" in sig.parameters:
                args += (dataType(ret),)
            if "value" in sig.parameters and not func.startswith("Write"):
                args += (dataType(ret),)
            if "reserved" in sig.parameters and len(args) < len(sig.parameters):
                args += (None,)
            try:
                fn(*args)
            except Exception:
                # Narrowed from a bare ``except:`` so SystemExit and
                # KeyboardInterrupt are no longer swallowed.  The original
                # best-effort behavior is preserved: log and fall through,
                # returning whatever landed in ret.
                print("Error drivers/nidaq/nidaq.py in setting args: args= ", args)
            return ret.value
        else:
            return fn(*args)

    def _call(self, func, *args, **kargs):
        # NOTE(review): ``self.nidaq`` is never assigned on this class, so
        # this helper looks like dead legacy code -- confirm before use.
        try:
            return getattr(self.nidaq, func)(*args, **kargs)
        except Exception:
            # log the failing call, then re-raise unchanged
            print(func, args)
            raise

    def CreateTask(self, taskName):
        """Create a DAQmx task and return its raw TaskHandle value."""
        taskPtr = PyDAQmx.TaskHandle()
        self.call("CreateTask", taskName, taskPtr)
        return taskPtr.value

    def error(self, errCode=None):
        """Return a string with error information. If errCode is None, then the currently 'active' error will be used."""
        if errCode is None:
            err = self.GetExtendedErrorInfo().decode("ascii")
        else:
            err = self.GetErrorString(errCode).decode("ascii")
        # NOTE(review): call() already decodes buffered string results, so
        # the extra .decode("ascii") above only works on Python 2 -- confirm.
        # str.replace returns a new string; the original discarded the
        # result, leaving literal backslash-n sequences in the message.
        err = err.replace("\\n", "\n")
        return err

    def __del__(self):
        # Allow a fresh singleton to be built after this one is collected.
        self.__class__.NIDAQ_CREATED = False

    def createTask(self, name=""):
        """Create and return a new Task bound to this driver."""
        return Task(self, name)

    def createSuperTask(self):
        from . import SuperTask
        return SuperTask.SuperTask(self)

    def interpretMode(self, mode):
        """Translate a mode name ('rse', 'nrse', 'diff', 'chanperline',
        'chanforalllines') into its DAQmx constant.

        Unknown string names map to None; non-string values are passed
        through unchanged (assumed to already be DAQmx constants).
        """
        modes = {
            "rse": PyDAQmx.Val_RSE,
            "nrse": PyDAQmx.Val_NRSE,
            "diff": PyDAQmx.Val_Diff,
            "chanperline": PyDAQmx.Val_ChanPerLine,
            "chanforalllines": PyDAQmx.Val_ChanForAllLines,
        }
        if isinstance(mode, six.string_types):
            mode = mode.lower()
            mode = modes.get(mode, None)
        return mode

    def writeAnalogSample(self, chan, value, vRange=(-10.0, 10.0), timeout=10.0):
        """Set the value of an AO port"""
        key = ("ao", chan)
        t = self._scalarTasks.get(key, None)
        if t is None:
            t = self.createTask()
            t.CreateAOVoltageChan(chan, "", vRange[0], vRange[1], PyDAQmx.Val_Volts, None)
            self._scalarTasks[key] = t
        # NOTE: the cache is keyed by channel only, so a cached task keeps
        # the vRange it was first created with.
        t.WriteAnalogScalarF64(True, timeout, value)

    def readAnalogSample(self, chan, mode=None, vRange=(-10.0, 10.0), timeout=10.0):
        """Get the value of an AI port"""
        if mode is None:
            mode = PyDAQmx.Val_Cfg_Default
        else:
            mode = self.interpretMode(mode)
        key = ("ai", mode, chan)
        t = self._scalarTasks.get(key, None)
        if t is None:
            t = self.createTask()
            t.CreateAIVoltageChan(chan, "", mode, vRange[0], vRange[1], PyDAQmx.Val_Volts, None)
            self._scalarTasks[key] = t
        # NOTE: the cache is keyed by (mode, channel), so a cached task
        # keeps the vRange it was first created with.
        return t.ReadAnalogScalarF64(timeout)

    def writeDigitalSample(self, chan, value, timeout=10.0):
        """Set the value of an AO or DO port"""
        key = ("do", chan)
        t = self._scalarTasks.get(key, None)
        if t is None:
            t = self.createTask()
            t.CreateDOChan(chan, "", PyDAQmx.Val_ChanForAllLines)
            self._scalarTasks[key] = t
        t.WriteDigitalScalarU32(True, timeout, value)

    def readDigitalSample(self, chan, timeout=10.0):
        """Get the value of a DI port"""
        key = ("di", chan)
        t = self._scalarTasks.get(key, None)
        if t is None:
            t = self.createTask()
            t.CreateDIChan(chan, "", PyDAQmx.Val_ChanForAllLines)
            self._scalarTasks[key] = t
        return t.ReadDigitalScalarU32(timeout)

    def listAIChannels(self, dev=None):
        """Return the physical AI channel names of a device."""
        return self.GetDevAIPhysicalChans(dev).split(", ")

    def listAOChannels(self, dev):
        """Return the physical AO channel names of a device."""
        return self.GetDevAOPhysicalChans(dev).split(", ")

    def listDILines(self, dev):
        """Return the DI line names of a device."""
        return self.GetDevDILines(dev).split(", ")

    def listDIPorts(self, dev):
        """Return the DI port names of a device."""
        return self.GetDevDIPorts(dev).split(", ")

    def listDOLines(self, dev):
        """Return the DO line names of a device."""
        return self.GetDevDOLines(dev).split(", ")

    def listDOPorts(self, dev):
        """Return the DO port names of a device."""
        return self.GetDevDOPorts(dev).split(", ")
# Build the module-level driver singleton at import time.
init()

# Map DAQmx channel-type constants to short human-readable labels
# (used in error messages by Task.read/write).
chTypes = {
    PyDAQmx.Val_AI: "AI",
    PyDAQmx.Val_AO: "AO",
    PyDAQmx.Val_DI: "DI",
    PyDAQmx.Val_DO: "DO",
    PyDAQmx.Val_CI: "CI",
    PyDAQmx.Val_CO: "CO",
}
class Task:
    """A single DAQmx task, owning a driver-side TaskHandle for its lifetime.

    Unknown attribute lookups are forwarded to the driver wrapper with the
    task handle prepended as the first argument, so any DAQmx task function
    can be called directly, e.g. ``task.CfgSampClkTiming(...)``.
    """
    # TaskHandle = None

    def __init__(self, nidaq, taskName=""):
        # nidaq is the _NIDAQ driver wrapper; CreateTask returns the raw
        # handle value used for all subsequent driver calls.
        self.nidaq = nidaq
        self.handle = self.nidaq.CreateTask(taskName)

    def __del__(self):
        # Release the driver-side task when this wrapper is collected.
        self.nidaq.ClearTask(self.handle)

    def __getattr__(self, attr):
        # Forward unknown attributes to the driver, binding the task handle
        # as the first argument.
        func = getattr(self.nidaq, attr)
        return lambda *args: func(self.handle, *args)

    def __repr__(self):
        return "<Task: %s>" % str(self.GetTaskChannels())

    def start(self):
        self.StartTask()

    def stop(self):
        self.StopTask()

    def isDone(self):
        """Return whether the task has completed acquisition/generation."""
        return self.IsTaskDone()

    def read(self, samples=None, timeout=10.0, dtype=None):
        """Read buffered samples from an input task.

        Returns ``(buf, nPts)`` where ``buf`` is a numpy array of shape
        (numChans, samples) and ``nPts`` is the value reported by the
        driver read call.  If *samples* is None, the task's configured
        samples-per-channel is used.  *dtype* defaults to float64 for
        analog tasks and uint32 for digital tasks.
        """
        # reqSamps = samples
        # if samples is None:
        #     samples = self.GetSampQuantSampPerChan()
        #     reqSamps = -1
        if samples is None:
            samples = self.GetSampQuantSampPerChan()
        reqSamps = samples
        numChans = self.GetTaskNumChans()
        shape = (numChans, samples)
        # print "Shape: ", shape
        # Determine the default dtype based on the task type
        tt = self.taskType()
        if dtype is None:
            if tt in [PyDAQmx.Val_AI, PyDAQmx.Val_AO]:
                dtype = np.float64
            elif tt in [PyDAQmx.Val_DI, PyDAQmx.Val_DO]:
                dtype = np.uint32  # uint8 / 16 might be sufficient, but don't seem to work anyway.
            else:
                raise Exception("No default dtype for %s tasks." % chTypes[tt])
        buf = np.empty(shape, dtype=dtype)
        # samplesRead = ctypes.c_long()
        # Determine the correct function name to call based on the dtype requested
        fName = "Read"
        if tt == PyDAQmx.Val_AI:
            if dtype == np.float64:
                fName += "Analog"
            elif dtype in [np.int16, np.uint16, np.int32, np.uint32]:
                fName += "Binary"
            else:
                raise Exception(
                    "dtype %s not allowed for AI channels (must be float64, int16, uint16, int32, or uint32)"
                    % str(dtype)
                )
        elif tt == PyDAQmx.Val_DI:
            if dtype in [np.uint8, np.uint16, np.uint32]:
                fName += "Digital"
            else:
                raise Exception("dtype %s not allowed for DI channels (must be uint8, uint16, or uint32)" % str(dtype))
        elif tt == PyDAQmx.Val_CI:
            fName += "Counter"
        else:
            raise Exception("read() not allowed for this task type (%s)" % chTypes[tt])
        # Append the dtype-specific suffix, e.g. "ReadAnalog" + "F64".
        fName += dataTypeConversions[np.dtype(dtype).descr[0][1]]
        # Always read from the start of the acquisition buffer.
        self.SetReadRelativeTo(PyDAQmx.Val_FirstSample)
        self.SetReadOffset(0)
        nPts = getattr(self, fName)(reqSamps, timeout, PyDAQmx.Val_GroupByChannel, buf, buf.size, None)
        return buf, nPts

    def write(self, data, timeout=10.0):
        """Write a numpy array to an output task; returns the count the
        driver reports written.  The write function name is chosen from the
        task type and ``data.dtype``."""
        numChans = self.GetTaskNumChans()
        # samplesWritten = c_long()
        # Determine the correct write function to call based on dtype and task type
        fName = "Write"
        tt = self.taskType()
        if tt == PyDAQmx.Val_AO:
            if data.dtype == np.float64:
                fName += "Analog"
            elif data.dtype in [np.int16, np.uint16]:
                fName += "Binary"
            else:
                raise Exception(
                    "dtype %s not allowed for AO channels (must be float64, int16, or uint16)" % str(data.dtype)
                )
        elif tt == PyDAQmx.Val_DO:
            if data.dtype in [np.uint8, np.uint16, np.uint32]:
                fName += "Digital"
            else:
                raise Exception(
                    "dtype %s not allowed for DO channels (must be uint8, uint16, or uint32)" % str(data.dtype)
                )
        else:
            raise Exception("write() not implemented for this task type (%s)" % chTypes[tt])
        fName += dataTypeConversions[data.dtype.descr[0][1]]
        nPts = getattr(self, fName)(data.size // numChans, False, timeout, PyDAQmx.Val_GroupByChannel, data, None)
        return nPts

    def absChannelName(self, n):
        """Return the absolute form ("/dev/chan") of a channel name,
        prepending the task's device when the name lacks one (only possible
        when the task uses exactly one device)."""
        parts = n.lstrip("/").split("/")
        devs = self.GetTaskDevices().split(", ")
        if parts[0] not in devs:
            if len(devs) != 1:
                raise Exception("Cannot determine device to prepend on channel '%s'" % n)
            parts = [devs[0]] + parts
        return "/" + "/".join(parts)

    def taskType(self):
        """Return the DAQmx channel-type constant of this task, taken from
        its first channel."""
        # print "taskType:"
        ch = self.GetTaskChannels().split(", ")
        # print ch
        ch = self.absChannelName(ch[0])
        # print "First task channel:", ch
        return self.GetChanType(ch)

    def isInputTask(self):
        return self.taskType() in [PyDAQmx.Val_AI, PyDAQmx.Val_DI]

    def isOutputTask(self):
        return self.taskType() in [PyDAQmx.Val_AO, PyDAQmx.Val_DO]
| |
#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class lbvserver_auditnslogpolicy_binding(base_resource) :
	""" Binding class showing the auditnslogpolicy that can be bound to lbvserver.

	NOTE: generated nitro SDK code -- each attribute is a property backed by
	a ``_name`` field, and CRUD operations go through the classmethods below.
	"""
	def __init__(self) :
		# Backing fields for the binding's REST attributes; ___count carries
		# the object count echoed back by count queries.
		self._policyname = ""
		self._priority = 0
		self._sc = ""
		self._name = ""
		self._gotopriorityexpression = ""
		self._bindpoint = ""
		self._invoke = False
		self._labeltype = ""
		self._labelname = ""
		self.___count = 0

	@property
	def priority(self) :
		"""Priority.
		"""
		try :
			return self._priority
		except Exception as e:
			raise e

	@priority.setter
	def priority(self, priority) :
		"""Priority.
		"""
		try :
			self._priority = priority
		except Exception as e:
			raise e

	@property
	def gotopriorityexpression(self) :
		"""Expression or other value specifying the next policy to be evaluated if the current policy evaluates to TRUE. Specify one of the following values:
		* NEXT - Evaluate the policy with the next higher priority number.
		* END - End policy evaluation.
		* USE_INVOCATION_RESULT - Applicable if this policy invokes another policy label. If the final goto in the invoked policy label has a value of END, the evaluation stops. If the final goto is anything other than END, the current policy label performs a NEXT.
		* A default syntax expression that evaluates to a number.
		If you specify an expression, the number to which it evaluates determines the next policy to evaluate, as follows:
		* If the expression evaluates to a higher numbered priority, the policy with that priority is evaluated next.
		* If the expression evaluates to the priority of the current policy, the policy with the next higher numbered priority is evaluated next.
		* If the expression evaluates to a priority number that is numerically higher than the highest numbered priority, policy evaluation ends.
		An UNDEF event is triggered if:
		* The expression is invalid.
		* The expression evaluates to a priority number that is numerically lower than the current policy's priority.
		* The expression evaluates to a priority number that is between the current policy's priority number (say, 30) and the highest priority number (say, 100), but does not match any configured priority number (for example, the expression evaluates to the number 85). This example assumes that the priority number increments by 10 for every successive policy, and therefore a priority number of 85 does not exist in the policy label.
		"""
		try :
			return self._gotopriorityexpression
		except Exception as e:
			raise e

	@gotopriorityexpression.setter
	def gotopriorityexpression(self, gotopriorityexpression) :
		"""Expression or other value specifying the next policy to be evaluated if the current policy evaluates to TRUE. Specify one of the following values:
		* NEXT - Evaluate the policy with the next higher priority number.
		* END - End policy evaluation.
		* USE_INVOCATION_RESULT - Applicable if this policy invokes another policy label. If the final goto in the invoked policy label has a value of END, the evaluation stops. If the final goto is anything other than END, the current policy label performs a NEXT.
		* A default syntax expression that evaluates to a number.
		If you specify an expression, the number to which it evaluates determines the next policy to evaluate, as follows:
		* If the expression evaluates to a higher numbered priority, the policy with that priority is evaluated next.
		* If the expression evaluates to the priority of the current policy, the policy with the next higher numbered priority is evaluated next.
		* If the expression evaluates to a priority number that is numerically higher than the highest numbered priority, policy evaluation ends.
		An UNDEF event is triggered if:
		* The expression is invalid.
		* The expression evaluates to a priority number that is numerically lower than the current policy's priority.
		* The expression evaluates to a priority number that is between the current policy's priority number (say, 30) and the highest priority number (say, 100), but does not match any configured priority number (for example, the expression evaluates to the number 85). This example assumes that the priority number increments by 10 for every successive policy, and therefore a priority number of 85 does not exist in the policy label.
		"""
		try :
			self._gotopriorityexpression = gotopriorityexpression
		except Exception as e:
			raise e

	@property
	def policyname(self) :
		"""Name of the policy bound to the LB vserver.
		"""
		try :
			return self._policyname
		except Exception as e:
			raise e

	@policyname.setter
	def policyname(self, policyname) :
		"""Name of the policy bound to the LB vserver.
		"""
		try :
			self._policyname = policyname
		except Exception as e:
			raise e

	@property
	def name(self) :
		"""Name for the virtual server. Must begin with an ASCII alphanumeric or underscore (_) character, and must contain only ASCII alphanumeric, underscore, hash (#), period (.), space, colon (:), at sign (@), equal sign (=), and hyphen (-) characters. Can be changed after the virtual server is created.
		CLI Users: If the name includes one or more spaces, enclose the name in double or single quotation marks (for example, "my vserver" or 'my vserver'). .<br/>Minimum length = 1.
		"""
		try :
			return self._name
		except Exception as e:
			raise e

	@name.setter
	def name(self, name) :
		"""Name for the virtual server. Must begin with an ASCII alphanumeric or underscore (_) character, and must contain only ASCII alphanumeric, underscore, hash (#), period (.), space, colon (:), at sign (@), equal sign (=), and hyphen (-) characters. Can be changed after the virtual server is created.
		CLI Users: If the name includes one or more spaces, enclose the name in double or single quotation marks (for example, "my vserver" or 'my vserver'). .<br/>Minimum length = 1
		"""
		try :
			self._name = name
		except Exception as e:
			raise e

	@property
	def bindpoint(self) :
		"""Bind point to which to bind the policy. Applicable only to compression, rewrite, and cache policies.
		"""
		try :
			return self._bindpoint
		except Exception as e:
			raise e

	@bindpoint.setter
	def bindpoint(self, bindpoint) :
		"""Bind point to which to bind the policy. Applicable only to compression, rewrite, and cache policies.
		"""
		try :
			self._bindpoint = bindpoint
		except Exception as e:
			raise e

	@property
	def labeltype(self) :
		"""Type of policy label to invoke. Applicable only to rewrite and cache policies. Available settings function as follows:
		* reqvserver - Evaluate the request against the request-based policies bound to the specified virtual server.
		* resvserver - Evaluate the response against the response-based policies bound to the specified virtual server.
		* policylabel - invoke the request or response against the specified user-defined policy label.
		"""
		try :
			return self._labeltype
		except Exception as e:
			raise e

	@labeltype.setter
	def labeltype(self, labeltype) :
		"""Type of policy label to invoke. Applicable only to rewrite and cache policies. Available settings function as follows:
		* reqvserver - Evaluate the request against the request-based policies bound to the specified virtual server.
		* resvserver - Evaluate the response against the response-based policies bound to the specified virtual server.
		* policylabel - invoke the request or response against the specified user-defined policy label.
		"""
		try :
			self._labeltype = labeltype
		except Exception as e:
			raise e

	@property
	def labelname(self) :
		"""Name of the virtual server or user-defined policy label to invoke if the policy evaluates to TRUE.
		"""
		try :
			return self._labelname
		except Exception as e:
			raise e

	@labelname.setter
	def labelname(self, labelname) :
		"""Name of the virtual server or user-defined policy label to invoke if the policy evaluates to TRUE.
		"""
		try :
			self._labelname = labelname
		except Exception as e:
			raise e

	@property
	def invoke(self) :
		"""Invoke policies bound to a virtual server or policy label.
		"""
		try :
			return self._invoke
		except Exception as e:
			raise e

	@invoke.setter
	def invoke(self, invoke) :
		"""Invoke policies bound to a virtual server or policy label.
		"""
		try :
			self._invoke = invoke
		except Exception as e:
			raise e

	@property
	def sc(self) :
		"""Use SureConnect on the virtual server.<br/>Default value: OFF<br/>Possible values = ON, OFF.
		"""
		# read-only property: no setter is generated for sc
		try :
			return self._sc
		except Exception as e:
			raise e

	def _get_nitro_response(self, service, response) :
		""" converts nitro response into object and returns the object array in case of get request.
		"""
		try :
			result = service.payload_formatter.string_to_resource(lbvserver_auditnslogpolicy_binding_response, response, self.__class__.__name__)
			if(result.errorcode != 0) :
				# errorcode 444 means the session expired; drop it before raising
				if (result.errorcode == 444) :
					service.clear_session(self)
				if result.severity :
					if (result.severity == "ERROR") :
						raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
				else :
					raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
			return result.lbvserver_auditnslogpolicy_binding
		except Exception as e :
			raise e

	def _get_object_name(self) :
		""" Returns the value of object identifier argument
		"""
		try :
			if (self.name) :
				return str(self.name)
			return None
		except Exception as e :
			raise e

	@classmethod
	def add(cls, client, resource) :
		# A single resource is sent as one update; a list is sent as a bulk
		# request built from per-item copies.
		try :
			if resource and type(resource) is not list :
				updateresource = lbvserver_auditnslogpolicy_binding()
				updateresource.name = resource.name
				updateresource.policyname = resource.policyname
				updateresource.gotopriorityexpression = resource.gotopriorityexpression
				updateresource.bindpoint = resource.bindpoint
				updateresource.invoke = resource.invoke
				updateresource.labeltype = resource.labeltype
				updateresource.labelname = resource.labelname
				return updateresource.update_resource(client)
			else :
				if resource and len(resource) > 0 :
					updateresources = [lbvserver_auditnslogpolicy_binding() for _ in range(len(resource))]
					for i in range(len(resource)) :
						updateresources[i].name = resource[i].name
						updateresources[i].policyname = resource[i].policyname
						updateresources[i].gotopriorityexpression = resource[i].gotopriorityexpression
						updateresources[i].bindpoint = resource[i].bindpoint
						updateresources[i].invoke = resource[i].invoke
						updateresources[i].labeltype = resource[i].labeltype
						updateresources[i].labelname = resource[i].labelname
				return cls.update_bulk_request(client, updateresources)
		except Exception as e :
			raise e

	@classmethod
	def delete(cls, client, resource) :
		# Mirrors add(): single resource -> single delete, list -> bulk delete.
		try :
			if resource and type(resource) is not list :
				deleteresource = lbvserver_auditnslogpolicy_binding()
				deleteresource.name = resource.name
				deleteresource.policyname = resource.policyname
				deleteresource.bindpoint = resource.bindpoint
				return deleteresource.delete_resource(client)
			else :
				if resource and len(resource) > 0 :
					deleteresources = [lbvserver_auditnslogpolicy_binding() for _ in range(len(resource))]
					for i in range(len(resource)) :
						deleteresources[i].name = resource[i].name
						deleteresources[i].policyname = resource[i].policyname
						deleteresources[i].bindpoint = resource[i].bindpoint
				return cls.delete_bulk_request(client, deleteresources)
		except Exception as e :
			raise e

	@classmethod
	def get(cls, service, name) :
		""" Use this API to fetch lbvserver_auditnslogpolicy_binding resources.
		"""
		try :
			obj = lbvserver_auditnslogpolicy_binding()
			obj.name = name
			response = obj.get_resources(service)
			return response
		except Exception as e:
			raise e

	@classmethod
	def get_filtered(cls, service, name, filter_) :
		""" Use this API to fetch filtered set of lbvserver_auditnslogpolicy_binding resources.
		Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
		"""
		try :
			obj = lbvserver_auditnslogpolicy_binding()
			obj.name = name
			option_ = options()
			option_.filter = filter_
			response = obj.getfiltered(service, option_)
			return response
		except Exception as e:
			raise e

	@classmethod
	def count(cls, service, name) :
		""" Use this API to count lbvserver_auditnslogpolicy_binding resources configued on NetScaler.
		"""
		try :
			obj = lbvserver_auditnslogpolicy_binding()
			obj.name = name
			option_ = options()
			option_.count = True
			response = obj.get_resources(service, option_)
			if response :
				# the count is delivered in the '___count' attribute of the
				# first returned object
				return response[0].__dict__['___count']
			return 0
		except Exception as e:
			raise e

	@classmethod
	def count_filtered(cls, service, name, filter_) :
		""" Use this API to count the filtered set of lbvserver_auditnslogpolicy_binding resources.
		Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
		"""
		try :
			obj = lbvserver_auditnslogpolicy_binding()
			obj.name = name
			option_ = options()
			option_.count = True
			option_.filter = filter_
			response = obj.getfiltered(service, option_)
			if response :
				return response[0].__dict__['___count']
			return 0
		except Exception as e:
			raise e

	# Enumerations of the allowed values for the corresponding attributes.
	class Sc:
		ON = "ON"
		OFF = "OFF"

	class Bindpoint:
		REQUEST = "REQUEST"
		RESPONSE = "RESPONSE"

	class Labeltype:
		reqvserver = "reqvserver"
		resvserver = "resvserver"
		policylabel = "policylabel"
class lbvserver_auditnslogpolicy_binding_response(base_response) :
	"""Response envelope for lbvserver_auditnslogpolicy_binding API calls.

	:param length: number of empty binding objects to pre-allocate in the
	               result list (default 1).
	"""
	def __init__(self, length=1) :
		# The original assigned ``lbvserver_auditnslogpolicy_binding`` twice
		# in a row; the first empty-list assignment was dead code and has
		# been removed.
		self.errorcode = 0
		self.message = ""
		self.severity = ""
		self.sessionid = ""
		self.lbvserver_auditnslogpolicy_binding = [lbvserver_auditnslogpolicy_binding() for _ in range(length)]
| |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2011 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://babel.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://babel.edgewall.org/log/.
"""Basic infrastructure for extracting localizable messages from source files.
This module defines an extensible system for collecting localizable message
strings from a variety of sources. A native extractor for Python source files
is builtin, extractors for other sources can be added using very simple plugins.
The main entry points into the extraction functionality are the functions
`extract_from_dir` and `extract_from_file`.
"""
import os
import sys
from tokenize import generate_tokens, COMMENT, NAME, OP, STRING
from babel.util import parse_encoding, pathmatch, relpath
from textwrap import dedent
__all__ = ['extract', 'extract_from_dir', 'extract_from_file']
__docformat__ = 'restructuredtext en'

# Entry-point group under which third-party extraction plugins register.
GROUP_NAME = 'babel.extractors'

# Default keyword spec: maps a translation-function name to the 1-based
# indices of its arguments that hold translatable strings; None means
# "first argument only" (see the ``spec = keywords[funcname] or (1,)``
# fallback in extract()).
DEFAULT_KEYWORDS = {
    '_': None,
    'gettext': None,
    'ngettext': (1, 2),
    'ugettext': None,
    'ungettext': (1, 2),
    'dgettext': (2,),
    'dngettext': (2, 3),
    'N_': None
}

# By default every .py file, at any directory depth, is handled by the
# builtin "python" extractor.
DEFAULT_MAPPING = [('**.py', 'python')]

# Warning emitted when a keyword call passes an empty msgid.
empty_msgid_warning = (
    '%s: warning: Empty msgid. It is reserved by GNU gettext: gettext("") '
    'returns the header entry with meta information, not the empty string.')
def _strip_comment_tags(comments, tags):
"""Helper function for `extract` that strips comment tags from strings
in a list of comment lines. This functions operates in-place.
"""
def _strip(line):
for tag in tags:
if line.startswith(tag):
return line[len(tag):].strip()
return line
comments[:] = map(_strip, comments)
def extract_from_dir(dirname=None, method_map=DEFAULT_MAPPING,
                     options_map=None, keywords=DEFAULT_KEYWORDS,
                     comment_tags=(), callback=None, strip_comment_tags=False):
    """Extract messages from any source files found in the given directory.

    This function generates tuples of the form:

        ``(filename, lineno, message, comments)``

    Which extraction method is used per file is determined by the `method_map`
    parameter, which maps extended glob patterns to extraction method names.
    For example, the following is the default mapping:

    >>> method_map = [
    ...     ('**.py', 'python')
    ... ]

    This basically says that files with the filename extension ".py" at any
    level inside the directory should be processed by the "python" extraction
    method. Files that don't match any of the mapping patterns are ignored. See
    the documentation of the `pathmatch` function for details on the pattern
    syntax.

    The following extended mapping would also use the "genshi" extraction
    method on any file in "templates" subdirectory:

    >>> method_map = [
    ...     ('**/templates/**.*', 'genshi'),
    ...     ('**.py', 'python')
    ... ]

    The dictionary provided by the optional `options_map` parameter augments
    these mappings. It uses extended glob patterns as keys, and the values are
    dictionaries mapping options names to option values (both strings).

    :param dirname: the path to the directory to extract messages from; if
                    not given, the current working directory at call time is
                    used
    :param method_map: a list of ``(pattern, method)`` tuples that maps of
                       extraction method names to extended glob patterns
    :param options_map: a dictionary of additional options (optional)
    :param keywords: a dictionary mapping keywords (i.e. names of functions
                     that should be recognized as translation functions) to
                     tuples that specify which of their arguments contain
                     localizable strings
    :param comment_tags: a list of tags of translator comments to search for
                         and include in the results
    :param callback: a function that is called for every file that message are
                     extracted from, just before the extraction itself is
                     performed; the function is passed the filename, the name
                     of the extraction method and and the options dictionary as
                     positional arguments, in that order
    :param strip_comment_tags: a flag that if set to `True` causes all comment
                               tags to be removed from the collected comments.
    :return: an iterator over ``(filename, lineno, funcname, message)`` tuples
    :rtype: ``iterator``
    :see: `pathmatch`
    """
    if dirname is None:
        # The original used ``dirname=os.getcwd()`` as the default, which
        # froze the working directory at module import time; resolve it at
        # call time instead.
        dirname = os.getcwd()
    if options_map is None:
        options_map = {}
    absname = os.path.abspath(dirname)
    for root, dirnames, filenames in os.walk(absname):
        # Prune hidden/private subdirectories in place so os.walk does not
        # descend into them.  Rebuilding via slice assignment fixes the
        # original bug of calling list.remove() while iterating the same
        # list, which skipped the entry following each removal.
        dirnames[:] = [subdir for subdir in dirnames
                       if not (subdir.startswith('.') or subdir.startswith('_'))]
        dirnames.sort()
        filenames.sort()
        for filename in filenames:
            filename = relpath(
                os.path.join(root, filename).replace(os.sep, '/'),
                dirname
            )
            for pattern, method in method_map:
                if pathmatch(pattern, filename):
                    filepath = os.path.join(absname, filename)
                    options = {}
                    # Later matching options patterns override earlier ones.
                    for opattern, odict in options_map.items():
                        if pathmatch(opattern, filename):
                            options = odict
                    if callback:
                        callback(filename, method, options)
                    for lineno, message, comments in \
                            extract_from_file(method, filepath,
                                              keywords=keywords,
                                              comment_tags=comment_tags,
                                              options=options,
                                              strip_comment_tags=
                                              strip_comment_tags):
                        yield filename, lineno, message, comments
                    # Only the first matching extraction method is applied.
                    break
def extract_from_file(method, filename, keywords=DEFAULT_KEYWORDS,
                      comment_tags=(), options=None, strip_comment_tags=False):
    """Extract messages from a specific file.

    This function returns a list of tuples of the form:

        ``(lineno, funcname, message)``

    :param filename: the path to the file to extract messages from
    :param method: a string specifying the extraction method (.e.g. "python")
    :param keywords: a dictionary mapping keywords (i.e. names of functions
                     that should be recognized as translation functions) to
                     tuples that specify which of their arguments contain
                     localizable strings
    :param comment_tags: a list of translator tags to search for and include
                         in the results
    :param strip_comment_tags: a flag that if set to `True` causes all comment
                               tags to be removed from the collected comments.
    :param options: a dictionary of additional options (optional)
    :return: the list of extracted messages
    :rtype: `list`
    """
    # 'U' opens in universal-newline mode (this module targets Python 2).
    source = open(filename, 'U')
    try:
        results = extract(method, source, keywords, comment_tags, options,
                          strip_comment_tags)
        return list(results)
    finally:
        source.close()
def extract(method, fileobj, keywords=DEFAULT_KEYWORDS, comment_tags=(),
            options=None, strip_comment_tags=False):
    """Extract messages from the given file-like object using the specified
    extraction method.

    This function returns a list of tuples of the form:

        ``(lineno, message, comments)``

    The implementation dispatches the actual extraction to plugins, based on the
    value of the ``method`` parameter.

    >>> source = '''# foo module
    ... def run(argv):
    ...    print _('Hello, world!')
    ... '''

    >>> from StringIO import StringIO
    >>> for message in extract('python', StringIO(source)):
    ...     print message
    (3, u'Hello, world!', [])

    :param method: a string specifying the extraction method (.e.g. "python");
                   if this is a simple name, the extraction function will be
                   looked up by entry point; if it is an explicit reference
                   to a function (of the form ``package.module:funcname`` or
                   ``package.module.funcname``), the corresponding function
                   will be imported and used
    :param fileobj: the file-like object the messages should be extracted from
    :param keywords: a dictionary mapping keywords (i.e. names of functions
                     that should be recognized as translation functions) to
                     tuples that specify which of their arguments contain
                     localizable strings
    :param comment_tags: a list of translator tags to search for and include
                         in the results
    :param options: a dictionary of additional options (optional)
    :param strip_comment_tags: a flag that if set to `True` causes all comment
                               tags to be removed from the collected comments.
    :return: the list of extracted messages
    :rtype: `list`
    :raise ValueError: if the extraction method is not registered
    """
    func = None
    # Resolve the extraction function: an explicit "module:func" or
    # "module.func" reference is imported directly; a bare name is looked up
    # among registered entry points (falling back to the builtin extractors
    # when pkg_resources is unavailable).
    if ':' in method or '.' in method:
        if ':' not in method:
            lastdot = method.rfind('.')
            module, attrname = method[:lastdot], method[lastdot + 1:]
        else:
            module, attrname = method.split(':', 1)
        func = getattr(__import__(module, {}, {}, [attrname]), attrname)
    else:
        try:
            from pkg_resources import working_set
        except ImportError:
            # pkg_resources is not available, so we resort to looking up the
            # builtin extractors directly
            builtin = {'ignore': extract_nothing, 'python': extract_python}
            func = builtin.get(method)
        else:
            for entry_point in working_set.iter_entry_points(GROUP_NAME,
                                                             method):
                func = entry_point.load(require=True)
                break
        if func is None:
            raise ValueError('Unknown extraction method %r' % method)

    results = func(fileobj, keywords.keys(), comment_tags,
                   options=options or {})

    for lineno, funcname, messages, comments in results:
        # A keyword spec of None means "first argument only".
        if funcname:
            spec = keywords[funcname] or (1,)
        else:
            spec = (1,)
        if not isinstance(messages, (list, tuple)):
            messages = [messages]
        if not messages:
            continue

        # Validate the messages against the keyword's specification
        msgs = []
        invalid = False
        # last_index is 1 based like the keyword spec
        last_index = len(messages)
        for index in spec:
            if last_index < index:
                # Not enough arguments
                invalid = True
                break
            message = messages[index - 1]
            if message is None:
                invalid = True
                break
            msgs.append(message)
        if invalid:
            continue

        first_msg_index = spec[0] - 1
        if not messages[first_msg_index]:
            # An empty string msgid isn't valid, emit a warning
            where = '%s:%i' % (hasattr(fileobj, 'name') and \
                                   fileobj.name or '(unknown)', lineno)
            print >> sys.stderr, empty_msgid_warning % where
            continue

        # Collapse a single-element tuple to the bare message.
        messages = tuple(msgs)
        if len(messages) == 1:
            messages = messages[0]

        if strip_comment_tags:
            _strip_comment_tags(comments, comment_tags)
        yield lineno, messages, comments
def extract_nothing(fileobj, keywords, comment_tags, options):
    """No-op extractor.

    Accepts the standard extractor arguments but ignores them all and
    produces no messages (an empty list is returned).
    """
    del fileobj, keywords, comment_tags, options  # intentionally unused
    return []
def extract_python(fileobj, keywords, comment_tags, options):
    """Extract messages from Python source code.
    :param fileobj: the seekable, file-like object the messages should be
                    extracted from
    :param keywords: a list of keywords (i.e. function names) that should be
                     recognized as translation functions
    :param comment_tags: a list of translator tags to search for and include
                         in the results
    :param options: a dictionary of additional options (optional)
    :return: an iterator over ``(lineno, funcname, message, comments)`` tuples
    :rtype: ``iterator``
    """
    funcname = lineno = message_lineno = None
    # Paren nesting depth inside a recognized keyword call; -1 means we are
    # not currently inside such a call.
    call_stack = -1
    buf = []
    messages = []
    translator_comments = []
    in_def = in_translator_comments = False
    comment_tag = None
    encoding = parse_encoding(fileobj) or options.get('encoding', 'iso-8859-1')
    tokens = generate_tokens(fileobj.readline)
    for tok, value, (lineno, _), _, _ in tokens:
        if call_stack == -1 and tok == NAME and value in ('def', 'class'):
            in_def = True
        elif tok == OP and value == '(':
            if in_def:
                # Avoid false positives for declarations such as:
                # def gettext(arg='message'):
                in_def = False
                continue
            if funcname:
                message_lineno = lineno
                call_stack += 1
        elif in_def and tok == OP and value == ':':
            # End of a class definition without parens
            in_def = False
            continue
        elif call_stack == -1 and tok == COMMENT:
            # Strip the comment token from the line
            value = value.decode(encoding)[1:].strip()
            if in_translator_comments and \
                    translator_comments[-1][0] == lineno - 1:
                # We're already inside a translator comment, continue appending
                translator_comments.append((lineno, value))
                continue
            # If execution reaches this point, let's see if comment line
            # starts with one of the comment tags
            for comment_tag in comment_tags:
                if value.startswith(comment_tag):
                    in_translator_comments = True
                    translator_comments.append((lineno, value))
                    break
        elif funcname and call_stack == 0:
            # Top level of a recognized call: arguments end at ')' and are
            # separated by ','.
            if tok == OP and value == ')':
                if buf:
                    messages.append(''.join(buf))
                    del buf[:]
                else:
                    messages.append(None)
                if len(messages) > 1:
                    messages = tuple(messages)
                else:
                    messages = messages[0]
                # Comments don't apply unless they immediately precede the
                # message
                if translator_comments and \
                        translator_comments[-1][0] < message_lineno - 1:
                    translator_comments = []
                yield (message_lineno, funcname, messages,
                       [comment[1] for comment in translator_comments])
                # Reset all per-call state for the next candidate call.
                funcname = lineno = message_lineno = None
                call_stack = -1
                messages = []
                translator_comments = []
                in_translator_comments = False
            elif tok == STRING:
                # Unwrap quotes in a safe manner, maintaining the string's
                # encoding
                # https://sourceforge.net/tracker/?func=detail&atid=355470&
                # aid=617979&group_id=5470
                value = eval('# coding=%s\n%s' % (encoding, value),
                             {'__builtins__':{}}, {})
                if isinstance(value, str):
                    value = value.decode(encoding)
                buf.append(value)
            elif tok == OP and value == ',':
                if buf:
                    messages.append(''.join(buf))
                    del buf[:]
                else:
                    messages.append(None)
                if translator_comments:
                    # We have translator comments, and since we're on a
                    # comma(,) user is allowed to break into a new line
                    # Let's increase the last comment's lineno in order
                    # for the comment to still be a valid one
                    old_lineno, old_comment = translator_comments.pop()
                    translator_comments.append((old_lineno+1, old_comment))
        elif call_stack > 0 and tok == OP and value == ')':
            call_stack -= 1
        elif funcname and call_stack == -1:
            funcname = None
        elif tok == NAME and value in keywords:
            funcname = value
def extract_javascript(fileobj, keywords, comment_tags, options):
    """Extract messages from JavaScript source code.
    :param fileobj: the seekable, file-like object the messages should be
                    extracted from
    :param keywords: a list of keywords (i.e. function names) that should be
                     recognized as translation functions
    :param comment_tags: a list of translator tags to search for and include
                         in the results
    :param options: a dictionary of additional options (optional)
    :return: an iterator over ``(lineno, funcname, message, comments)`` tuples
    :rtype: ``iterator``
    """
    from babel.messages.jslexer import tokenize, unquote_string
    funcname = message_lineno = None
    messages = []
    last_argument = None
    translator_comments = []
    # True right after a '+' operator: the next string literal is appended to
    # the previous argument instead of starting a new one.
    concatenate_next = False
    encoding = options.get('encoding', 'utf-8')
    last_token = None
    # Paren nesting depth inside a recognized keyword call; -1 = not in a call.
    call_stack = -1
    for token in tokenize(fileobj.read().decode(encoding)):
        if token.type == 'operator' and token.value == '(':
            if funcname:
                message_lineno = token.lineno
                call_stack += 1
        elif call_stack == -1 and token.type == 'linecomment':
            # Strip the leading '//' from the comment text.
            value = token.value[2:].strip()
            if translator_comments and \
               translator_comments[-1][0] == token.lineno - 1:
                # Continuation of a translator comment on the previous line.
                translator_comments.append((token.lineno, value))
                continue
            for comment_tag in comment_tags:
                if value.startswith(comment_tag):
                    translator_comments.append((token.lineno, value.strip()))
                    break
        elif token.type == 'multilinecomment':
            # only one multi-line comment may precede a translation
            translator_comments = []
            # Strip the '/*' and '*/' delimiters.
            value = token.value[2:-2].strip()
            for comment_tag in comment_tags:
                if value.startswith(comment_tag):
                    lines = value.splitlines()
                    if lines:
                        lines[0] = lines[0].strip()
                        lines[1:] = dedent('\n'.join(lines[1:])).splitlines()
                        for offset, line in enumerate(lines):
                            translator_comments.append((token.lineno + offset,
                                                        line))
                    break
        elif funcname and call_stack == 0:
            # Top level of a recognized call: collect arguments until ')'.
            if token.type == 'operator' and token.value == ')':
                if last_argument is not None:
                    messages.append(last_argument)
                if len(messages) > 1:
                    messages = tuple(messages)
                elif messages:
                    messages = messages[0]
                else:
                    messages = None
                # Comments don't apply unless they immediately precede the
                # message
                if translator_comments and \
                   translator_comments[-1][0] < message_lineno - 1:
                    translator_comments = []
                if messages is not None:
                    yield (message_lineno, funcname, messages,
                           [comment[1] for comment in translator_comments])
                # Reset all per-call state for the next candidate call.
                funcname = message_lineno = last_argument = None
                concatenate_next = False
                translator_comments = []
                messages = []
                call_stack = -1
            elif token.type == 'string':
                new_value = unquote_string(token.value)
                if concatenate_next:
                    last_argument = (last_argument or '') + new_value
                    concatenate_next = False
                else:
                    last_argument = new_value
            elif token.type == 'operator':
                if token.value == ',':
                    if last_argument is not None:
                        messages.append(last_argument)
                        last_argument = None
                    else:
                        messages.append(None)
                    concatenate_next = False
                elif token.value == '+':
                    concatenate_next = True
        elif call_stack > 0 and token.type == 'operator' \
             and token.value == ')':
            call_stack -= 1
        elif funcname and call_stack == -1:
            funcname = None
        elif call_stack == -1 and token.type == 'name' and \
             token.value in keywords and \
             (last_token is None or last_token.type != 'name' or
              last_token.value != 'function'):
            # A keyword name that is not a function *definition* starts a
            # candidate call.
            funcname = token.value
        last_token = token
| |
"""ParametricJobs Table."""
import os
import re
import time
import json
import logging
import calendar
from datetime import datetime
import cherrypy
from sqlalchemy import Column, SmallInteger, Integer, Boolean, String, PickleType, TIMESTAMP, ForeignKey, Enum, CheckConstraint
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import relationship
from sqlalchemy.orm.exc import NoResultFound, MultipleResultsFound
from lzproduction.rpc.DiracRPCClient import dirac_api_client, ParametricDiracJobClient
from lzproduction.utils.collections import list_splitter
from lzproduction.utils.tempfile_utils import temporary_runscript, temporary_macro
from ..utils import db_session
from ..statuses import LOCALSTATUS
from .SQLTableBase import SQLTableBase
from .JSONTableEncoder import JSONTableEncoder
from .DiracJobs import DiracJobs
logger = logging.getLogger(__name__)  # pylint: disable=invalid-name
# Matches a trailing MM-DD-YYYY date at the end of a macro file name,
# e.g. "foo_01-31-2017" -> month=01, day=31, year=2017.
UNIXDATE = re.compile(r'(?P<month>[0-9]{2})-(?P<day>[0-9]{2})-(?P<year>[0-9]{4})$')
@cherrypy.expose
class ParametricJobs(SQLTableBase):
    """Jobs SQL Table."""
    __tablename__ = 'parametricjobs'
    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
    # DIRAC job priority, constrained to the range [0, 10).
    priority = Column(SmallInteger, CheckConstraint('priority >= 0 and priority < 10'), nullable=False, default=3)
    # --- simulation job definition ---
    app = Column(String(250))
    app_version = Column(String(250))
    site = Column(String(250), nullable=False, default='ANY')
    sim_lfn_outputdir = Column(String(250))
    mctruth_lfn_outputdir = Column(String(250))
    macro = Column(String(250))
    tag = Column(String(250))
    njobs = Column(Integer)
    nevents = Column(Integer)
    seed = Column(Integer)
    # NOTE(review): declared Integer but ``submit`` calls ``self.hour.split(',')``,
    # which requires a string -- confirm the intended column type.
    hour = Column(Integer)
    # --- analysis-chain versions and LFN directories ---
    fastnest_version = Column(String(250))
    reduction_version = Column(String(250))
    reduction_lfn_inputdir = Column(String(250))
    reduction_lfn_outputdir = Column(String(250))
    der_version = Column(String(250))
    der_lfn_inputdir = Column(String(250))
    der_lfn_outputdir = Column(String(250))
    lzap_version = Column(String(250))
    physics_version = Column(String(250))
    lzap_lfn_inputdir = Column(String(250))
    lzap_lfn_outputdir = Column(String(250))
    # --- bookkeeping ---
    request_id = Column(Integer, ForeignKey('requests.id'), nullable=False)
    request = relationship("Requests", back_populates="parametricjobs")
    status = Column(Enum(LOCALSTATUS), nullable=False)
    reschedule = Column(Boolean, nullable=False, default=False)
    timestamp = Column(TIMESTAMP, nullable=False, default=datetime.utcnow, onupdate=datetime.utcnow)
    num_completed = Column(Integer, nullable=False, default=0)
    num_failed = Column(Integer, nullable=False, default=0)
    num_submitted = Column(Integer, nullable=False, default=0)
    num_running = Column(Integer, nullable=False, default=0)
    diracjobs = relationship("DiracJobs", back_populates="parametricjob")
    @hybrid_property
    def num_other(self):
        """Return the number of jobs in states other than the known ones."""
        return self.njobs - (self.num_submitted + self.num_running + self.num_failed + self.num_completed)
    def submit(self):
        """Submit parametric job.

        Two modes: if ``app_version`` is set, a simulation job is built from
        the macro; otherwise an analysis job is built from the first
        non-empty LFN input directory.  The DIRAC subjob ids are recorded in
        the DiracJobs table.
        """
        # problems here if not running simulation, there will be no macro so
        # everything including context needs reworking.
        # lfn_root = os.path.join('/lz/user/l/lzproduser.grid.hep.ph.ic.ac.uk', '_'.join(('-'.join((self.app, self.app_version)),
        #                                                                                '-'.join(('DER', self.der_version)))))
        parametric_job = ParametricDiracJobClient()
        dirac_ids = set()
        if self.app_version is not None:
            # Simulation mode: derive run metadata from the macro file name.
            macro_name = os.path.splitext(os.path.basename(self.macro or ''))[0]
            # seconds of detector livetime per beam-on event (project constant)
            livetime_sec_per_beamon = 0.1132698957
            livetimeperjob = str((self.nevents or 1) * livetime_sec_per_beamon)
            unixtime = time.time()
            # If the macro name ends in MM-DD-YYYY, use that date (midnight UTC)
            # as the run's unix timestamp instead of "now".
            match = UNIXDATE.search(macro_name)
            if match is not None:
                month, day, year = match.groups()
                unixtime = str(int(calendar.timegm(datetime(int(year), int(month), int(day), 0, 0).utctimetuple())))
            # ``**self`` assumes the SQLTableBase mapping is dict-like so the
            # column values expand as keyword args -- TODO confirm.
            with temporary_runscript(root_version='5.34.32',
                                     root_arch='slc6_gcc44_x86_64',
                                     g4_version='4.10.03.p02',
                                     se='UKI-LT2-IC-HEP-disk',
                                     unixtime=unixtime,
                                     livetimeperjob=livetimeperjob, **self) as runscript,\
                 temporary_macro(self.tag, self.macro or '', self.app, self.app_version, self.nevents) as macro:
                logger.info("Submitting ParametricJob %s, macro: %s to DIRAC", self.id, self.macro)
                # DIRAC parametric jobs are limited in size; submit seeds in
                # batches of 1000.
                for sublist in list_splitter(range(self.seed, self.seed + self.njobs), 1000):
                    with parametric_job as j:
                        j.setName(os.path.splitext(os.path.basename(macro))[0] + '-%(args)s')
                        j.setPriority(self.priority)
                        j.setPlatform('ANY')
                        j.setExecutable(os.path.basename(runscript),
                                        os.path.basename(macro) + ' %(args)s' + ' %s' % self.nevents,
                                        'lzproduction_output.log')
                        j.setInputSandbox([runscript, macro])
                        if self.site.endswith('2Processors'):
                            j.setDestination("LCG.UKI-LT2-IC-HEP.uk")  # this should be done with site, tag = self.site.split(' ')
                            j.setTag('2Processors')
                        elif self.site.endswith("HighMem"):
                            j.setDestination("LCG.UKI-SOUTHGRID-RALPP.uk")
                            j.setTag('HighMem')
                        else:
                            j.setDestination(self.site)
                        j.setBannedSites(['LCG.UKI-LT2-Brunel.uk',
                                          'LCG.UKI-NORTHGRID-LANCS-HEP.uk',
                                          'LCG.UKI-SOUTHGRID-BRIS-HEP.uk',
                                          'VAC.UKI-NORTHGRID-MAN-HEP.uk',
                                          'VAC.UKI-NORTHGRID-LANCS-HEP.uk',
                                          'VAC.UKI-NORTHGRID-LIV-HEP.uk',
                                          'VAC.UKI-SOUTHGRID-BHAM-HEP.uk',
                                          'VAC.UKI-SOUTHGRID-CAM-HEP.uk',
                                          'VAC.UKI-SOUTHGRID-OX-HEP.uk',
                                          'VAC.UKI-SCOTGRID-GLASGOW.uk',
                                          'VAC.UKI-LT2-RHUL.uk',
                                          'VAC.UKI-LT2-UCL-HEP.uk'])
                        j.setParameterSequence('args', sublist, addToWorkflow=False)
                    dirac_ids.update(parametric_job.subjob_ids)
        else:
            # Analysis mode: run over an existing LFN input directory,
            # parameterised by hour.
            with temporary_runscript(root_version='5.34.32',
                                     root_arch='slc6_gcc44_x86_64',
                                     g4_version='4.10.03.p02',
                                     se='UKI-LT2-IC-HEP-disk', **self) as runscript:
                logger.info("Submitting ParametricJob %s, inputdir: %s to DIRAC", self.id, self.reduction_lfn_inputdir or self.der_lfn_inputdir or self.lzap_lfn_inputdir)
                # First non-empty input dir wins: reduction > DER > LZap.
                input_lfn_dir=self.reduction_lfn_inputdir or\
                              self.der_lfn_inputdir or \
                              self.lzap_lfn_inputdir
                with parametric_job as j:
                    j.setName(input_lfn_dir + "_%(hour)s")
                    j.setPriority(self.priority)
                    j.setPlatform('ANY')
                    j.setExecutable(os.path.basename(runscript),
                                    input_lfn_dir + " %(hour)s",
                                    'lzanalysis_output.log')
                    j.setInputSandbox([runscript])
                    if self.site.endswith('2Processors'):
                        j.setDestination("LCG.UKI-LT2-IC-HEP.uk")
                        j.setTag('2Processors')
                    elif self.site.endswith("HighMem"):
                        j.setDestination("LCG.UKI-SOUTHGRID-RALPP.uk")
                        j.setTag('HighMem')
                    else:
                        j.setDestination(self.site)
                    j.setBannedSites(['LCG.UKI-LT2-Brunel.uk',
                                      'LCG.UKI-NORTHGRID-LANCS-HEP.uk',
                                      'LCG.UKI-SOUTHGRID-BRIS-HEP.uk',
                                      'VAC.UKI-NORTHGRID-MAN-HEP.uk',
                                      'VAC.UKI-NORTHGRID-LANCS-HEP.uk',
                                      'VAC.UKI-NORTHGRID-LIV-HEP.uk',
                                      'VAC.UKI-SOUTHGRID-BHAM-HEP.uk',
                                      'VAC.UKI-SOUTHGRID-CAM-HEP.uk',
                                      'VAC.UKI-SOUTHGRID-OX-HEP.uk',
                                      'VAC.UKI-SCOTGRID-GLASGOW.uk',
                                      'VAC.UKI-LT2-RHUL.uk',
                                      'VAC.UKI-LT2-UCL-HEP.uk'])
                    j.setParameterSequence('hour', self.hour.split(','), addToWorkflow=False)
                dirac_ids.update(parametric_job.subjob_ids)
        # Record the DIRAC subjob ids against this parametric job.
        with db_session() as session:
            session.bulk_insert_mappings(DiracJobs, [{'id': i, 'parametricjob_id': self.id}
                                                     for i in dirac_ids])
    def reset(self):
        """Reset parametric job.

        Deletes the local DiracJobs rows for this parametric job and then
        kills/removes the corresponding jobs in DIRAC.
        """
        with db_session(reraise=False) as session:
            dirac_jobs = session.query(DiracJobs).filter_by(parametricjob_id=self.id)
            dirac_job_ids = [j.id for j in dirac_jobs.all()]
            dirac_jobs.delete(synchronize_session=False)
        with dirac_api_client() as dirac:
            logger.info("Removing Dirac jobs %s from ParametricJob %s", dirac_job_ids, self.id)
            dirac.kill(dirac_job_ids)
            dirac.delete(dirac_job_ids)
    def delete_dirac_jobs(self, session):
        """Delete associated DIRAC jobs."""
        logger.info("Deleting DiracJobs for ParametricJob id: %s", self.id)
        session.query(DiracJobs)\
               .filter_by(parametricjob_id=self.id)\
               .delete(synchronize_session=False)
    def update_status(self):
        """Update the status of parametric job.

        Pulls per-state counts from DiracJobs (assumed to be a Counter-like
        mapping of LOCALSTATUS -> count -- TODO confirm), persists them and
        returns the overall (max) status.
        """
        local_statuses = DiracJobs.update_status(self)
        # could just have DiracJobs return this... maybe better
        # local_statuses = Counter(status.local_status for status in dirac_statuses.elements())
        # Overall status is the "highest" state seen; falls back to the
        # current status when there are no Dirac jobs.
        status = max(local_statuses or [self.status])
        with db_session() as session:
            this = session.merge(self)
            this.status = status
            this.num_completed = local_statuses[LOCALSTATUS.Completed]
            this.num_failed = local_statuses[LOCALSTATUS.Failed]
            this.num_submitted = local_statuses[LOCALSTATUS.Submitted]
            this.num_running = local_statuses[LOCALSTATUS.Running]
            this.reschedule = False
        return status
    @staticmethod
    def GET(reqid):  # pylint: disable=invalid-name
        """
        REST Get method.
        Returns all ParametricJobs for a given request id.
        Non-admin users only see jobs belonging to their own requests.
        """
        logger.debug("In GET: reqid = %s", reqid)
        requester = cherrypy.request.verified_user
        with db_session() as session:
            user_requests = session.query(ParametricJobs)\
                                   .filter_by(request_id=reqid)
            if not requester.admin:
                user_requests = user_requests.join(ParametricJobs.request)\
                                             .filter_by(requester_id=requester.id)
            return json.dumps({'data': user_requests.all()}, cls=JSONTableEncoder)
    @staticmethod
    def PUT(jobid, reschedule=False):  # pylint: disable=invalid-name
        """REST Put method.

        Marks the job for rescheduling (status -> Submitting) if requested
        and not already pending; unknown/ambiguous ids are logged only.
        """
        logger.debug("In PUT: jobid = %s, reschedule = %s", jobid, reschedule)
        requester = cherrypy.request.verified_user
        with db_session() as session:
            query = session.query(ParametricJobs).filter_by(id=jobid)
            if not requester.admin:
                query = query.join(ParametricJobs.request)\
                             .filter_by(requester_id=requester.id)
            try:
                job = query.one()
            except NoResultFound:
                logger.error("No ParametricJobs found with id: %s", jobid)
            except MultipleResultsFound:
                logger.error("Multiple ParametricJobs found with id: %s", jobid)
            else:
                if reschedule and not job.reschedule:
                    job.reschedule = True
                    job.status = LOCALSTATUS.Submitting
| |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
# Type of the optional ``cls`` callback: receives the pipeline response, the
# deserialized result and the response headers dict.
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class NetworkInterfaceTapConfigurationsOperations:
"""NetworkInterfaceTapConfigurationsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
    async def _delete_initial(
        self,
        resource_group_name: str,
        network_interface_name: str,
        tap_configuration_name: str,
        **kwargs: Any
    ) -> None:
        """Send the initial DELETE request of the long-running delete.

        Raises HttpResponseError on any status other than 200/202/204.
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-06-01"
        # Construct URL
        url = self._delete_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
            'tapConfigurationName': self._serialize.url("tap_configuration_name", tap_configuration_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        # 202/204 carry no body; pass the raw pipeline response to ``cls``.
        if cls:
            return cls(pipeline_response, None, {})
    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/tapConfigurations/{tapConfigurationName}'}  # type: ignore
    async def begin_delete(
        self,
        resource_group_name: str,
        network_interface_name: str,
        tap_configuration_name: str,
        **kwargs: Any
    ) -> AsyncLROPoller[None]:
        """Deletes the specified tap configuration from the NetworkInterface.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param network_interface_name: The name of the network interface.
        :type network_interface_name: str
        :param tap_configuration_name: The name of the tap configuration.
        :type tap_configuration_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # No saved state: fire the initial DELETE; ``cls`` returns the raw
            # pipeline response so the poller can inspect headers.
            raw_result = await self._delete_initial(
                resource_group_name=resource_group_name,
                network_interface_name=network_interface_name,
                tap_configuration_name=tap_configuration_name,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # These kwargs were consumed by the initial call; the poller must not
        # see them again.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # Delete returns no body; only invoke the user callback if given.
            if cls:
                return cls(pipeline_response, None, {})
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
            'tapConfigurationName': self._serialize.url("tap_configuration_name", tap_configuration_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        # This LRO resolves its final state via the Location header.
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/tapConfigurations/{tapConfigurationName}'}  # type: ignore
    async def get(
        self,
        resource_group_name: str,
        network_interface_name: str,
        tap_configuration_name: str,
        **kwargs: Any
    ) -> "_models.NetworkInterfaceTapConfiguration":
        """Get the specified tap configuration on a network interface.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param network_interface_name: The name of the network interface.
        :type network_interface_name: str
        :param tap_configuration_name: The name of the tap configuration.
        :type tap_configuration_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: NetworkInterfaceTapConfiguration, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2019_06_01.models.NetworkInterfaceTapConfiguration
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.NetworkInterfaceTapConfiguration"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-06-01"
        accept = "application/json"
        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
            'tapConfigurationName': self._serialize.url("tap_configuration_name", tap_configuration_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # Only 200 is a valid response for a GET.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('NetworkInterfaceTapConfiguration', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/tapConfigurations/{tapConfigurationName}'}  # type: ignore
    async def _create_or_update_initial(
        self,
        resource_group_name: str,
        network_interface_name: str,
        tap_configuration_name: str,
        tap_configuration_parameters: "_models.NetworkInterfaceTapConfiguration",
        **kwargs: Any
    ) -> "_models.NetworkInterfaceTapConfiguration":
        """Send the initial PUT request of the long-running create-or-update.

        Returns the deserialized resource from the 200/201 response; raises
        HttpResponseError otherwise.
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.NetworkInterfaceTapConfiguration"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-06-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self._create_or_update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
            'tapConfigurationName': self._serialize.url("tap_configuration_name", tap_configuration_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(tap_configuration_parameters, 'NetworkInterfaceTapConfiguration')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        # 200 = updated existing resource, 201 = created new one; both carry
        # the resource body.
        if response.status_code == 200:
            deserialized = self._deserialize('NetworkInterfaceTapConfiguration', pipeline_response)
        if response.status_code == 201:
            deserialized = self._deserialize('NetworkInterfaceTapConfiguration', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/tapConfigurations/{tapConfigurationName}'}  # type: ignore
    async def begin_create_or_update(
        self,
        resource_group_name: str,
        network_interface_name: str,
        tap_configuration_name: str,
        tap_configuration_parameters: "_models.NetworkInterfaceTapConfiguration",
        **kwargs: Any
    ) -> AsyncLROPoller["_models.NetworkInterfaceTapConfiguration"]:
        """Creates or updates a Tap configuration in the specified NetworkInterface.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param network_interface_name: The name of the network interface.
        :type network_interface_name: str
        :param tap_configuration_name: The name of the tap configuration.
        :type tap_configuration_name: str
        :param tap_configuration_parameters: Parameters supplied to the create or update tap
         configuration operation.
        :type tap_configuration_parameters: ~azure.mgmt.network.v2019_06_01.models.NetworkInterfaceTapConfiguration
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either NetworkInterfaceTapConfiguration or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2019_06_01.models.NetworkInterfaceTapConfiguration]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.NetworkInterfaceTapConfiguration"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # No saved state: fire the initial PUT; ``cls`` returns the raw
            # pipeline response so the poller can inspect headers.
            raw_result = await self._create_or_update_initial(
                resource_group_name=resource_group_name,
                network_interface_name=network_interface_name,
                tap_configuration_name=tap_configuration_name,
                tap_configuration_parameters=tap_configuration_parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # These kwargs were consumed by the initial call; the poller must not
        # see them again.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # Deserialize the final resource body once the LRO completes.
            deserialized = self._deserialize('NetworkInterfaceTapConfiguration', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
            'tapConfigurationName': self._serialize.url("tap_configuration_name", tap_configuration_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        # This LRO resolves its final state via the Azure-AsyncOperation header.
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/tapConfigurations/{tapConfigurationName}'}  # type: ignore
def list(
    self,
    resource_group_name: str,
    network_interface_name: str,
    **kwargs: Any
) -> AsyncIterable["_models.NetworkInterfaceTapConfigurationListResult"]:
    """Get all Tap configurations in a network interface.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param network_interface_name: The name of the network interface.
    :type network_interface_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either NetworkInterfaceTapConfigurationListResult or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_06_01.models.NetworkInterfaceTapConfigurationListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.NetworkInterfaceTapConfigurationListResult"]
    # Default ARM error mapping; callers may extend/override it via 'error_map'.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2019-06-01"
    accept = "application/json"

    def prepare_request(next_link=None):
        # Build the HTTP request for one page. The first page is built from the
        # operation's URL template; subsequent pages reuse the service-provided
        # 'next_link' verbatim (it already carries its own query string).
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        if not next_link:
            # Construct URL
            url = self.list.metadata['url']  # type: ignore
            path_format_arguments = {
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    async def extract_data(pipeline_response):
        # Deserialize one page and hand back (next page link, items on this page).
        deserialized = self._deserialize('NetworkInterfaceTapConfigurationListResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            # Custom response hook: let the caller transform the element list.
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetch one page and raise a typed error on any non-200 status.
        request = prepare_request(next_link)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        return pipeline_response

    # AsyncItemPaged drives the page loop lazily as the caller iterates.
    return AsyncItemPaged(
        get_next, extract_data
    )
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/tapConfigurations'}  # type: ignore
| |
from itertools import chain
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Permission
from django.db.models.query import QuerySet
from django.utils.encoding import force_str
from guardian.conf import settings as guardian_settings
from guardian.ctypes import get_content_type
from guardian.utils import get_group_obj_perms_model, get_identity, get_user_obj_perms_model
def _get_pks_model_and_ctype(objects):
    """
    Returns the primary keys, model and content type of an iterable of Django model objects.
    Assumes that all objects are of the same content type.
    """
    if isinstance(objects, QuerySet):
        # Fast path: pull pks straight from the database without
        # instantiating model objects.
        model = objects.model
        ctype = get_content_type(model)
        pks = [force_str(pk) for pk in objects.values_list('pk', flat=True)]
        return pks, model, ctype

    # Arbitrary iterable: derive model/content type from the first element.
    pks = []
    for position, instance in enumerate(objects):
        if position == 0:
            model = type(instance)
            ctype = get_content_type(model)
        pks.append(force_str(instance.pk))
    return pks, model, ctype
class ObjectPermissionChecker:
    """
    Generic object permissions checker class being the heart of
    ``django-guardian``.

    .. note::
       Once checked for single object, permissions are stored and we don't hit
       database again if another check is called for this object. This is great
       for templates, views or other request based checks (assuming we don't
       have hundreds of permissions on a single object as we fetch all
       permissions for checked object).

       On the other hand, if we call ``has_perm`` for perm1/object1, then we
       change permission state and call ``has_perm`` again for same
       perm1/object1 on same instance of ObjectPermissionChecker we won't see a
       difference as permissions are already fetched and stored within cache
       dictionary.
    """

    def __init__(self, user_or_group=None):
        """
        Constructor for ObjectPermissionChecker.

        :param user_or_group: should be an ``User``, ``AnonymousUser`` or
          ``Group`` instance
        """
        # Exactly one of self.user / self.group is non-None after this call.
        self.user, self.group = get_identity(user_or_group)
        # Maps (content_type_id, str(pk)) -> list of permission codenames.
        self._obj_perms_cache = {}

    def has_perm(self, perm, obj):
        """
        Checks if user/group has given permission for object.

        :param perm: permission as string, may or may not contain app_label
          prefix (if not prefixed, we grab app_label from ``obj``)
        :param obj: Django model instance for which permission should be checked
        """
        # Inactive users have no permissions; superusers have all of them.
        if self.user and not self.user.is_active:
            return False
        elif self.user and self.user.is_superuser:
            return True
        # Drop an optional "app_label." prefix; comparison is by codename only.
        if '.' in perm:
            _, perm = perm.split('.', 1)
        return perm in self.get_perms(obj)

    def get_group_filters(self, obj):
        """Build queryset filter kwargs selecting group object permissions for ``obj``."""
        User = get_user_model()
        ctype = get_content_type(obj)
        group_model = get_group_obj_perms_model(obj)
        group_rel_name = group_model.permission.field.related_query_name()
        if self.user:
            # Reach group permissions through the user's group memberships.
            fieldname = '{}__group__{}'.format(
                group_rel_name,
                User.groups.field.related_query_name(),
            )
            group_filters = {fieldname: self.user}
        else:
            group_filters = {'%s__group' % group_rel_name: self.group}
        if group_model.objects.is_generic():
            # Generic-FK permission model: match content type + object pk.
            group_filters.update({
                '%s__content_type' % group_rel_name: ctype,
                '%s__object_pk' % group_rel_name: obj.pk,
            })
        else:
            # Direct-FK permission model: match the object itself.
            group_filters['%s__content_object' % group_rel_name] = obj
        return group_filters

    def get_user_filters(self, obj):
        """Build queryset filter kwargs selecting user object permissions for ``obj``."""
        ctype = get_content_type(obj)
        model = get_user_obj_perms_model(obj)
        related_name = model.permission.field.related_query_name()
        user_filters = {'%s__user' % related_name: self.user}
        if model.objects.is_generic():
            user_filters.update({
                '%s__content_type' % related_name: ctype,
                '%s__object_pk' % related_name: obj.pk,
            })
        else:
            user_filters['%s__content_object' % related_name] = obj
        return user_filters

    def get_user_perms(self, obj):
        """Return codenames of permissions granted directly to the user for ``obj``."""
        ctype = get_content_type(obj)
        perms_qs = Permission.objects.filter(content_type=ctype)
        user_filters = self.get_user_filters(obj)
        user_perms_qs = perms_qs.filter(**user_filters)
        user_perms = user_perms_qs.values_list("codename", flat=True)
        return user_perms

    def get_group_perms(self, obj):
        """Return codenames of permissions granted via groups for ``obj``."""
        ctype = get_content_type(obj)
        perms_qs = Permission.objects.filter(content_type=ctype)
        group_filters = self.get_group_filters(obj)
        group_perms_qs = perms_qs.filter(**group_filters)
        group_perms = group_perms_qs.values_list("codename", flat=True)
        return group_perms

    def get_perms(self, obj):
        """
        Returns list of ``codename``'s of all permissions for given ``obj``.

        :param obj: Django model instance for which permission should be checked
        """
        if self.user and not self.user.is_active:
            return []

        if guardian_settings.AUTO_PREFETCH:
            # Fill the cache from a single bulk query (per user/group).
            self._prefetch_cache()

        ctype = get_content_type(obj)
        key = self.get_local_cache_key(obj)
        if key not in self._obj_perms_cache:
            # If auto-prefetching enabled, do not hit database
            if guardian_settings.AUTO_PREFETCH:
                return []
            if self.user and self.user.is_superuser:
                perms = list(
                    Permission.objects.filter(content_type=ctype).values_list("codename", flat=True)
                )
            elif self.user:
                # Query user and group permissions separately and then combine
                # the results to avoid a slow query
                user_perms = self.get_user_perms(obj)
                group_perms = self.get_group_perms(obj)
                perms = list(set(chain(user_perms, group_perms)))
            else:
                perms = list(set(self.get_group_perms(obj)))
            self._obj_perms_cache[key] = perms
        return self._obj_perms_cache[key]

    def get_local_cache_key(self, obj):
        """
        Returns cache key for ``_obj_perms_cache`` dict.
        """
        ctype = get_content_type(obj)
        return (ctype.id, force_str(obj.pk))

    def prefetch_perms(self, objects):
        """
        Prefetches the permissions for objects in ``objects`` and puts them in the cache.

        :param objects: Iterable of Django model objects
        """
        if self.user and not self.user.is_active:
            return []

        User = get_user_model()
        pks, model, ctype = _get_pks_model_and_ctype(objects)

        if self.user and self.user.is_superuser:
            # Superuser: every codename of the model applies to every object.
            perms = list(
                Permission.objects.filter(content_type=ctype).values_list("codename", flat=True)
            )
            for pk in pks:
                key = (ctype.id, force_str(pk))
                self._obj_perms_cache[key] = perms
            return True

        # Unlike get_group_filters(), these filters apply directly to the
        # object-permission model (no related-name traversal needed).
        group_model = get_group_obj_perms_model(model)
        if self.user:
            fieldname = 'group__{}'.format(
                User.groups.field.related_query_name(),
            )
            group_filters = {fieldname: self.user}
        else:
            group_filters = {'group': self.group}
        if group_model.objects.is_generic():
            group_filters.update({
                'content_type': ctype,
                'object_pk__in': pks,
            })
        else:
            group_filters.update({
                'content_object_id__in': pks
            })

        if self.user:
            model = get_user_obj_perms_model(model)
            user_filters = {
                'user': self.user,
            }
            if model.objects.is_generic():
                user_filters.update({
                    'content_type': ctype,
                    'object_pk__in': pks
                })
            else:
                user_filters.update({
                    'content_object_id__in': pks
                })
            # Query user and group permissions separately and then combine
            # the results to avoid a slow query
            user_perms_qs = model.objects.filter(**user_filters).select_related('permission')
            group_perms_qs = group_model.objects.filter(**group_filters).select_related('permission')
            perms = chain(user_perms_qs, group_perms_qs)
        else:
            perms = group_model.objects.filter(**group_filters).select_related('permission')

        # initialize entry in '_obj_perms_cache' for all prefetched objects
        for obj in objects:
            key = self.get_local_cache_key(obj)
            if key not in self._obj_perms_cache:
                self._obj_perms_cache[key] = []

        # Distribute each fetched permission row to its object's cache entry.
        for perm in perms:
            if type(perm).objects.is_generic():
                key = (ctype.id, perm.object_pk)
            else:
                key = (ctype.id, force_str(perm.content_object_id))
            self._obj_perms_cache[key].append(perm.permission.codename)

        return True

    @staticmethod
    def _init_obj_prefetch_cache(obj, *querysets):
        """Build and attach the ``_guardian_perms_cache`` dict on ``obj`` from the given querysets.

        The cache maps ``(content_type_id, object_pk)`` to a list of codenames.
        """
        cache = {}
        for qs in querysets:
            perms = qs.select_related('permission__codename').values_list('content_type_id', 'object_pk',
                                                                          'permission__codename')
            for p in perms:
                if p[:2] not in cache:
                    cache[p[:2]] = []
                cache[p[:2]] += [p[2], ]
        obj._guardian_perms_cache = cache
        return obj, cache

    def _prefetch_cache(self):
        """Load ALL object permissions of this user/group into ``_obj_perms_cache`` in one pass."""
        from guardian.utils import get_user_obj_perms_model, get_group_obj_perms_model
        UserObjectPermission = get_user_obj_perms_model()
        GroupObjectPermission = get_group_obj_perms_model()
        if self.user:
            obj = self.user
            querysets = [
                UserObjectPermission.objects.filter(user=obj),
                GroupObjectPermission.objects.filter(group__user=obj)
            ]
        else:
            obj = self.group
            querysets = [
                GroupObjectPermission.objects.filter(group=obj),
            ]

        # The per-user/group cache is memoized on the user/group object itself,
        # so repeated checkers for the same identity reuse it.
        if not hasattr(obj, '_guardian_perms_cache'):
            obj, cache = self._init_obj_prefetch_cache(obj, *querysets)
        else:
            cache = obj._guardian_perms_cache
        self._obj_perms_cache = cache
| |
from django.contrib.gis.geos import Polygon
import os
from osgeo import osr, gdal
from . import Driver, RASTER
from pandas import DataFrame, Panel
class GeotiffDriver(Driver):
    """Driver that serves a cached GeoTIFF file as a raster data resource.

    Reads extent/SRS metadata with GDAL/OSR, persists it on the owning
    resource's ``spatial_metadata``, and can round-trip the raster through
    pandas objects.
    """

    @classmethod
    def datatype(cls):
        """Return the kind of data this driver serves (a raster)."""
        return RASTER

    @classmethod
    def supports_related(cls):
        """GeoTIFF resources have no related (attribute-table) data."""
        return False

    def _read_extent(self, ds):
        """Return ``(nx, ny, xmin, xmax, ymin, ymax)`` for an open GDAL dataset.

        Handles both north-up files (negative dy) and the unusual south-up
        case (positive dy) so that ymin <= ymax always holds.
        """
        nx = ds.RasterXSize
        ny = ds.RasterYSize
        x0, dx, _, y0, _, dy = ds.GetGeoTransform()
        xmin, xmax, ymin, ymax = (
            x0,
            x0 + dx * nx,
            y0 if dy > 0 else y0 + dy * ny,
            y0 + dy * ny if dy > 0 else y0
        )
        return nx, ny, xmin, xmax, ymin, ymax

    def _update_spatial_metadata(self, ds):
        """Compute native SRS plus native/WGS84 bounding boxes from ``ds`` and save them.

        Previously this logic was duplicated verbatim in ready_data_resource()
        and compute_spatial_metadata(); both now delegate here.
        """
        _, _, xmin, xmax, ymin, ymax = self._read_extent(ds)
        crs = osr.SpatialReference()
        crs.ImportFromWkt(ds.GetProjection())
        self.resource.spatial_metadata.native_srs = crs.ExportToProj4()
        e4326 = osr.SpatialReference()
        e4326.ImportFromEPSG(4326)
        crx = osr.CoordinateTransformation(crs, e4326)
        x04326, y04326, _ = crx.TransformPoint(xmin, ymin)
        x14326, y14326, _ = crx.TransformPoint(xmax, ymax)
        self.resource.spatial_metadata.bounding_box = Polygon.from_bbox((x04326, y04326, x14326, y14326))
        self.resource.spatial_metadata.native_bounding_box = Polygon.from_bbox((xmin, ymin, xmax, ymax))
        self.resource.spatial_metadata.save()
        self.resource.save()

    def ready_data_resource(self, **kwargs):
        """Ensure the data file is cached and its spatial metadata is current.

        Other keyword args get passed in as a matter of course, like BBOX, time,
        and elevation, but this basic driver ignores them (except 'fresh', which
        forces a re-download of the cached file).

        :return: ``(cache_path, (slug, native_srs, {'type': 'gdal', 'file': ...}))``
        """
        changed = self.cache_data_file(freshen='fresh' in kwargs and kwargs['fresh'])
        if changed:
            # Only recompute metadata when the underlying file actually changed.
            ds = gdal.Open(self.cached_basename + '.tif')
            self._update_spatial_metadata(ds)
        return self.cache_path, (self.resource.slug, self.resource.spatial_metadata.native_srs, {
            'type': 'gdal',
            "file": self.cached_basename + '.tif'
        })

    def compute_spatial_metadata(self, **kwargs):
        """Force-refresh the cached file and recompute its spatial metadata.

        Other keyword args get passed in as a matter of course, like BBOX, time,
        and elevation, but this basic driver ignores them.
        """
        self.cache_data_file(True)
        ds = gdal.Open(self.cached_basename + '.tif')
        self._update_spatial_metadata(ds)

    def get_data_fields(self, **kwargs):
        """Return ``(band_name, human-readable dtype, 1)`` tuples for each raster band."""
        dtypes = {
            gdal.GDT_Byte: 'unsigned 8-bit integer',
            gdal.GDT_Int16: '16-bit integer',
            gdal.GDT_Int32: '32-bit integer',
            gdal.GDT_Float64: 'long double-precision float',
            gdal.GDT_Unknown: 'string or other',
            gdal.GDT_UInt32: 'unsigned 32-bit integer (sometimes used for RGB in broken files)'
        }
        _, (_, _, result) = self.ready_data_resource(**kwargs)
        ds = gdal.Open(result['file'])
        n = ds.RasterCount
        ret = []
        # GDAL band indices are 1-based.
        for i in range(1, n + 1):
            band = ds.GetRasterBand(i)
            ret.append((
                str(i),
                dtypes[band.DataType],
                1
            ))
        return ret

    def get_data_for_point(self, wherex, wherey, srs, fuzziness=0, **kwargs):
        """
        This can be used to implement GetFeatureInfo in WMS. It gets a value for a single point

        :param wherex: The point in the destination coordinate system
        :param wherey: The point in the destination coordinate system
        :param srs: The destination coordinate system
        :param fuzziness: UNUSED
        :param kwargs: "band : int" refers to the band index, if present
        :return: A dict of band-index -> value at the point, or None if outside the raster
        """
        _, (_, nativesrs, result) = self.ready_data_resource(**kwargs)
        ds = gdal.Open(result['file'])
        # Transform the query point from the caller's SRS into the raster's SRS.
        s_srs = osr.SpatialReference()
        t_srs = osr.SpatialReference()
        if srs.lower().startswith('epsg'):
            s_srs.ImportFromEPSG(int(srs.split(':')[-1]))
        else:
            s_srs.ImportFromProj4(srs)
        t_srs.ImportFromProj4(nativesrs)
        crx = osr.CoordinateTransformation(s_srs, t_srs)
        x1, y1, _ = crx.TransformPoint(wherex, wherey)
        nx, ny, xmin, xmax, ymin, ymax = self._read_extent(ds)
        if x1 < xmin or x1 > xmax or y1 < ymin or y1 > ymax:
            return None
        # Convert native coordinates to a pixel offset and read one pixel.
        xoff = int((x1 - xmin) / (xmax - xmin) * nx)
        yoff = int((y1 - ymin) / (ymax - ymin) * ny)
        return dict(zip(range(ds.RasterCount), ds.ReadAsArray(xoff, yoff, 1, 1).reshape(ds.RasterCount)))

    def as_dataframe(self):
        """
        Creates a dataframe object for a shapefile's main layer using layer_as_dataframe. This object is cached on disk for
        layer use, but the cached copy will only be picked up if the shapefile's mtime is older than the dataframe's mtime.

        :return: either a pandas DataFrame object if there is but one raster band or a Panel if there are N.
        """
        import pandas  # local import: needed for read_pickle (file-level import only brings in DataFrame/Panel)

        dfx_path = self.get_filename('dfx')
        tiff_path = self.get_filename('tif')

        if hasattr(self, '_df'):
            return self._df
        elif os.path.exists(dfx_path) and os.stat(dfx_path).st_mtime >= os.stat(tiff_path).st_mtime:
            # BUGFIX: Panel has no ``read_pickle`` classmethod; the supported
            # API is the module-level pandas.read_pickle, which restores
            # whatever type (Panel or DataFrame) was pickled below.
            self._df = pandas.read_pickle(dfx_path)
            return self._df
        else:
            ds = gdal.Open(tiff_path)
            try:
                # A multi-band raster reads as a 3-D array -> Panel.
                df = Panel(ds.ReadAsArray())
                df.to_pickle(dfx_path)
                self._df = df
                return self._df
            except Exception:
                # Panel() raises on a 2-D (single-band) array; fall back to a DataFrame.
                df = DataFrame(ds.ReadAsArray())
                df.to_pickle(dfx_path)
                self._df = df
                return self._df

    @classmethod
    def from_dataframe(cls, df, filename, srs, x0, dx, y0, dy, xt=0, yt=0, **metadata):
        """
        Write an dataframe object out as a geotiff. Note that no interpolation will be done. Everything must be
        ready as is to be made into an image, including rows, columns, and the final data type.

        :param df: Either a DataFrame or Panel object from pandas
        :param filename: The file to write a GeoTiff to
        :param srs: The spatial reference system (an ogr.SpatialReference object)
        :param x0: The left boundary of the object - part of the GeoTransform
        :param dx: The increment in x for a single column (pixel)
        :param y0: The north (or in unusual cases, south) boundary of the object
        :param dy: The increment in y for a single row (pixel)
        :param xt: The "skew" x-wise (part of the GDAL geotransform)
        :param yt: THe "skew" y-wise (part of the GDAL geotransform)
        :param metadata: Any key-value pairs you wish to set as metadata for the object
        :return: None
        """
        dtypes = {
            'uint8': gdal.GDT_Byte,
            'int64': gdal.GDT_Int16,
            'float64': gdal.GDT_Float64,
            'object': gdal.GDT_Unknown,
            'datetime64[ns]': gdal.GDT_UInt32
        }
        drv = gdal.GetDriverByName('GTiff')
        if os.path.exists(filename):
            os.unlink(filename)
        cols = df.shape[2]
        rows = df.shape[1]
        bands = df.shape[0] if len(df.shape) == 3 else 1
        dtype = df[0]
        if hasattr(dtype, 'dtype'):  # takes care of panel vs. frame
            dtype = dtypes[dtype.dtype.name]
        else:
            dtype = dtypes[dtype[0].dtype.name]
        ds = drv.Create(filename, cols, rows, bands, dtype)
        ds.SetGeoTransform((x0, dx, xt, y0, yt, dy))
        ds.SetProjection(srs.ExportToWkt())
        for k, v in metadata.items():
            # BUGFIX: SetMetadata(k, v) treats ``v`` as a metadata *domain*
            # name; the per-item setter is SetMetadataItem(name, value).
            ds.SetMetadataItem(k, v)
        if isinstance(df, Panel):
            for band in range(1, bands + 1):
                b = ds.GetRasterBand(band)
                b.WriteArray(df[band - 1].values)
        else:
            b = ds.GetRasterBand(1)
            b.WriteArray(df.values)
        del ds  # closing the dataset flushes it to disk


driver = GeotiffDriver
| |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2018, the cclib development team
#
# This file is part of cclib (http://cclib.github.io) and is distributed under
# the terms of the BSD 3-Clause License.
"""Test logfiles with vibration output in cclib"""
import os
import unittest
from skip import skipForParser, skipForLogfile
__filedir__ = os.path.realpath(os.path.dirname(__file__))
class GenericIRTest(unittest.TestCase):
    """Generic vibrational frequency unittest.

    Concrete suites mix in a parsed-logfile object as ``self.data``; the
    program-specific subclasses below override the reference values.
    """

    # Unit tests should normally give this value for the largest IR intensity.
    max_IR_intensity = 100

    # Unit tests may give these values for the largest force constant and reduced mass, respectively.
    max_force_constant = 10.0
    max_reduced_mass = 6.9

    # reference zero-point correction from Gaussian 16 dvb_ir.out
    zpve = 0.1771

    def setUp(self):
        """Initialize the number of vibrational frequencies on a per molecule basis"""
        # 3N - 6 normal modes for a non-linear molecule with N atoms.
        self.numvib = 3*len(self.data.atomnos) - 6

    def testbasics(self):
        """Are basic attributes correct?"""
        # The reference molecule (divinylbenzene) has 20 atoms.
        self.assertEqual(self.data.natom, 20)

    @skipForParser('NWChem', 'Not implemented for this parser')
    def testvibdisps(self):
        """Are the dimensions of vibdisps consistent with numvib x N x 3"""
        self.assertEqual(len(self.data.vibfreqs), self.numvib)
        self.assertEqual(self.data.vibdisps.shape,
                         (self.numvib, len(self.data.atomnos), 3))

    def testlengths(self):
        """Are the lengths of vibfreqs and vibirs (and if present, vibsyms, vibfconnsts and vibrmasses) correct?"""
        self.assertEqual(len(self.data.vibfreqs), self.numvib)
        # The remaining attributes are optional; only check them when parsed.
        if hasattr(self.data, 'vibirs'):
            self.assertEqual(len(self.data.vibirs), self.numvib)
        if hasattr(self.data, 'vibsyms'):
            self.assertEqual(len(self.data.vibsyms), self.numvib)
        if hasattr(self.data, 'vibfconsts'):
            self.assertEqual(len(self.data.vibfconsts), self.numvib)
        if hasattr(self.data, 'vibrmasses'):
            self.assertEqual(len(self.data.vibrmasses), self.numvib)

    def testfreqval(self):
        """Is the highest freq value 3630 +/- 200 wavenumber?"""
        self.assertAlmostEqual(max(self.data.vibfreqs), 3630, delta=200)

    @skipForParser('Psi4', 'Psi cannot print IR intensities')
    def testirintens(self):
        """Is the maximum IR intensity 100 +/- 10 km/mol?"""
        self.assertAlmostEqual(max(self.data.vibirs), self.max_IR_intensity, delta=10)

    @skipForParser('ADF', 'ADF cannot print force constants')
    @skipForParser('DALTON', 'DALTON cannot print force constants')
    @skipForParser('GAMESS', 'GAMESS-US cannot print force constants')
    @skipForParser('GAMESSUK', 'GAMESS-UK cannot print force constants')
    @skipForParser('Molcas', 'Molcas cannot print force constants')
    @skipForParser('Molpro', 'Molpro cannot print force constants')
    @skipForParser('NWChem', 'Not implemented for this parser')
    @skipForParser('ORCA', 'ORCA cannot print force constants')
    @skipForParser('Turbomole', 'Turbomole cannot print force constants')
    @skipForLogfile('Jaguar/Jaguar4.2', 'Data file does not contain force constants')
    @skipForLogfile('Psi4/Psi4-1.0', 'Data file contains vibrational info with cartesian coordinates')
    def testvibfconsts(self):
        """Is the maximum force constant 10. +/- 0.1 mDyn/angstrom?"""
        self.assertAlmostEqual(max(self.data.vibfconsts), self.max_force_constant, delta=0.1)

    @skipForParser('ADF', 'ADF cannot print reduced masses')
    @skipForParser('DALTON', 'DALTON cannot print reduced masses')
    @skipForParser('GAMESSUK', 'GAMESSUK cannot print reduced masses')
    @skipForParser('Molpro', 'Molpro cannot print reduced masses')
    @skipForParser('NWChem', 'Not implemented for this parser')
    @skipForParser('ORCA', 'ORCA cannot print reduced masses')
    @skipForLogfile('GAMESS/PCGAMESS', 'Data file does not contain reduced masses')
    @skipForLogfile('Psi4/Psi4-1.0', 'Data file does not contain reduced masses')
    def testvibrmasses(self):
        """Is the maximum reduced mass 6.9 +/- 0.1 daltons?"""
        self.assertAlmostEqual(max(self.data.vibrmasses), self.max_reduced_mass, delta=0.1)

    @skipForParser('Psi3', 'not implemented yet')
    def testzeropointcorrection(self):
        """Is the zero-point correction correct?"""
        self.assertAlmostEqual(self.data.zpve, self.zpve, delta=1.0e-3)
class ADFIRTest(GenericIRTest):
    """Customized vibrational frequency unittest"""

    def testzeropointcorrection(self):
        """Is the zero-point correction correct?"""
        # NOTE(review): tolerance is relaxed to 1e-2 (vs 1e-3 in the generic
        # test) — presumably ADF's ZPVE differs more from the Gaussian
        # reference; the original comment was just "???".
        self.assertAlmostEqual(self.data.zpve, self.zpve, delta=1.0e-2)
class FireflyIRTest(GenericIRTest):
    """Customized vibrational frequency unittest"""

    # Firefly-specific reference values for the generic assertions.
    max_IR_intensity = 135
    # NOTE(review): program-specific ZPVE reference; the original comment was "???".
    zpve = 0.1935
class GaussianIRTest(GenericIRTest):
    """Customized vibrational frequency unittest"""

    # Decimal places used by the thermochemistry assertions below.
    entropy_places = 6
    enthalpy_places = 3
    freeenergy_places = 3

    def testvibsyms(self):
        """Is the length of vibsyms correct?"""
        self.assertEqual(len(self.data.vibsyms), self.numvib)

    def testzeropointcorrection(self):
        """Is the zero-point correction correct?"""
        # BUGFIX: this docstring used to appear *after* the assignment below,
        # making it a no-op string expression instead of the test's docstring
        # (so unittest showed no description for this test).
        # reference zero-point correction from dvb_ir.out
        zpve = 0.1771
        self.assertAlmostEqual(self.data.zpve, zpve, delta=0.001)

    def testtemperature(self):
        """Is the temperature 298.15 K?"""
        self.assertAlmostEqual(298.15, self.data.temperature)

    def testpressure(self):
        """Is the pressure 1 atm?"""
        self.assertAlmostEqual(1, self.data.pressure)

    def testentropy(self):
        """Is the entropy reasonable"""
        self.assertAlmostEqual(0.0001462623335480945, self.data.entropy, self.entropy_places)

    def testenthalpy(self):
        """Is the enthalpy reasonable"""
        self.assertAlmostEqual(-382.12130688525264, self.data.enthalpy, self.enthalpy_places)

    def testfreeenergy(self):
        """Is the freeenergy reasonable"""
        self.assertAlmostEqual(-382.164915, self.data.freeenergy, self.freeenergy_places)

    def testfreeenergyconsistency(self):
        """Does G = H - TS hold"""
        self.assertAlmostEqual(self.data.enthalpy - self.data.temperature * self.data.entropy, self.data.freeenergy, self.freeenergy_places)
class JaguarIRTest(GenericIRTest):
    """Customized vibrational frequency unittest"""

    # Jagur outputs vibrational info with cartesian coordinates
    max_force_constant = 3.7
    max_reduced_mass = 2.3

    def testvibsyms(self):
        """Is the length of vibsyms correct?"""
        self.assertEqual(len(self.data.vibsyms), self.numvib)
class MolcasIRTest(GenericIRTest):
    """Customized vibrational frequency unittest"""

    # Molcas-specific reference values for the generic assertions.
    max_IR_intensity = 65
    zpve = 0.1783

    # Decimal places used by the thermochemistry assertions below.
    entropy_places = 6
    enthalpy_places = 3
    freeenergy_places = 3

    def testtemperature(self):
        """Is the temperature 298.15 K?"""
        self.assertAlmostEqual(298.15, self.data.temperature)

    def testpressure(self):
        """Is the pressure 1 atm?"""
        self.assertAlmostEqual(1, self.data.pressure)

    def testentropy(self):
        """Is the entropy reasonable"""
        self.assertAlmostEqual(0.00013403320476271246, self.data.entropy, self.entropy_places)

    def testenthalpy(self):
        """Is the enthalpy reasonable"""
        self.assertAlmostEqual(-382.11385, self.data.enthalpy, self.enthalpy_places)

    def testfreeenergy(self):
        """Is the freeenergy reasonable"""
        self.assertAlmostEqual(-382.153812, self.data.freeenergy, self.freeenergy_places)

    def testfreeenergyconsistency(self):
        """Does G = H - TS hold"""
        self.assertAlmostEqual(self.data.enthalpy - self.data.temperature * self.data.entropy, self.data.freeenergy, self.freeenergy_places)
class NWChemIRTest(GenericIRTest):
    """Customized vibrational frequency unittest for NWChem.

    (The previous docstring, "Generic imaginary vibrational frequency
    unittest", was a copy-paste from GenericIRimgTest.)
    """

    def setUp(self):
        """Initialize the number of vibrational frequencies on a per molecule basis"""
        # NWChem reports all 3N modes (including translations/rotations),
        # not just the 3N - 6 internal ones.
        self.numvib = 3*len(self.data.atomnos)
class OrcaIRTest(GenericIRTest):
    """Customized vibrational frequency unittest"""

    # ORCA has a bug in the intensities for version < 4.0
    max_IR_intensity = 215
    zpve = 0.1921

    # Decimal places used by the thermochemistry assertions below.
    enthalpy_places = 3
    entropy_places = 6
    freeenergy_places = 3

    def testtemperature(self):
        """Is the temperature 298.15 K?"""
        self.assertAlmostEqual(298.15, self.data.temperature)

    def testpressure(self):
        """Is the pressure 1 atm?"""
        self.assertAlmostEqual(1, self.data.pressure)

    def testenthalpy(self):
        """Is the enthalpy reasonable"""
        self.assertAlmostEqual(-381.85224835, self.data.enthalpy, self.enthalpy_places)

    def testentropy(self):
        """Is the entropy reasonable"""
        self.assertAlmostEqual(0.00012080325339594164, self.data.entropy, self.entropy_places)

    def testfreeenergy(self):
        """Is the freeenergy reasonable"""
        self.assertAlmostEqual(-381.88826585, self.data.freeenergy, self.freeenergy_places)

    def testfreeenergyconsistency(self):
        """Does G = H - TS hold"""
        self.assertAlmostEqual(self.data.enthalpy - self.data.temperature * self.data.entropy, self.data.freeenergy, self.freeenergy_places)
class QChemIRTest(GenericIRTest):
    """Customized vibrational frequency unittest"""

    # Decimal places used by the thermochemistry assertions below.
    enthalpy_places = 3
    entropy_places = 6
    freeenergy_places = 3

    def testtemperature(self):
        """Is the temperature 298.15 K?"""
        self.assertEqual(298.15, self.data.temperature)

    def testpressure(self):
        """Is the pressure 1 atm?"""
        self.assertAlmostEqual(1, self.data.pressure)

    def testenthalpy(self):
        """Is the enthalpy reasonable"""
        self.assertAlmostEqual(0.1871270552135131, self.data.enthalpy, self.enthalpy_places)

    def testentropy(self):
        """Is the entropy reasonable"""
        self.assertAlmostEqual(0.00014667348271900577, self.data.entropy, self.entropy_places)

    def testfreeenergy(self):
        """Is the freeenergy reasonable"""
        self.assertAlmostEqual(0.14339635634084155, self.data.freeenergy, self.freeenergy_places)

    # Molecular mass of DVB in mD.
    molecularmass = 130078.25

    def testatommasses(self):
        """Do the atom masses sum up to the molecular mass (130078.25+-0.1mD)?"""
        mm = 1000*sum(self.data.atommasses)
        self.assertAlmostEqual(mm, 130078.25, delta=0.1, msg = "Molecule mass: %f not 130078 +- 0.1mD" % mm)

    def testhessian(self):
        """Do the frequencies from the Hessian match the printed frequencies?"""
        # TODO(review): this test body is empty — it currently asserts nothing
        # and always passes. Implement the comparison or remove the stub.

    def testfreeenergyconsistency(self):
        """Does G = H - TS hold"""
        self.assertAlmostEqual(self.data.enthalpy - self.data.temperature * self.data.entropy, self.data.freeenergy, self.freeenergy_places)
class GamessIRTest(GenericIRTest):
    """Customized vibrational frequency unittest"""

    # Molecular mass of DVB in mD.
    molecularmass = 130078.25

    # Decimal places used by the thermochemistry assertions below.
    enthalpy_places = 3
    entropy_places = 6
    freeenergy_places = 3

    def testatommasses(self):
        """Do the atom masses sum up to the molecular mass (130078.25+-0.1mD)?"""
        mm = 1000*sum(self.data.atommasses)
        self.assertAlmostEqual(mm, 130078.25, delta=0.1, msg = "Molecule mass: %f not 130078 +- 0.1mD" % mm)

    def testtemperature(self):
        """Is the temperature 298.15 K?"""
        self.assertAlmostEqual(298.15, self.data.temperature)

    def testpressure(self):
        """Is the pressure 1 atm?"""
        self.assertAlmostEqual(1, self.data.pressure)

    def testenthalpy(self):
        """Is the enthalpy reasonable"""
        self.assertAlmostEqual(-381.86372805188300, self.data.enthalpy, self.enthalpy_places)

    def testentropy(self):
        """Is the entropy reasonable"""
        self.assertAlmostEqual(0.00014875961938, self.data.entropy, self.entropy_places)

    def testfreeenergy(self):
        """Is the freeenergy reasonable"""
        self.assertAlmostEqual(-381.90808120060200, self.data.freeenergy, self.freeenergy_places)

    def testfreeenergyconsistency(self):
        """Does G = H - TS hold"""
        self.assertAlmostEqual(self.data.enthalpy - self.data.temperature * self.data.entropy, self.data.freeenergy, self.freeenergy_places)
class Psi4IRTest(GenericIRTest):
    """Customized vibrational frequency unittest"""

    # RHF is used for Psi4 IR test data instead of B3LYP
    max_force_constant = 9.37
    zpve = 0.1917
class TurbomoleIRTest(GenericIRTest):
    """Customized vibrational frequency unittest"""

    # NOTE(review): program-specific ZPVE reference; the original comment was "???".
    zpve = 0.1725
class GenericIRimgTest(unittest.TestCase):
"""Generic imaginary vibrational frequency unittest"""
def setUp(self):
"""Initialize the number of vibrational frequencies on a per molecule basis"""
self.numvib = 3*len(self.data.atomnos) - 6
def testvibdisps(self):
"""Are the dimensions of vibdisps consistent with numvib x N x 3"""
self.assertEqual(self.data.vibdisps.shape,
(self.numvib, len(self.data.atomnos), 3))
def testlengths(self):
"""Are the lengths of vibfreqs and vibirs correct?"""
self.assertEqual(len(self.data.vibfreqs), self.numvib)
self.assertEqual(len(self.data.vibirs), self.numvib)
def testfreqval(self):
"""Is the lowest freq value negative?"""
self.assertTrue(self.data.vibfreqs[0] < 0)
## def testmaxvibdisps(self):
## """What is the maximum value of displacement for a H vs a C?"""
## Cvibdisps = compress(self.data.atomnos==6, self.data.vibdisps, 1)
## Hvibdisps = compress(self.data.atomnos==1, self.data.vibdisps, 1)
## self.assertEqual(max(abs(Cvibdisps).flat), 1.0)
class GenericRamanTest(unittest.TestCase):
    """Generic Raman unittest"""

    # This value is in amu.
    max_raman_intensity = 575

    def setUp(self):
        """Initialize the number of vibrational frequencies on a per molecule basis"""
        # 3N - 6 vibrational modes for a nonlinear molecule.
        self.numvib = 3*len(self.data.atomnos) - 6

    def testlengths(self):
        """Is the length of vibramans correct?"""
        self.assertEqual(len(self.data.vibramans), self.numvib)

    # The tolerance for this number has been increased, since ORCA
    # failed to make it inside +/-5, but it would be nice in the future
    # to determine -- if it's not too much work -- whether this is due to
    # algorithmic differences, or to differences in the input basis set
    # or coordinates. The first would be OK, but in the second case the
    # unit test jobs should be made more comparable. With cclib, we first
    # of all want to succeed in parsing, but would also like to remain
    # as comparable between programs as possible (for these tests).
    # Note also that this value is adjusted for Gaussian and DALTON - why?
    def testramanintens(self):
        """Is the maximum Raman intensity correct?"""
        self.assertAlmostEqual(max(self.data.vibramans), self.max_raman_intensity, delta=8)

        # We used to test this, but it seems to vary wildly between
        # programs... perhaps we could use it if we knew why...
        #self.assertInside(self.data.vibramans[1], 2.6872, 0.0001)

    def testvibdisps(self):
        """Is the length and value of vibdisps correct?"""
        assert hasattr(self.data, "vibdisps")
        # 54 = 3*20 - 6: hard-coded for the 20-atom reference molecule --
        # TODO confirm this matches self.numvib for the fixture in use.
        assert len(self.data.vibdisps) == 54
class DALTONRamanTest(GenericRamanTest):
    """Customized Raman unittest"""
    # DALTON-specific expected maximum Raman intensity.
    max_raman_intensity = 745
class GaussianRamanTest(GenericRamanTest):
    """Customized Raman unittest"""
    # Gaussian-specific expected maximum Raman intensity.
    max_raman_intensity = 1066
class OrcaRamanTest(GenericRamanTest):
    """Customized Raman unittest"""
    # ORCA-specific expected maximum Raman intensity.
    max_raman_intensity = 1045
class QChemRamanTest(GenericRamanTest):
    """Customized Raman unittest"""
    # Q-Chem-specific expected maximum Raman intensity.
    max_raman_intensity = 588
if __name__=="__main__":
    import sys
    # `__filedir__` and `os` are expected to be defined/imported earlier in
    # this file -- TODO confirm (not visible in this chunk).
    sys.path.insert(1, os.path.join(__filedir__, ".."))
    from test_data import DataSuite
    # Run only the vibrational-data ('vib') portion of the regression suite.
    suite = DataSuite(['vib'])
    suite.testall()
| |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
r"""
Fama-McBeth regressions
=======================
This module provides two estimators of risk premia in Fama-McBeth regressions:
2-step OLS and GMM.
The inspiration for this code comes from Chapters 27.2-3 of
Kevin Sheppard's book "Python for Econometrics":
<http://www.kevinsheppard.com/images/0/09/Python_introduction.pdf>
The data with Fama-French risk factors:
<http://www.kevinsheppard.com/images/0/0b/FamaFrench.zip>
"""
from __future__ import print_function, division
import warnings
import numpy as np
from scipy.stats import chi2
from mygmm import GMM
__author__ = "Stanislav Khrapov"
__email__ = "khrapovs@gmail.com"
__status__ = "Development"
__all__ = ['FamaMcBeth', 'convert_theta_to1d']
class FamaMcBeth(object):
    r"""Fama-McBeth regressions.

    Time series regression:
        :math:`E\left[R_{it} - \beta_i * f_t | f_t\right] = 0`

    and

    Cross-sectional regression:
        :math:`E\left[R_{it} - \beta_i * \gamma\right] = 0`

    Attributes
    ----------
    factors : (dim_t, dim_k) array
        Explanatory factors in the regression,
        including constant in the first place
    excess_ret : (dim_t, dim_n) array
        Portfolio excess returns that we are trying to explain

    Methods
    -------
    two_step_ols
        Two-step OLS estimator
    compute_theta_var
        Estimate variance of the 2-step OLS estimator
    param_tstat
        T-statistics for parameter estimates
    jtest
        J-test for misspecification of the model

    """

    def __init__(self, factors, excess_ret):
        """Initialize the class.

        Parameters
        ----------
        factors : (dim_t, dim_k) array
            Explanatory factors in the regression,
            including constant in the first place
        excess_ret : (dim_t, dim_n) array
            Portfolio excess returns that we are trying to explain

        """
        # Store data internally.
        self.factors = factors
        self.excess_ret = excess_ret

    def __get_dimensions(self):
        """Get essential dimensions of the data.

        Returns
        -------
        dim_t : int
            Number of time periods
        dim_n : int
            Number of portfolio returns to be explained
        dim_k : int
            Number of explanatory factors, including constant

        """
        dim_t, dim_n = self.excess_ret.shape
        dim_k = self.factors.shape[1]
        return dim_t, dim_n, dim_k

    def two_step_ols(self):
        """Two-step OLS estimator.

        Step 1: time-series OLS of excess returns on the factors gives
        intercepts (alpha) and risk exposures (beta).
        Step 2: cross-sectional OLS of mean excess returns on the betas
        gives risk premia (gamma).

        Returns
        -------
        param : (dim_k*(dim_n+1)-1, ) array
            Parameter vector (alphas, betas, gammas; see convert_theta_to1d)
        gamma_rsq : float
            R-squared of the cross-sectional regression, percent
        gamma_rmse : float
            Root-mean-squared error of the cross-sectional regression
        theta_rsq : (dim_n, ) array
            R-squared for each time series regression, percent
        theta_rmse : (dim_n, ) array
            Root-mean-squared error of each time series regression

        """
        dim_t, dim_n, dim_k = self.__get_dimensions()
        # Time series regressions
        # (dim_k, dim_n) array. This theta includes intercepts alpha.
        # NOTE(review): lstsq returns an *empty* residual array when the
        # design matrix is rank-deficient -- full rank is assumed here.
        theta, resid = np.linalg.lstsq(self.factors, self.excess_ret)[:2]
        # (dim_n, ) array of per-portfolio RMSE
        theta_rmse = (resid / dim_t) ** .5
        # (dim_n, ) array, percent
        theta_rsq = 100 * (1 - theta_rmse**2 / self.excess_ret.var(0))
        # (dim_n, ) array
        alpha = theta[0]
        # (dim_k-1, dim_n) array
        beta = theta[1:]
        # (dim_n, ) array
        mean_excess_ret = self.excess_ret.mean(0)
        # Cross-section regression
        # (dim_k-1, ) array
        gamma, resid = np.linalg.lstsq(beta.T, mean_excess_ret.T)[:2]
        # float
        gamma_rmse = (resid / dim_n) ** .5
        # float
        gamma_rsq = 1 - gamma_rmse**2 / mean_excess_ret.var()
        # gamma_rsq = 1 - (1 - gamma_rsq) * (dim_n - 1) / (dim_n - dim_k - 1)
        gamma_rsq *= 100
        param = convert_theta_to1d(alpha, beta, gamma)
        return param, gamma_rsq, gamma_rmse, theta_rsq, theta_rmse

    def param_stde(self, theta, **kwargs):
        """Standard errors for parameter estimates.

        Parameters
        ----------
        theta : (dim_k*(dim_n+1)-1, ) array
            Parameter vector
        kwargs : dict
            Passed through to the GMM variance estimator

        Returns
        -------
        (dim_k*(dim_n+1)-1, ) array

        """
        var = self.compute_theta_var(theta, **kwargs)
        # abs() guards against tiny negative diagonal entries from
        # numerical error before the square root.
        return np.abs(np.diag(var))**.5

    def param_tstat(self, theta, **kwargs):
        """T-statistics for parameter estimates.

        Parameters
        ----------
        theta : (dim_k*(dim_n+1)-1, ) array
            Parameter vector

        Returns
        -------
        (dim_k*(dim_n+1)-1, ) array

        """
        return theta / self.param_stde(theta, **kwargs)

    def alpha_beta_gamma_stde(self, theta, **kwargs):
        """Standard errors for parameter estimates, split into matrices.

        Parameters
        ----------
        theta : (dim_k*(dim_n+1)-1, ) array
            Parameter vector

        Returns
        -------
        alpha_stde : (dim_n, ) array
            Intercepts in time series regressions
        beta_stde : (dim_k-1, dim_n) array
            Risk exposures
        gamma_stde : (dim_k-1, ) array
            Risk premia

        """
        stde = self.param_stde(theta, **kwargs)
        return self.convert_theta_to2d(stde)

    def alpha_beta_gamma_tstat(self, theta, **kwargs):
        """T-statistics for parameter estimates, split into matrices.

        Parameters
        ----------
        theta : (dim_k*(dim_n+1)-1, ) array
            Parameter vector

        Returns
        -------
        alpha_tstat : (dim_n, ) array
            Intercepts in time series regressions
        beta_tstat : (dim_k-1, dim_n) array
            Risk exposures
        gamma_tstat : (dim_k-1, ) array
            Risk premia

        """
        tstat = self.param_tstat(theta, **kwargs)
        return self.convert_theta_to2d(tstat)

    def jtest(self, theta, **kwargs):
        """J-test for misspecification of the model.

        Tests whether all intercepts alphas are simultaneously zero.

        Parameters
        ----------
        theta : (dim_k*(dim_n+1)-1, ) array
            Parameter vector

        Returns
        -------
        jstat : float
            J-statistic
        jpval : float
            Corresponding p-value of the test, percent

        """
        dim_n, dim_k = self.__get_dimensions()[1:]
        param_var = self.compute_theta_var(theta, **kwargs)
        # Alphas sit at every dim_k-th position of the parameter vector.
        alpha_var = param_var[0:dim_n*dim_k:dim_k, 0:dim_n*dim_k:dim_k]
        eig = np.linalg.eigvalsh(alpha_var).min()
        # Shift the spectrum to force positive definiteness when the
        # smallest eigenvalue is non-positive (numerical safeguard).
        if eig <= 0:
            alpha_var -= np.eye(dim_n) * eig * 1.1
        inv_var = np.linalg.pinv(alpha_var)
        # Cholesky is used only as a positive-definiteness check.
        try:
            np.linalg.cholesky(inv_var)
        except np.linalg.LinAlgError:
            warnings.warn('Inverse of alpha variance is not P.D.!')
        alpha = self.convert_theta_to2d(theta)[0]
        # Wald-type quadratic form: alpha' * inv_var * alpha.
        jstat = (alpha.dot(inv_var) * alpha).sum()
        jpval = 1 - chi2(dim_n).cdf(jstat)
        return jstat, jpval*100

    def convert_theta_to2d(self, theta):
        """Convert parameter vector to matrices.

        Parameters
        ----------
        theta : (dim_k*(dim_n+1)-1, ) array
            Parameter vector in portfolio-major order
            (alpha_i, beta_i for each portfolio, then gamma)

        Returns
        -------
        alpha : (dim_n, ) array
            Intercepts in time series regressions
        beta : (dim_k-1, dim_n) array
            Risk exposures
        gamma : (dim_k-1, ) array
            Risk premia

        """
        dim_n, dim_k = self.__get_dimensions()[1:]
        temp = np.reshape(theta[:dim_n*dim_k], (dim_n, dim_k)).T
        alpha = temp[0]
        beta = temp[1:]
        gamma = theta[dim_n*dim_k:]
        return alpha, beta, gamma

    def momcond(self, theta, **kwargs):
        """Moment restrictions and average of its derivatives.

        Parameters
        ----------
        theta : (dim_k*(dim_n+1)-1, ) array
            Parameter vector

        Returns
        -------
        moments : (dim_t, dim_n*(dim_k+1)) array
            Moment restrictions
        dmoments : (dim_k*(dim_n+1), dim_n*(dim_k+1)) array
            Average derivative of the moment restrictions

        """
        dim_t, dim_n, dim_k = self.__get_dimensions()
        alpha, beta, gamma = self.convert_theta_to2d(theta)
        # Time-series errors interacted with the factors (incl. constant).
        errors1 = self.excess_ret - alpha - self.factors[:, 1:].dot(beta)
        moments1 = errors1[:, :, np.newaxis] * self.factors[:, np.newaxis, :]
        # (dim_t, dim_n*dim_k) array
        moments1 = moments1.reshape(dim_t, dim_n*dim_k)
        # (dim_t, dim_n) array: cross-sectional pricing errors
        errors2 = self.excess_ret - beta.T.dot(gamma)
        # (dim_t, dim_k-1) array
        moments2 = errors2.dot(beta.T)
        # (dim_t, (dim_n+1)*dim_k-1) array
        moments = np.hstack((moments1, moments2))
        dmoments = np.zeros(((dim_n+1)*dim_k-1, (dim_n+1)*dim_k-1))
        # (dim_k, dim_k) array
        factor_var = self.factors.T.dot(self.factors) / dim_t
        eye = np.eye(dim_n)
        # (dim_n*dim_k, dim_n*dim_k) block: one factor_var per portfolio.
        dmoments[:dim_n*dim_k, :dim_n*dim_k] = np.kron(eye, factor_var)
        # (dim_k-1, dim_k-1) block
        dmoments[dim_n*dim_k:, dim_n*dim_k:] = -beta.dot(beta.T)
        # Cross-derivatives of the pricing moments w.r.t. each portfolio's
        # (alpha_i, beta_i); the alpha column (first) stays zero.
        for i in range(dim_n):
            temp = np.zeros((dim_k-1, dim_k))
            values = np.mean(errors2[:, i]) - beta[:, i] * gamma
            temp[:, 1:] = np.diag(values)
            dmoments[dim_n*dim_k:, i*dim_k:(i+1)*dim_k] = temp
        return moments, dmoments.T

    def compute_theta_var(self, theta, **kwargs):
        """Estimate variance of the estimator using GMM variance matrix.

        Parameters
        ----------
        theta : (dim_k*(dim_n+1)-1, ) array
            Parameter vector
        kwargs : dict
            Passed through to mygmm.GMM.varest

        Returns
        -------
        (dim_k*(dim_n+1)-1, dim_k*(dim_n+1)-1) array
            Variance matrix of the estimator

        """
        estimator = GMM(self.momcond)
        return estimator.varest(theta, **kwargs)

    def gmmest(self, theta, **kwargs):
        """Estimate model parameters using GMM (delegates to mygmm.GMM).

        """
        estimator = GMM(self.momcond)
        return estimator.gmmest(theta, **kwargs)

    def get_realized_ret(self):
        """Realized average (across time) excess returns.

        Returns
        -------
        (dim_n, ) array
            Realized average (across time) returns

        """
        return self.excess_ret.mean(0)

    def get_predicted_ret(self, param):
        """Model-predicted average returns, beta' * gamma.

        Parameters
        ----------
        param : (dim_k*(dim_n+1)-1, ) array
            Model parameters

        Returns
        -------
        (dim_n, ) array
            Predicted average (across time) returns

        """
        beta, gamma = self.convert_theta_to2d(param)[1:]
        return beta.T.dot(gamma)
def convert_theta_to1d(alpha, beta, gamma):
    """Flatten parameter matrices into one 1d parameter vector.

    The vector is portfolio-major: (alpha_i, beta_i) for each portfolio i,
    followed by the risk premia gamma.

    Parameters
    ----------
    alpha : (dim_n, ) array
        Intercepts in time series regressions
    beta : (dim_k-1, dim_n) array
        Risk exposures
    gamma : (dim_k-1, ) array
        Risk premia

    Returns
    -------
    (dim_k*(dim_n+1)-1, ) array

    """
    # One row per portfolio: [alpha_i, beta_i_1, ..., beta_i_{k-1}].
    per_portfolio = np.vstack((alpha, beta)).T
    return np.concatenate((per_portfolio.ravel(), gamma))
if __name__ == '__main__':
    # Library module: no command-line behavior.
    pass
| |
"""
Tests for pika.channel.Channel
"""
import collections
import logging
try:
import mock
except ImportError:
from unittest import mock
try:
import unittest2 as unittest
except ImportError:
import unittest
import warnings
from pika import channel
from pika import exceptions
from pika import frame
from pika import spec
class ChannelTests(unittest.TestCase):
@mock.patch('pika.connection.Connection')
def _create_connection(self, connection=None):
    """Return a mocked Connection (the patch supplies *connection*)."""
    return connection
def setUp(self):
    """Create a fresh Channel (number 1) on a mocked connection."""
    self.connection = self._create_connection()
    self._on_openok_callback = mock.Mock()
    self.obj = channel.Channel(self.connection, 1, self._on_openok_callback)
    # Reset warning filters so DeprecationWarning tests see a clean slate.
    warnings.resetwarnings()
def tearDown(self):
    """Drop per-test fixtures and restore default warning filters."""
    del self.connection
    del self._on_openok_callback
    del self.obj
    warnings.resetwarnings()
def test_init_invalid_channel_number(self):
    """Constructing a Channel with a non-integer number raises InvalidChannelNumber."""
    self.assertRaises(exceptions.InvalidChannelNumber, channel.Channel,
                      'Foo', self.connection)
def test_init_channel_number(self):
    """Constructor stores the channel number."""
    self.assertEqual(self.obj.channel_number, 1)
def test_init_callbacks(self):
    """Channel shares the connection's callback manager."""
    self.assertEqual(self.obj.callbacks, self.connection.callbacks)
def test_init_connection(self):
    """Constructor stores the connection."""
    self.assertEqual(self.obj.connection, self.connection)
def test_init_frame_dispatcher(self):
    """A ContentFrameDispatcher is created at init."""
    self.assertIsInstance(self.obj.frame_dispatcher,
                          channel.ContentFrameDispatcher)
def test_init_blocked(self):
    """_blocked starts as an empty deque."""
    self.assertIsInstance(self.obj._blocked, collections.deque)
def test_init_blocking(self):
    """_blocking starts as None."""
    self.assertEqual(self.obj._blocking, None)
def test_init_on_flowok_callback(self):
    """_on_flowok_callback starts as None."""
    self.assertEqual(self.obj._on_flowok_callback, None)
def test_init_has_on_flow_callback(self):
    """_has_on_flow_callback starts False."""
    self.assertEqual(self.obj._has_on_flow_callback, False)
def test_init_on_openok_callback(self):
    """The open-ok callback passed to the constructor is stored."""
    self.assertEqual(self.obj._on_openok_callback, self._on_openok_callback)
def test_init_state(self):
    """Channel starts in the CLOSED state."""
    self.assertEqual(self.obj._state, channel.Channel.CLOSED)
def test_init_cancelled(self):
    """_cancelled starts as an empty set."""
    self.assertIsInstance(self.obj._cancelled, set)
def test_init_consumers(self):
    """_consumers starts empty."""
    self.assertEqual(self.obj._consumers, dict())
def test_init_pending(self):
    """_pending starts empty."""
    self.assertEqual(self.obj._pending, dict())
def test_init_on_getok_callback(self):
    """_on_getok_callback starts as None."""
    self.assertEqual(self.obj._on_getok_callback, None)
def test_add_callback(self):
    """add_callback registers the callback for a single reply frame."""
    mock_callback = mock.Mock()
    self.obj.add_callback(mock_callback, [spec.Basic.Qos])
    self.connection.callbacks.add.assert_called_once_with(
        self.obj.channel_number, spec.Basic.Qos, mock_callback, True)
def test_add_callback_multiple_replies(self):
    """add_callback registers the callback once per reply frame."""
    mock_callback = mock.Mock()
    self.obj.add_callback(mock_callback, [spec.Basic.Qos, spec.Basic.QosOk])
    calls = [mock.call(self.obj.channel_number, spec.Basic.Qos,
                       mock_callback, True),
             mock.call(self.obj.channel_number, spec.Basic.QosOk,
                       mock_callback, True)]
    self.connection.callbacks.add.assert_has_calls(calls)
def test_add_on_cancel_callback(self):
    """add_on_cancel_callback registers for server-sent Basic.Cancel."""
    mock_callback = mock.Mock()
    self.obj.add_on_cancel_callback(mock_callback)
    self.connection.callbacks.add.assert_called_once_with(
        self.obj.channel_number, spec.Basic.Cancel, mock_callback, False)
def test_add_on_close_callback(self):
    """add_on_close_callback registers for the channel-close event."""
    mock_callback = mock.Mock()
    self.obj.add_on_close_callback(mock_callback)
    self.connection.callbacks.add.assert_called_once_with(
        self.obj.channel_number, '_on_channel_close', mock_callback, False,
        self.obj)
def test_add_on_flow_callback(self):
    """add_on_flow_callback registers for Channel.Flow frames."""
    mock_callback = mock.Mock()
    self.obj.add_on_flow_callback(mock_callback)
    self.connection.callbacks.add.assert_called_once_with(
        self.obj.channel_number, spec.Channel.Flow, mock_callback, False)
def test_add_on_return_callback(self):
    """add_on_return_callback registers for returned messages."""
    mock_callback = mock.Mock()
    self.obj.add_on_return_callback(mock_callback)
    self.connection.callbacks.add.assert_called_once_with(
        self.obj.channel_number, '_on_return', mock_callback, False)
def test_basic_ack_channel_closed(self):
    """basic_ack on a closed channel raises ChannelClosed."""
    self.assertRaises(exceptions.ChannelClosed, self.obj.basic_ack)
@mock.patch('pika.channel.Channel._validate_channel_and_callback')
def test_basic_cancel_calls_validate(self, validate):
    """basic_cancel validates the channel state and callback."""
    self.obj._set_state(self.obj.OPEN)
    consumer_tag = 'ctag0'
    callback_mock = mock.Mock()
    self.obj._consumers[consumer_tag] = callback_mock
    self.obj.basic_cancel(callback_mock, consumer_tag)
    validate.assert_called_once_with(callback_mock)
@mock.patch('pika.spec.Basic.Ack')
@mock.patch('pika.channel.Channel._send_method')
def test_basic_send_method_calls_rpc(self, send_method, unused):
    """basic_ack delegates to _send_method (name says rpc, but _send_method is asserted)."""
    self.obj._set_state(self.obj.OPEN)
    self.obj.basic_ack(1, False)
    send_method.assert_called_once_with(spec.Basic.Ack(1, False))
@mock.patch('pika.channel.Channel._rpc')
def test_basic_cancel_no_consumer_tag(self, rpc):
    """basic_cancel for an unknown consumer tag is a no-op (no RPC sent)."""
    self.obj._set_state(self.obj.OPEN)
    callback_mock = mock.Mock()
    consumer_tag = 'ctag0'
    self.obj.basic_cancel(callback_mock, consumer_tag)
    self.assertFalse(rpc.called)
@mock.patch('pika.channel.Channel._rpc')
def test_basic_cancel_channel_cancelled_appended(self, unused):
    """basic_cancel records the tag in the _cancelled set."""
    self.obj._set_state(self.obj.OPEN)
    callback_mock = mock.Mock()
    consumer_tag = 'ctag0'
    self.obj._consumers[consumer_tag] = mock.Mock()
    self.obj.basic_cancel(callback_mock, consumer_tag)
    self.assertListEqual(list(self.obj._cancelled), [consumer_tag])
def test_basic_cancel_callback_appended(self):
    """basic_cancel registers the user callback for Basic.CancelOk."""
    self.obj._set_state(self.obj.OPEN)
    consumer_tag = 'ctag0'
    callback_mock = mock.Mock()
    self.obj._consumers[consumer_tag] = callback_mock
    self.obj.basic_cancel(callback_mock, consumer_tag)
    expectation = [self.obj.channel_number, spec.Basic.CancelOk,
                   callback_mock]
    self.obj.callbacks.add.assert_any_call(*expectation)
def test_basic_cancel_raises_value_error(self):
    """basic_cancel with both a callback and nowait=True raises ValueError."""
    self.obj._set_state(self.obj.OPEN)
    consumer_tag = 'ctag0'
    callback_mock = mock.Mock()
    self.obj._consumers[consumer_tag] = callback_mock
    self.assertRaises(ValueError, self.obj.basic_cancel, callback_mock,
                      consumer_tag,
                      nowait=True)
def test_basic_cancel_then_close(self):
    """close() while a cancel is in flight must not raise ChannelClosed."""
    self.obj._set_state(self.obj.OPEN)
    callback_mock = mock.Mock()
    consumer_tag = 'ctag0'
    self.obj._consumers[consumer_tag] = mock.Mock()
    self.obj.basic_cancel(callback_mock, consumer_tag)
    try:
        self.obj.close()
    except exceptions.ChannelClosed:
        self.fail('unable to cancel consumers as channel is closing')
    self.assertTrue(self.obj.is_closing)
def test_basic_cancel_on_cancel_appended(self):
    """basic_cancel without a user callback registers _on_cancelok."""
    self.obj._set_state(self.obj.OPEN)
    self.obj._consumers['ctag0'] = logging.debug
    self.obj.basic_cancel(consumer_tag='ctag0')
    expectation = [self.obj.channel_number, spec.Basic.CancelOk,
                   self.obj._on_cancelok]
    self.obj.callbacks.add.assert_any_call(
        *expectation,
        arguments={'consumer_tag': 'ctag0'})
def test_basic_consume_channel_closed(self):
    """basic_consume on a closed channel raises ChannelClosed."""
    mock_callback = mock.Mock()
    self.assertRaises(exceptions.ChannelClosed, self.obj.basic_consume,
                      mock_callback, 'test-queue')
@mock.patch('pika.channel.Channel._validate_channel_and_callback')
def test_basic_consume_calls_validate(self, validate):
    """basic_consume validates the channel state and callback."""
    self.obj._set_state(self.obj.OPEN)
    mock_callback = mock.Mock()
    self.obj.basic_consume(mock_callback, 'test-queue')
    validate.assert_called_once_with(mock_callback)
def test_basic_consume_consumer_tag(self):
    """Auto-generated consumer tags are prefixed 'ctag<channel>.'."""
    self.obj._set_state(self.obj.OPEN)
    expectation = 'ctag1.'
    mock_callback = mock.Mock()
    self.assertEqual(
        self.obj.basic_consume(mock_callback, 'test-queue')[:6],
        expectation)
def test_basic_consume_consumer_tag_cancelled_full(self):
    """Tag generation keeps the prefix even when many tags were cancelled."""
    self.obj._set_state(self.obj.OPEN)
    expectation = 'ctag1.'
    mock_callback = mock.Mock()
    for ctag in ['ctag1.%i' % ii for ii in range(11)]:
        self.obj._cancelled.add(ctag)
    self.assertEqual(
        self.obj.basic_consume(mock_callback, 'test-queue')[:6],
        expectation)
def test_basic_consume_consumer_tag_in_consumers(self):
    """basic_consume records the tag in _consumers."""
    self.obj._set_state(self.obj.OPEN)
    consumer_tag = 'ctag1.0'
    mock_callback = mock.Mock()
    self.obj.basic_consume(mock_callback, 'test-queue',
                           consumer_tag=consumer_tag)
    self.assertIn(consumer_tag, self.obj._consumers)
def test_basic_consume_duplicate_consumer_tag_raises(self):
    """Reusing an active consumer tag raises DuplicateConsumerTag."""
    self.obj._set_state(self.obj.OPEN)
    consumer_tag = 'ctag1.0'
    mock_callback = mock.Mock()
    self.obj._consumers[consumer_tag] = logging.debug
    self.assertRaises(exceptions.DuplicateConsumerTag,
                      self.obj.basic_consume, mock_callback, 'test-queue',
                      False, False, consumer_tag)
def test_basic_consume_consumers_callback_value(self):
    """_consumers maps the tag to the user callback."""
    self.obj._set_state(self.obj.OPEN)
    consumer_tag = 'ctag1.0'
    mock_callback = mock.Mock()
    self.obj.basic_consume(mock_callback, 'test-queue',
                           consumer_tag=consumer_tag)
    self.assertEqual(self.obj._consumers[consumer_tag], mock_callback)
def test_basic_consume_has_pending_list(self):
    """basic_consume creates a _pending entry for the tag."""
    self.obj._set_state(self.obj.OPEN)
    consumer_tag = 'ctag1.0'
    mock_callback = mock.Mock()
    self.obj.basic_consume(mock_callback, 'test-queue',
                           consumer_tag=consumer_tag)
    self.assertIn(consumer_tag, self.obj._pending)
def test_basic_consume_consumers_pending_list_is_empty(self):
    """The new _pending entry starts as an empty list."""
    self.obj._set_state(self.obj.OPEN)
    consumer_tag = 'ctag1.0'
    mock_callback = mock.Mock()
    self.obj.basic_consume(mock_callback, 'test-queue',
                           consumer_tag=consumer_tag)
    self.assertEqual(self.obj._pending[consumer_tag], list())
@mock.patch('pika.spec.Basic.Consume')
@mock.patch('pika.channel.Channel._rpc')
def test_basic_consume_consumers_rpc_called(self, rpc, unused):
    """basic_consume issues a Basic.Consume RPC expecting ConsumeOk."""
    self.obj._set_state(self.obj.OPEN)
    consumer_tag = 'ctag1.0'
    mock_callback = mock.Mock()
    self.obj.basic_consume(mock_callback, 'test-queue',
                           consumer_tag=consumer_tag)
    expectation = spec.Basic.Consume(queue='test-queue',
                                     consumer_tag=consumer_tag,
                                     no_ack=False,
                                     exclusive=False)
    rpc.assert_called_once_with(expectation, self.obj._on_eventok,
                                [(spec.Basic.ConsumeOk,
                                  {'consumer_tag': consumer_tag})])
@mock.patch('pika.channel.Channel._validate_channel_and_callback')
def test_basic_get_calls_validate(self, validate):
    """basic_get validates the channel state and callback."""
    self.obj._set_state(self.obj.OPEN)
    mock_callback = mock.Mock()
    self.obj.basic_get(mock_callback, 'test-queue')
    validate.assert_called_once_with(mock_callback)
@mock.patch('pika.channel.Channel._send_method')
def test_basic_get_callback(self, unused):
    """basic_get stores the callback for the GetOk reply."""
    self.obj._set_state(self.obj.OPEN)
    mock_callback = mock.Mock()
    self.obj.basic_get(mock_callback, 'test-queue')
    self.assertEqual(self.obj._on_getok_callback, mock_callback)
@mock.patch('pika.spec.Basic.Get')
@mock.patch('pika.channel.Channel._send_method')
def test_basic_get_send_method_called(self, send_method, unused):
    """basic_get sends a Basic.Get frame with the requested queue/no_ack."""
    self.obj._set_state(self.obj.OPEN)
    mock_callback = mock.Mock()
    self.obj.basic_get(mock_callback, 'test-queue', False)
    send_method.assert_called_once_with(spec.Basic.Get(queue='test-queue',
                                                       no_ack=False))
def test_basic_nack_raises_channel_closed(self):
    """basic_nack on a closed channel raises ChannelClosed."""
    self.assertRaises(exceptions.ChannelClosed, self.obj.basic_nack, 0,
                      False, True)
@mock.patch('pika.spec.Basic.Nack')
@mock.patch('pika.channel.Channel._send_method')
def test_basic_nack_send_method_request(self, send_method, unused):
    """basic_nack sends a Basic.Nack frame with its arguments."""
    self.obj._set_state(self.obj.OPEN)
    self.obj.basic_nack(1, False, True)
    send_method.assert_called_once_with(spec.Basic.Nack(1, False, True))
def test_basic_publish_raises_channel_closed(self):
    """basic_publish on a closed channel raises ChannelClosed."""
    self.assertRaises(exceptions.ChannelClosed, self.obj.basic_publish,
                      'foo', 'bar', 'baz')
@mock.patch('pika.channel.LOGGER')
@mock.patch('pika.spec.Basic.Publish')
@mock.patch('pika.channel.Channel._send_method')
def test_immediate_called_logger_warning(self, send_method, unused, logger):
    """Publishing with immediate=True logs a deprecation warning."""
    self.obj._set_state(self.obj.OPEN)
    exchange = 'basic_publish_test'
    routing_key = 'routing-key-fun'
    body = b'This is my body'
    properties = spec.BasicProperties(content_type='text/plain')
    mandatory = False
    immediate = True
    self.obj.basic_publish(exchange, routing_key, body, properties,
                           mandatory, immediate)
    logger.warning.assert_called_once_with('The immediate flag is '
                                           'deprecated in RabbitMQ')
@mock.patch('pika.spec.Basic.Publish')
@mock.patch('pika.channel.Channel._send_method')
def test_basic_publish_send_method_request(self, send_method, unused):
    """basic_publish sends Basic.Publish with (properties, body) as content."""
    self.obj._set_state(self.obj.OPEN)
    exchange = 'basic_publish_test'
    routing_key = 'routing-key-fun'
    body = b'This is my body'
    properties = spec.BasicProperties(content_type='text/plain')
    mandatory = False
    immediate = False
    self.obj.basic_publish(exchange, routing_key, body, properties,
                           mandatory, immediate)
    send_method.assert_called_once_with(
        spec.Basic.Publish(exchange=exchange,
                           routing_key=routing_key,
                           mandatory=mandatory,
                           immediate=immediate), (properties, body))
def test_basic_qos_raises_channel_closed(self):
    """basic_qos on a closed channel raises ChannelClosed."""
    self.assertRaises(exceptions.ChannelClosed, self.obj.basic_qos, 0,
                      False, True)
@mock.patch('pika.spec.Basic.Qos')
@mock.patch('pika.channel.Channel._rpc')
def test_basic_qos_rpc_request(self, rpc, unused):
    """basic_qos issues a Basic.Qos RPC expecting QosOk."""
    self.obj._set_state(self.obj.OPEN)
    mock_callback = mock.Mock()
    self.obj.basic_qos(mock_callback, 10, 20, False)
    rpc.assert_called_once_with(spec.Basic.Qos(mock_callback, 10, 20,
                                               False), mock_callback,
                                [spec.Basic.QosOk])
def test_basic_reject_raises_channel_closed(self):
    """basic_reject on a closed channel raises ChannelClosed."""
    self.assertRaises(exceptions.ChannelClosed, self.obj.basic_reject, 1,
                      False)
@mock.patch('pika.spec.Basic.Reject')
@mock.patch('pika.channel.Channel._send_method')
def test_basic_reject_send_method_request(self, send_method, unused):
    """basic_reject sends a Basic.Reject frame with its arguments."""
    self.obj._set_state(self.obj.OPEN)
    self.obj.basic_reject(1, True)
    send_method.assert_called_once_with(spec.Basic.Reject(1, True))
def test_basic_recover_raises_channel_closed(self):
    """basic_recover on a closed channel raises ChannelClosed.

    Fixed: this test previously called self.obj.basic_qos (copy-paste from
    test_basic_qos_raises_channel_closed), so basic_recover's closed-channel
    behavior was never actually exercised.
    """
    self.assertRaises(exceptions.ChannelClosed, self.obj.basic_recover,
                      None, True)
@mock.patch('pika.spec.Basic.Recover')
@mock.patch('pika.channel.Channel._rpc')
def test_basic_recover_rpc_request(self, rpc, unused):
    """basic_recover issues a Basic.Recover RPC expecting RecoverOk."""
    self.obj._set_state(self.obj.OPEN)
    mock_callback = mock.Mock()
    self.obj.basic_recover(mock_callback, True)
    rpc.assert_called_once_with(spec.Basic.Recover(mock_callback, True),
                                mock_callback, [spec.Basic.RecoverOk])
def test_close_raises_channel_closed(self):
    """close on an already-closed channel raises ChannelClosed."""
    self.assertRaises(exceptions.ChannelClosed, self.obj.close)
def test_close_state(self):
    """close moves an open channel into the CLOSING state."""
    self.obj._set_state(self.obj.OPEN)
    self.obj.close()
    self.assertEqual(self.obj._state, channel.Channel.CLOSING)
def test_close_basic_cancel_called(self):
    """close cancels every active consumer before closing."""
    self.obj._set_state(self.obj.OPEN)
    self.obj._consumers['abc'] = None
    with mock.patch.object(self.obj, 'basic_cancel') as basic_cancel:
        self.obj.close()
        basic_cancel.assert_called_once_with(consumer_tag='abc')
def test_confirm_delivery_raises_channel_closed(self):
    """confirm_delivery on a closed channel raises ChannelClosed."""
    self.assertRaises(exceptions.ChannelClosed, self.obj.confirm_delivery)
def test_confirm_delivery_raises_method_not_implemented_for_confirms(self):
    """confirm_delivery raises when the broker lacks publisher confirms."""
    self.obj._set_state(self.obj.OPEN)
    # Since connection is a mock.Mock, overwrite the method def with False
    self.obj.connection.publisher_confirms = False
    self.assertRaises(exceptions.MethodNotImplemented,
                      self.obj.confirm_delivery, logging.debug)
def test_confirm_delivery_raises_method_not_implemented_for_nack(self):
    """confirm_delivery raises when the broker lacks basic.nack support."""
    self.obj._set_state(self.obj.OPEN)
    # Since connection is a mock.Mock, overwrite the method def with False
    self.obj.connection.basic_nack = False
    self.assertRaises(exceptions.MethodNotImplemented,
                      self.obj.confirm_delivery, logging.debug)
def test_confirm_delivery_callback_without_nowait_selectok(self):
    """Without nowait, confirm_delivery registers _on_selectok for SelectOk."""
    self.obj._set_state(self.obj.OPEN)
    expectation = [self.obj.channel_number, spec.Confirm.SelectOk,
                   self.obj._on_selectok]
    self.obj.confirm_delivery(logging.debug)
    self.obj.callbacks.add.assert_called_with(*expectation, arguments=None)
def test_confirm_delivery_callback_with_nowait(self):
    """With nowait=True, no SelectOk callback is registered."""
    self.obj._set_state(self.obj.OPEN)
    expectation = [self.obj.channel_number, spec.Confirm.SelectOk,
                   self.obj._on_selectok]
    self.obj.confirm_delivery(logging.debug, True)
    self.assertNotIn(mock.call(*expectation,
                               arguments=None),
                     self.obj.callbacks.add.call_args_list)
def test_confirm_delivery_callback_basic_ack(self):
    """confirm_delivery registers the user callback for Basic.Ack."""
    self.obj._set_state(self.obj.OPEN)
    expectation = (self.obj.channel_number, spec.Basic.Ack, logging.debug,
                   False)
    self.obj.confirm_delivery(logging.debug)
    self.obj.callbacks.add.assert_any_call(*expectation)
def test_confirm_delivery_callback_basic_nack(self):
    """confirm_delivery registers the user callback for Basic.Nack."""
    self.obj._set_state(self.obj.OPEN)
    expectation = (self.obj.channel_number, spec.Basic.Nack, logging.debug,
                   False)
    self.obj.confirm_delivery(logging.debug)
    self.obj.callbacks.add.assert_any_call(*expectation)
def test_confirm_delivery_no_callback_callback_call_count(self):
    """Without a user callback, exactly two SelectOk callbacks are added."""
    self.obj._set_state(self.obj.OPEN)
    self.obj.confirm_delivery()
    expectation = [mock.call(*[self.obj.channel_number,
                               spec.Confirm.SelectOk,
                               self.obj._on_synchronous_complete],
                             arguments=None),
                   mock.call(*[self.obj.channel_number,
                               spec.Confirm.SelectOk,
                               self.obj._on_selectok,],
                             arguments=None)]
    self.assertEqual(self.obj.callbacks.add.call_args_list, expectation)
def test_confirm_delivery_no_callback_no_basic_ack_callback(self):
    """Without a user callback, no Basic.Ack callback is registered."""
    self.obj._set_state(self.obj.OPEN)
    expectation = [self.obj.channel_number, spec.Basic.Ack, None, False]
    self.obj.confirm_delivery()
    self.assertNotIn(mock.call(*expectation),
                     self.obj.callbacks.add.call_args_list)
def test_confirm_delivery_no_callback_no_basic_nack_callback(self):
    """Without a user callback, no Basic.Nack callback is registered."""
    self.obj._set_state(self.obj.OPEN)
    expectation = [self.obj.channel_number, spec.Basic.Nack, None, False]
    self.obj.confirm_delivery()
    self.assertNotIn(mock.call(*expectation),
                     self.obj.callbacks.add.call_args_list)
def test_consumer_tags(self):
    """consumer_tags mirrors the keys of _consumers."""
    self.assertListEqual(self.obj.consumer_tags, list(self.obj._consumers.keys()))
def test_exchange_bind_raises_channel_closed(self):
    """exchange_bind on a closed channel raises ChannelClosed."""
    self.assertRaises(exceptions.ChannelClosed, self.obj.exchange_bind,
                      None, 'foo', 'bar', 'baz')
def test_exchange_bind_raises_value_error_on_invalid_callback(self):
    """exchange_bind with a non-callable callback raises ValueError."""
    self.obj._set_state(self.obj.OPEN)
    self.assertRaises(ValueError, self.obj.exchange_bind, 'callback', 'foo',
                      'bar', 'baz')
@mock.patch('pika.spec.Exchange.Bind')
@mock.patch('pika.channel.Channel._rpc')
def test_exchange_bind_rpc_request(self, rpc, unused):
    """exchange_bind issues an Exchange.Bind RPC expecting BindOk."""
    self.obj._set_state(self.obj.OPEN)
    mock_callback = mock.Mock()
    self.obj.exchange_bind(mock_callback, 'foo', 'bar', 'baz')
    rpc.assert_called_once_with(spec.Exchange.Bind(0, 'foo', 'bar', 'baz'),
                                mock_callback, [spec.Exchange.BindOk])
@mock.patch('pika.spec.Exchange.Bind')
@mock.patch('pika.channel.Channel._rpc')
def test_exchange_bind_rpc_request_nowait(self, rpc, unused):
    """With nowait=True, exchange_bind expects no reply frames."""
    self.obj._set_state(self.obj.OPEN)
    mock_callback = mock.Mock()
    self.obj.exchange_bind(mock_callback, 'foo', 'bar', 'baz', nowait=True)
    rpc.assert_called_once_with(spec.Exchange.Bind(0, 'foo', 'bar', 'baz'),
                                mock_callback, [])
def test_exchange_declare_raises_channel_closed(self):
    """exchange_declare on a closed channel raises ChannelClosed."""
    self.assertRaises(exceptions.ChannelClosed, self.obj.exchange_declare,
                      exchange='foo')
@mock.patch('pika.channel.Channel._rpc')
def test_exchange_declare_with_type_arg_raises_deprecation_warning(self,
                                                                   _rpc):
    """The legacy `type` kwarg emits a DeprecationWarning."""
    self.obj._set_state(self.obj.OPEN)
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')
        self.obj.exchange_declare(None, 'foo', type='direct')
        self.assertEqual(len(w), 1)
        self.assertIs(w[-1].category, DeprecationWarning)
@mock.patch('pika.spec.Exchange.Declare')
@mock.patch('pika.channel.Channel._rpc')
def test_exchange_declare_with_type_arg_assigns_to_exchange_type(self, rpc,
                                                                 unused):
    """The legacy `type` kwarg is forwarded as the exchange type."""
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')
        self.obj._set_state(self.obj.OPEN)
        mock_callback = mock.Mock()
        self.obj.exchange_declare(mock_callback,
                                  exchange='foo',
                                  type='topic')
        rpc.assert_called_once_with(spec.Exchange.Declare(0, 'foo',
                                                          'topic'),
                                    mock_callback,
                                    [spec.Exchange.DeclareOk])
def test_exchange_declare_raises_value_error_on_invalid_callback(self):
    """exchange_declare with a non-callable callback raises ValueError."""
    self.obj._set_state(self.obj.OPEN)
    self.assertRaises(ValueError, self.obj.exchange_declare, 'callback',
                      'foo')
@mock.patch('pika.spec.Exchange.Declare')
@mock.patch('pika.channel.Channel._rpc')
def test_exchange_declare_rpc_request(self, rpc, unused):
    """exchange_declare issues an Exchange.Declare RPC expecting DeclareOk."""
    self.obj._set_state(self.obj.OPEN)
    mock_callback = mock.Mock()
    self.obj.exchange_declare(mock_callback, 'foo')
    rpc.assert_called_once_with(spec.Exchange.Declare(0, 'foo'),
                                mock_callback, [spec.Exchange.DeclareOk])
@mock.patch('pika.spec.Exchange.Declare')
@mock.patch('pika.channel.Channel._rpc')
def test_exchange_declare_rpc_request_nowait(self, rpc, unused):
    """With nowait=True, exchange_declare expects no reply frames."""
    self.obj._set_state(self.obj.OPEN)
    mock_callback = mock.Mock()
    self.obj.exchange_declare(mock_callback, 'foo', nowait=True)
    rpc.assert_called_once_with(spec.Exchange.Declare(0, 'foo'),
                                mock_callback, [])
def test_exchange_delete_raises_channel_closed(self):
    """exchange_delete on a closed channel raises ChannelClosed."""
    self.assertRaises(exceptions.ChannelClosed, self.obj.exchange_delete,
                      exchange='foo')
def test_exchange_delete_raises_value_error_on_invalid_callback(self):
    """exchange_delete with a non-callable callback raises ValueError."""
    self.obj._set_state(self.obj.OPEN)
    self.assertRaises(ValueError, self.obj.exchange_delete, 'callback',
                      'foo')
@mock.patch('pika.spec.Exchange.Delete')
@mock.patch('pika.channel.Channel._rpc')
def test_exchange_delete_rpc_request(self, rpc, unused):
    """exchange_delete issues an Exchange.Delete RPC expecting DeleteOk."""
    self.obj._set_state(self.obj.OPEN)
    mock_callback = mock.Mock()
    self.obj.exchange_delete(mock_callback, 'foo')
    rpc.assert_called_once_with(spec.Exchange.Delete(0, 'foo'),
                                mock_callback, [spec.Exchange.DeleteOk])
@mock.patch('pika.spec.Exchange.Delete')
@mock.patch('pika.channel.Channel._rpc')
def test_exchange_delete_rpc_request_nowait(self, rpc, unused):
    """With nowait=True, exchange_delete expects no reply frames."""
    self.obj._set_state(self.obj.OPEN)
    mock_callback = mock.Mock()
    self.obj.exchange_delete(mock_callback, 'foo', nowait=True)
    rpc.assert_called_once_with(spec.Exchange.Delete(0, 'foo'),
                                mock_callback, [])
def test_exchange_unbind_raises_channel_closed(self):
    """exchange_unbind on a closed channel raises ChannelClosed."""
    self.assertRaises(exceptions.ChannelClosed, self.obj.exchange_unbind,
                      None, 'foo', 'bar', 'baz')
def test_exchange_unbind_raises_value_error_on_invalid_callback(self):
self.obj._set_state(self.obj.OPEN)
self.assertRaises(ValueError, self.obj.exchange_unbind, 'callback',
'foo', 'bar', 'baz')
@mock.patch('pika.spec.Exchange.Unbind')
@mock.patch('pika.channel.Channel._rpc')
def test_exchange_unbind_rpc_request(self, rpc, unused):
self.obj._set_state(self.obj.OPEN)
mock_callback = mock.Mock()
self.obj.exchange_unbind(mock_callback, 'foo', 'bar', 'baz')
rpc.assert_called_once_with(
spec.Exchange.Unbind(0, 'foo', 'bar', 'baz'), mock_callback,
[spec.Exchange.UnbindOk])
@mock.patch('pika.spec.Exchange.Unbind')
@mock.patch('pika.channel.Channel._rpc')
def test_exchange_unbind_rpc_request_nowait(self, rpc, unused):
self.obj._set_state(self.obj.OPEN)
mock_callback = mock.Mock()
self.obj.exchange_unbind(mock_callback, 'foo', 'bar', 'baz',
nowait=True)
rpc.assert_called_once_with(
spec.Exchange.Unbind(0, 'foo', 'bar', 'baz'), mock_callback, [])
    # --- Channel.Flow and channel state predicates --------------------------
    def test_flow_raises_channel_closed(self):
        self.assertRaises(exceptions.ChannelClosed, self.obj.flow, 'foo', True)
    def test_flow_raises_invalid_callback(self):
        self.obj._set_state(self.obj.OPEN)
        self.assertRaises(ValueError, self.obj.flow, 'foo', True)
    # NOTE: flow() registers the internal _on_flowok handler with _rpc, not
    # the user callback; the user callback is stored on _on_flowok_callback
    # (verified separately below in test_flow_on_flowok_callback).
    @mock.patch('pika.spec.Channel.Flow')
    @mock.patch('pika.channel.Channel._rpc')
    def test_flow_on_rpc_request(self, rpc, unused):
        self.obj._set_state(self.obj.OPEN)
        mock_callback = mock.Mock()
        self.obj.flow(mock_callback, True)
        rpc.assert_called_once_with(spec.Channel.Flow(True),
                                    self.obj._on_flowok, [spec.Channel.FlowOk])
    @mock.patch('pika.spec.Channel.Flow')
    @mock.patch('pika.channel.Channel._rpc')
    def test_flow_off_rpc_request(self, rpc, unused):
        self.obj._set_state(self.obj.OPEN)
        mock_callback = mock.Mock()
        self.obj.flow(mock_callback, False)
        rpc.assert_called_once_with(spec.Channel.Flow(False),
                                    self.obj._on_flowok, [spec.Channel.FlowOk])
    @mock.patch('pika.channel.Channel._rpc')
    def test_flow_on_flowok_callback(self, rpc):
        self.obj._set_state(self.obj.OPEN)
        mock_callback = mock.Mock()
        self.obj.flow(mock_callback, True)
        self.assertEqual(self.obj._on_flowok_callback, mock_callback)
    # is_closed / is_closing must reflect the internal state machine value.
    def test_is_closed_true(self):
        self.obj._set_state(self.obj.CLOSED)
        self.assertTrue(self.obj.is_closed)
    def test_is_closed_false(self):
        self.obj._set_state(self.obj.OPEN)
        self.assertFalse(self.obj.is_closed)
    def test_is_closing_true(self):
        self.obj._set_state(self.obj.CLOSING)
        self.assertTrue(self.obj.is_closing)
    def test_is_closing_false(self):
        self.obj._set_state(self.obj.OPEN)
        self.assertFalse(self.obj.is_closing)
    # open() must wire up the channel's frame callbacks exactly once.
    @mock.patch('pika.channel.Channel._rpc')
    def test_channel_open_add_callbacks_called(self, rpc):
        with mock.patch.object(self.obj, '_add_callbacks') as _add_callbacks:
            self.obj.open()
            _add_callbacks.assert_called_once_with()
    # --- Queue.Bind / Declare / Delete / Purge / Unbind ---------------------
    # Same pattern as the exchange tests: ChannelClosed when not open,
    # ValueError for a non-callable callback, and an exact _rpc call check
    # (empty reply list when nowait=True).
    def test_queue_bind_raises_channel_closed(self):
        self.assertRaises(exceptions.ChannelClosed, self.obj.queue_bind, None,
                          'foo', 'bar', 'baz')
    def test_queue_bind_raises_value_error_on_invalid_callback(self):
        self.obj._set_state(self.obj.OPEN)
        self.assertRaises(ValueError, self.obj.queue_bind, 'callback', 'foo',
                          'bar', 'baz')
    @mock.patch('pika.spec.Queue.Bind')
    @mock.patch('pika.channel.Channel._rpc')
    def test_queue_bind_rpc_request(self, rpc, unused):
        self.obj._set_state(self.obj.OPEN)
        mock_callback = mock.Mock()
        self.obj.queue_bind(mock_callback, 'foo', 'bar', 'baz')
        rpc.assert_called_once_with(spec.Queue.Bind(0, 'foo', 'bar', 'baz'),
                                    mock_callback, [spec.Queue.BindOk])
    @mock.patch('pika.spec.Queue.Bind')
    @mock.patch('pika.channel.Channel._rpc')
    def test_queue_bind_rpc_request_nowait(self, rpc, unused):
        self.obj._set_state(self.obj.OPEN)
        mock_callback = mock.Mock()
        self.obj.queue_bind(mock_callback, 'foo', 'bar', 'baz', nowait=True)
        rpc.assert_called_once_with(spec.Queue.Bind(0, 'foo', 'bar', 'baz'),
                                    mock_callback, [])
    def test_queue_declare_raises_channel_closed(self):
        self.assertRaises(exceptions.ChannelClosed, self.obj.queue_declare,
                          None,
                          queue='foo')
    def test_queue_declare_raises_value_error_on_invalid_callback(self):
        self.obj._set_state(self.obj.OPEN)
        self.assertRaises(ValueError, self.obj.queue_declare, 'callback', 'foo')
    # Queue.Declare is special: the expected reply carries the queue name as
    # an arguments dict so the callback only fires for this queue's DeclareOk.
    @mock.patch('pika.spec.Queue.Declare')
    @mock.patch('pika.channel.Channel._rpc')
    def test_queue_declare_rpc_request(self, rpc, unused):
        self.obj._set_state(self.obj.OPEN)
        mock_callback = mock.Mock()
        self.obj.queue_declare(mock_callback, 'foo')
        rpc.assert_called_once_with(spec.Queue.Declare(0, 'foo'), mock_callback,
                                    [(spec.Queue.DeclareOk, {'queue': 'foo'})])
    @mock.patch('pika.spec.Queue.Declare')
    @mock.patch('pika.channel.Channel._rpc')
    def test_queue_declare_rpc_request_nowait(self, rpc, unused):
        self.obj._set_state(self.obj.OPEN)
        mock_callback = mock.Mock()
        self.obj.queue_declare(mock_callback, 'foo', nowait=True)
        rpc.assert_called_once_with(spec.Queue.Declare(0, 'foo'),
                                    mock_callback, [])
    def test_queue_delete_raises_channel_closed(self):
        self.assertRaises(exceptions.ChannelClosed, self.obj.queue_delete,
                          queue='foo')
    def test_queue_delete_raises_value_error_on_invalid_callback(self):
        self.obj._set_state(self.obj.OPEN)
        self.assertRaises(ValueError, self.obj.queue_delete, 'callback', 'foo')
    @mock.patch('pika.spec.Queue.Delete')
    @mock.patch('pika.channel.Channel._rpc')
    def test_queue_delete_rpc_request(self, rpc, unused):
        self.obj._set_state(self.obj.OPEN)
        mock_callback = mock.Mock()
        self.obj.queue_delete(mock_callback, 'foo')
        rpc.assert_called_once_with(spec.Queue.Delete(0, 'foo'), mock_callback,
                                    [spec.Queue.DeleteOk])
    @mock.patch('pika.spec.Queue.Delete')
    @mock.patch('pika.channel.Channel._rpc')
    def test_queue_delete_rpc_request_nowait(self, rpc, unused):
        self.obj._set_state(self.obj.OPEN)
        mock_callback = mock.Mock()
        self.obj.queue_delete(mock_callback, 'foo', nowait=True)
        rpc.assert_called_once_with(spec.Queue.Delete(0, 'foo'),
                                    mock_callback, [])
    def test_queue_purge_raises_channel_closed(self):
        self.assertRaises(exceptions.ChannelClosed, self.obj.queue_purge,
                          queue='foo')
    def test_queue_purge_raises_value_error_on_invalid_callback(self):
        self.obj._set_state(self.obj.OPEN)
        self.assertRaises(ValueError, self.obj.queue_purge, 'callback', 'foo')
    @mock.patch('pika.spec.Queue.Purge')
    @mock.patch('pika.channel.Channel._rpc')
    def test_queue_purge_rpc_request(self, rpc, unused):
        self.obj._set_state(self.obj.OPEN)
        mock_callback = mock.Mock()
        self.obj.queue_purge(mock_callback, 'foo')
        rpc.assert_called_once_with(spec.Queue.Purge(0, 'foo'), mock_callback,
                                    [spec.Queue.PurgeOk])
    @mock.patch('pika.spec.Queue.Purge')
    @mock.patch('pika.channel.Channel._rpc')
    def test_queue_purge_rpc_request_nowait(self, rpc, unused):
        self.obj._set_state(self.obj.OPEN)
        mock_callback = mock.Mock()
        self.obj.queue_purge(mock_callback, 'foo', nowait=True)
        rpc.assert_called_once_with(spec.Queue.Purge(0, 'foo'),
                                    mock_callback, [])
    def test_queue_unbind_raises_channel_closed(self):
        self.assertRaises(exceptions.ChannelClosed, self.obj.queue_unbind, None,
                          'foo', 'bar', 'baz')
    def test_queue_unbind_raises_value_error_on_invalid_callback(self):
        self.obj._set_state(self.obj.OPEN)
        self.assertRaises(ValueError, self.obj.queue_unbind, 'callback', 'foo',
                          'bar', 'baz')
    @mock.patch('pika.spec.Queue.Unbind')
    @mock.patch('pika.channel.Channel._rpc')
    def test_queue_unbind_rpc_request(self, rpc, unused):
        self.obj._set_state(self.obj.OPEN)
        mock_callback = mock.Mock()
        self.obj.queue_unbind(mock_callback, 'foo', 'bar', 'baz')
        rpc.assert_called_once_with(spec.Queue.Unbind(0, 'foo', 'bar', 'baz'),
                                    mock_callback, [spec.Queue.UnbindOk])
    # --- Tx.Commit / Rollback / Select --------------------------------------
    def test_tx_commit_raises_channel_closed(self):
        self.assertRaises(exceptions.ChannelClosed, self.obj.tx_commit, None)
    # NOTE(review): the callback is passed into the Tx.* Method constructors
    # below (e.g. spec.Tx.Commit(mock_callback)). This mirrors the current
    # implementation under test (the spec classes are mocked, so any argument
    # is accepted) — verify this is intentional in pika.channel.
    @mock.patch('pika.spec.Tx.Commit')
    @mock.patch('pika.channel.Channel._rpc')
    def test_tx_commit_rpc_request(self, rpc, unused):
        self.obj._set_state(self.obj.OPEN)
        mock_callback = mock.Mock()
        self.obj.tx_commit(mock_callback)
        rpc.assert_called_once_with(spec.Tx.Commit(mock_callback),
                                    mock_callback, [spec.Tx.CommitOk])
    @mock.patch('pika.spec.Tx.Rollback')
    @mock.patch('pika.channel.Channel._rpc')
    def test_tx_rollback_rpc_request(self, rpc, unused):
        self.obj._set_state(self.obj.OPEN)
        mock_callback = mock.Mock()
        self.obj.tx_rollback(mock_callback)
        rpc.assert_called_once_with(spec.Tx.Rollback(mock_callback),
                                    mock_callback, [spec.Tx.RollbackOk])
    @mock.patch('pika.spec.Tx.Select')
    @mock.patch('pika.channel.Channel._rpc')
    def test_tx_select_rpc_request(self, rpc, unused):
        self.obj._set_state(self.obj.OPEN)
        mock_callback = mock.Mock()
        self.obj.tx_select(mock_callback)
        rpc.assert_called_once_with(spec.Tx.Select(mock_callback),
                                    mock_callback, [spec.Tx.SelectOk])
    # Test internal methods
    # _add_callbacks must register the channel-level frame handlers with the
    # connection's callback manager (one assert_any_call per handler).
    def test_add_callbacks_basic_cancel_empty_added(self):
        self.obj._add_callbacks()
        self.obj.callbacks.add.assert_any_call(self.obj.channel_number,
                                               spec.Basic.Cancel,
                                               self.obj._on_cancel, False)
def test_add_callbacks_basic_get_empty_added(self):
self.obj._add_callbacks()
print(self.obj.callbacks.add.__dict__)
self.obj.callbacks.add.assert_any_call(self.obj.channel_number,
spec.Basic.GetEmpty,
self.obj._on_getempty, False)
    def test_add_callbacks_channel_close_added(self):
        self.obj._add_callbacks()
        # Channel.Close is the only handler registered as a one-shot (True).
        self.obj.callbacks.add.assert_any_call(self.obj.channel_number,
                                               spec.Channel.Close,
                                               self.obj._on_close, True)
    def test_add_callbacks_channel_flow_added(self):
        self.obj._add_callbacks()
        self.obj.callbacks.add.assert_any_call(self.obj.channel_number,
                                               spec.Channel.Flow,
                                               self.obj._on_flow, False)
    # _cleanup must purge every callback registered for this channel number;
    # note the channel number is passed as a string to callbacks.cleanup.
    def test_cleanup(self):
        self.obj._cleanup()
        self.obj.callbacks.cleanup.assert_called_once_with(
            str(self.obj.channel_number))
    # _get_pending_msg returns the next message queued for a consumer tag and
    # removes it from the pending list (FIFO pop).
    def test_get_pending_message(self):
        key = 'foo'
        expectation = 'abc1234'
        self.obj._pending = {key: [expectation]}
        self.assertEqual(self.obj._get_pending_msg(key), expectation)
    def test_get_pending_message_item_popped(self):
        key = 'foo'
        expectation = 'abc1234'
        self.obj._pending = {key: [expectation]}
        self.obj._get_pending_msg(key)
        self.assertEqual(len(self.obj._pending[key]), 0)
    # --- _handle_content_frame ----------------------------------------------
    # A content-carrying method arrives as three frames: Method, then Header,
    # then Body. The frame dispatcher accumulates the first two and fires the
    # matching handler (_on_deliver / _on_getok / _on_return) on the Body.
    def test_handle_content_frame_method_returns_none(self):
        frame_value = frame.Method(1, spec.Basic.Deliver('ctag0', 1))
        self.assertEqual(self.obj._handle_content_frame(frame_value), None)
    def test_handle_content_frame_sets_method_frame(self):
        frame_value = frame.Method(1, spec.Basic.Deliver('ctag0', 1))
        self.obj._handle_content_frame(frame_value)
        self.assertEqual(self.obj.frame_dispatcher._method_frame, frame_value)
    def test_handle_content_frame_sets_header_frame(self):
        frame_value = frame.Header(1, 10, spec.BasicProperties())
        self.obj._handle_content_frame(frame_value)
        self.assertEqual(self.obj.frame_dispatcher._header_frame, frame_value)
    def test_handle_content_frame_basic_deliver_called(self):
        method_value = frame.Method(1, spec.Basic.Deliver('ctag0', 1))
        self.obj._handle_content_frame(method_value)
        header_value = frame.Header(1, 10, spec.BasicProperties())
        self.obj._handle_content_frame(header_value)
        body_value = frame.Body(1, b'0123456789')
        with mock.patch.object(self.obj, '_on_deliver') as deliver:
            self.obj._handle_content_frame(body_value)
            deliver.assert_called_once_with(method_value, header_value,
                                            b'0123456789')
    def test_handle_content_frame_basic_get_called(self):
        method_value = frame.Method(1, spec.Basic.GetOk('ctag0', 1))
        self.obj._handle_content_frame(method_value)
        header_value = frame.Header(1, 10, spec.BasicProperties())
        self.obj._handle_content_frame(header_value)
        body_value = frame.Body(1, b'0123456789')
        with mock.patch.object(self.obj, '_on_getok') as getok:
            self.obj._handle_content_frame(body_value)
            getok.assert_called_once_with(method_value, header_value,
                                          b'0123456789')
    def test_handle_content_frame_basic_return_called(self):
        method_value = frame.Method(1, spec.Basic.Return(999, 'Reply Text',
                                                         'exchange_value',
                                                         'routing.key'))
        self.obj._handle_content_frame(method_value)
        header_value = frame.Header(1, 10, spec.BasicProperties())
        self.obj._handle_content_frame(header_value)
        body_value = frame.Body(1, b'0123456789')
        with mock.patch.object(self.obj, '_on_return') as basic_return:
            self.obj._handle_content_frame(body_value)
            basic_return.assert_called_once_with(method_value, header_value,
                                                 b'0123456789')
    # _has_content distinguishes methods followed by header/body frames.
    def test_has_content_true(self):
        self.assertTrue(self.obj._has_content(spec.Basic.GetOk))
    def test_has_content_false(self):
        self.assertFalse(self.obj._has_content(spec.Basic.Ack))
    # --- _on_cancel / _on_cancelok ------------------------------------------
    # A server-sent Basic.Cancel must drop the consumer without recording it
    # in _cancelled (that set is for client-initiated cancels only).
    def test_on_cancel_not_appended_cancelled(self):
        consumer_tag = 'ctag0'
        frame_value = frame.Method(1, spec.Basic.Cancel(consumer_tag))
        self.obj._on_cancel(frame_value)
        self.assertNotIn(consumer_tag, self.obj._cancelled)
    def test_on_cancel_removed_consumer(self):
        consumer_tag = 'ctag0'
        self.obj._consumers[consumer_tag] = logging.debug
        frame_value = frame.Method(1, spec.Basic.Cancel(consumer_tag))
        self.obj._on_cancel(frame_value)
        self.assertNotIn(consumer_tag, self.obj._consumers)
    def test_on_cancelok_removed_consumer(self):
        consumer_tag = 'ctag0'
        self.obj._consumers[consumer_tag] = logging.debug
        frame_value = frame.Method(1, spec.Basic.CancelOk(consumer_tag))
        self.obj._on_cancelok(frame_value)
        self.assertNotIn(consumer_tag, self.obj._consumers)
    def test_on_cancelok_removed_pending(self):
        consumer_tag = 'ctag0'
        self.obj._pending[consumer_tag] = logging.debug
        frame_value = frame.Method(1, spec.Basic.CancelOk(consumer_tag))
        self.obj._on_cancelok(frame_value)
        self.assertNotIn(consumer_tag, self.obj._pending)
    # --- _on_deliver --------------------------------------------------------
    # When no consumer callback is registered yet, the delivery is stashed
    # via _add_pending_msg; once a consumer exists it is invoked directly.
    def test_on_deliver_pending_called(self):
        self.obj._set_state(self.obj.OPEN)
        consumer_tag = 'ctag0'
        mock_callback = mock.Mock()
        self.obj._pending[consumer_tag] = mock_callback
        method_value = frame.Method(1, spec.Basic.Deliver(consumer_tag, 1))
        header_value = frame.Header(1, 10, spec.BasicProperties())
        body_value = b'0123456789'
        with mock.patch.object(self.obj, '_add_pending_msg') as add_pending:
            self.obj._on_deliver(method_value, header_value, body_value)
            add_pending.assert_called_with(consumer_tag, method_value,
                                           header_value, body_value)
    def test_on_deliver_callback_called(self):
        self.obj._set_state(self.obj.OPEN)
        consumer_tag = 'ctag0'
        mock_callback = mock.Mock()
        self.obj._pending[consumer_tag] = list()
        self.obj._consumers[consumer_tag] = mock_callback
        method_value = frame.Method(1, spec.Basic.Deliver(consumer_tag, 1))
        header_value = frame.Header(1, 10, spec.BasicProperties())
        body_value = b'0123456789'
        self.obj._on_deliver(method_value, header_value, body_value)
        mock_callback.assert_called_with(self.obj, method_value.method,
                                         header_value.properties, body_value)
    # Messages delivered before the consumer registered must be replayed to
    # the callback (in order) once it is attached.
    def test_on_deliver_pending_callbacks_called(self):
        self.obj._set_state(self.obj.OPEN)
        consumer_tag = 'ctag0'
        mock_callback = mock.Mock()
        self.obj._pending[consumer_tag] = list()
        method_value = frame.Method(1, spec.Basic.Deliver(consumer_tag, 1))
        header_value = frame.Header(1, 10, spec.BasicProperties())
        body_value = b'0123456789'
        expectation = [mock.call(self.obj, method_value.method,
                                 header_value.properties, body_value)]
        self.obj._on_deliver(method_value, header_value, body_value)
        self.obj._consumers[consumer_tag] = mock_callback
        method_value = frame.Method(1, spec.Basic.Deliver(consumer_tag, 2))
        header_value = frame.Header(1, 10, spec.BasicProperties())
        body_value = b'0123456789'
        self.obj._on_deliver(method_value, header_value, body_value)
        expectation.append(mock.call(self.obj, method_value.method,
                                     header_value.properties, body_value))
        self.assertListEqual(mock_callback.call_args_list, expectation)
    # --- _on_getempty / _on_getok / _on_selectok / _on_eventok / _on_flow ---
    # NOTE(review): Basic.GetEmpty is passed as a class here but as an
    # instance (GetEmpty()) in test_on_eventok below — confirm both forms are
    # accepted by frame.Method or make them consistent.
    @mock.patch('logging.Logger.debug')
    def test_on_getempty(self, debug):
        method_frame = frame.Method(self.obj.channel_number,
                                    spec.Basic.GetEmpty)
        self.obj._on_getempty(method_frame)
        debug.assert_called_with('Received Basic.GetEmpty: %r', method_frame)
    # A GetOk with no pending Basic.Get callback is logged as an error.
    @mock.patch('logging.Logger.error')
    def test_on_getok_no_callback(self, error):
        method_value = frame.Method(1, spec.Basic.GetOk('ctag0', 1))
        header_value = frame.Header(1, 10, spec.BasicProperties())
        body_value = b'0123456789'
        self.obj._on_getok(method_value, header_value, body_value)
        error.assert_called_with('Basic.GetOk received with no active callback')
    def test_on_getok_callback_called(self):
        mock_callback = mock.Mock()
        self.obj._on_getok_callback = mock_callback
        method_value = frame.Method(1, spec.Basic.GetOk('ctag0', 1))
        header_value = frame.Header(1, 10, spec.BasicProperties())
        body_value = b'0123456789'
        self.obj._on_getok(method_value, header_value, body_value)
        mock_callback.assert_called_once_with(self.obj, method_value.method,
                                              header_value.properties,
                                              body_value)
    # The GetOk callback is single-shot: it must be cleared after firing.
    def test_on_getok_callback_reset(self):
        mock_callback = mock.Mock()
        self.obj._on_getok_callback = mock_callback
        method_value = frame.Method(1, spec.Basic.GetOk('ctag0', 1))
        header_value = frame.Header(1, 10, spec.BasicProperties())
        body_value = b'0123456789'
        self.obj._on_getok(method_value, header_value, body_value)
        self.assertIsNone(self.obj._on_getok_callback)
    @mock.patch('logging.Logger.debug')
    def test_on_confirm_selectok(self, debug):
        method_frame = frame.Method(self.obj.channel_number,
                                    spec.Confirm.SelectOk())
        self.obj._on_selectok(method_frame)
        debug.assert_called_with('Confirm.SelectOk Received: %r', method_frame)
    @mock.patch('logging.Logger.debug')
    def test_on_eventok(self, debug):
        method_frame = frame.Method(self.obj.channel_number,
                                    spec.Basic.GetEmpty())
        self.obj._on_eventok(method_frame)
        debug.assert_called_with('Discarding frame %r', method_frame)
    # Server-initiated Channel.Flow with no app-level handler only warns.
    @mock.patch('logging.Logger.warning')
    def test_on_flow(self, warning):
        self.obj._has_on_flow_callback = False
        method_frame = frame.Method(self.obj.channel_number,
                                    spec.Channel.Flow())
        self.obj._on_flow(method_frame)
        warning.assert_called_with('Channel.Flow received from server')
    @mock.patch('logging.Logger.warning')
    def test_on_flow_with_callback(self, warning):
        method_frame = frame.Method(self.obj.channel_number,
                                    spec.Channel.Flow())
        self.obj._on_flowok_callback = logging.debug
        self.obj._on_flow(method_frame)
        self.assertEqual(len(warning.call_args_list), 1)
    @mock.patch('logging.Logger.warning')
    def test_on_flowok(self, warning):
        method_frame = frame.Method(self.obj.channel_number,
                                    spec.Channel.FlowOk())
        self.obj._on_flowok(method_frame)
        warning.assert_called_with('Channel.FlowOk received with no active '
                                   'callbacks')
    # The FlowOk callback receives the 'active' flag and is then cleared.
    def test_on_flowok_calls_callback(self):
        method_frame = frame.Method(self.obj.channel_number,
                                    spec.Channel.FlowOk())
        mock_callback = mock.Mock()
        self.obj._on_flowok_callback = mock_callback
        self.obj._on_flowok(method_frame)
        mock_callback.assert_called_once_with(method_frame.method.active)
    def test_on_flowok_callback_reset(self):
        method_frame = frame.Method(self.obj.channel_number,
                                    spec.Channel.FlowOk())
        mock_callback = mock.Mock()
        self.obj._on_flowok_callback = mock_callback
        self.obj._on_flowok(method_frame)
        self.assertIsNone(self.obj._on_flowok_callback)
    # --- _on_openok / _on_return / _on_synchronous_complete -----------------
    # Channel.OpenOk transitions the state machine to OPEN whether or not an
    # open callback was registered; the callback (if any) gets the channel.
    def test_on_openok_no_callback(self):
        mock_callback = mock.Mock()
        self.obj._on_openok_callback = None
        method_value = frame.Method(1, spec.Channel.OpenOk())
        self.obj._on_openok(method_value)
        self.assertEqual(self.obj._state, self.obj.OPEN)
    def test_on_openok_callback_called(self):
        mock_callback = mock.Mock()
        self.obj._on_openok_callback = mock_callback
        method_value = frame.Method(1, spec.Channel.OpenOk())
        self.obj._on_openok(method_value)
        mock_callback.assert_called_once_with(self.obj)
    # Basic.Return is dispatched through the callback manager under the
    # synthetic '_on_return' key; if nothing consumed it, a warning is logged.
    def test_onreturn(self):
        method_value = frame.Method(1, spec.Basic.Return(999, 'Reply Text',
                                                         'exchange_value',
                                                         'routing.key'))
        header_value = frame.Header(1, 10, spec.BasicProperties())
        body_value = frame.Body(1, b'0123456789')
        self.obj._on_return(method_value, header_value, body_value)
        self.obj.callbacks.process.assert_called_with(self.obj.channel_number,
                                                      '_on_return',
                                                      self.obj,
                                                      self.obj,
                                                      method_value.method,
                                                      header_value.properties,
                                                      body_value)
    @mock.patch('logging.Logger.warning')
    def test_onreturn_warning(self, warning):
        method_value = frame.Method(1, spec.Basic.Return(999, 'Reply Text',
                                                         'exchange_value',
                                                         'routing.key'))
        header_value = frame.Header(1, 10, spec.BasicProperties())
        body_value = frame.Body(1, b'0123456789')
        self.obj.callbacks.process.return_value = False
        self.obj._on_return(method_value, header_value, body_value)
        warning.assert_called_with('Basic.Return received from server (%r, %r)',
                                   method_value.method, header_value.properties)
    # Completing a synchronous RPC must replay the next blocked request.
    @mock.patch('pika.channel.Channel._rpc')
    def test_on_synchronous_complete(self, rpc):
        mock_callback = mock.Mock()
        expectation = [spec.Queue.Unbind(0, 'foo', 'bar', 'baz'), mock_callback,
                       [spec.Queue.UnbindOk]]
        self.obj._blocked = collections.deque([expectation])
        self.obj._on_synchronous_complete(frame.Method(self.obj.channel_number,
                                                       spec.Basic.Ack(1)))
        rpc.assert_called_once_with(*expectation)
    # --- _rpc / _send_method / _set_state / _validate_channel_and_callback --
    def test_rpc_raises_channel_closed(self):
        self.assertRaises(exceptions.ChannelClosed, self.obj._rpc,
                          frame.Method(self.obj.channel_number,
                                       spec.Basic.Ack(1)))
    # While a synchronous request is in flight (_blocking set), new RPCs are
    # queued on _blocked instead of being sent.
    def test_rpc_while_blocking_appends_blocked_collection(self):
        self.obj._set_state(self.obj.OPEN)
        self.obj._blocking = spec.Confirm.Select()
        expectation = [frame.Method(self.obj.channel_number, spec.Basic.Ack(1)),
                       'Foo', None]
        self.obj._rpc(*expectation)
        self.assertIn(expectation, self.obj._blocked)
    # NOTE(review): despite its name this test asserts TypeError, matching
    # the implementation's actual validation of the acceptable_replies arg.
    def test_rpc_throws_value_error_with_unacceptable_replies(self):
        self.obj._set_state(self.obj.OPEN)
        self.assertRaises(TypeError, self.obj._rpc, spec.Basic.Ack(1),
                          logging.debug, 'Foo')
    def test_rpc_throws_type_error_with_invalid_callback(self):
        self.obj._set_state(self.obj.OPEN)
        self.assertRaises(TypeError, self.obj._rpc, spec.Channel.Open(1),
                          ['foo'], [spec.Channel.OpenOk])
    # Even without a user callback, _rpc registers _on_synchronous_complete
    # for each acceptable reply so the blocked queue can drain.
    def test_rpc_adds_on_synchronous_complete(self):
        self.obj._set_state(self.obj.OPEN)
        method_frame = spec.Channel.Open()
        self.obj._rpc(method_frame, None, [spec.Channel.OpenOk])
        self.obj.callbacks.add.assert_called_with(
            self.obj.channel_number, spec.Channel.OpenOk,
            self.obj._on_synchronous_complete,
            arguments=None)
    def test_rpc_adds_callback(self):
        self.obj._set_state(self.obj.OPEN)
        method_frame = spec.Channel.Open()
        mock_callback = mock.Mock()
        self.obj._rpc(method_frame, mock_callback, [spec.Channel.OpenOk])
        self.obj.callbacks.add.assert_called_with(self.obj.channel_number,
                                                  spec.Channel.OpenOk,
                                                  mock_callback,
                                                  arguments=None)
    # _send_method must prefix the args with this channel's number before
    # delegating to the connection.
    def test_send_method(self):
        expectation = [2, 3]
        with mock.patch.object(self.obj.connection,
                               '_send_method') as send_method:
            self.obj._send_method(*expectation)
            send_method.assert_called_once_with(
                *[self.obj.channel_number] + expectation)
    def test_set_state(self):
        self.obj._state = channel.Channel.CLOSED
        self.obj._set_state(channel.Channel.OPENING)
        self.assertEqual(self.obj._state, channel.Channel.OPENING)
    def test_validate_channel_and_callback_raises_channel_closed(self):
        self.assertRaises(exceptions.ChannelClosed,
                          self.obj._validate_channel_and_callback, None)
    def test_validate_channel_and_callback_raises_value_error_not_callable(self
                                                                           ):
        self.obj._set_state(self.obj.OPEN)
        self.assertRaises(ValueError, self.obj._validate_channel_and_callback,
                          'foo')
    # A remote Channel.Close must log the reply code and text at warning.
    @mock.patch('logging.Logger.warning')
    def test_on_close_warning(self, warning):
        method_frame = frame.Method(self.obj.channel_number,
                                    spec.Channel.Close(999, 'Test_Value'))
        self.obj._on_close(method_frame)
        warning.assert_called_with('Received remote Channel.Close (%s): %s',
                                   method_frame.method.reply_code,
                                   method_frame.method.reply_text)
| |
"""The dct module provide helpers functions to work with experimental diffraction contrast tomography data.
"""
import os
import h5py
import numpy as np
from scipy import ndimage
from matplotlib import pyplot as plt, cm
from pymicro.xray.experiment import ForwardSimulation
from pymicro.crystal.lattice import HklPlane
from pymicro.xray.xray_utils import lambda_keV_to_nm, radiograph, radiographs
from pymicro.crystal.microstructure import Grain, Orientation
class DctForwardSimulation(ForwardSimulation):
"""Class to represent a Forward Simulation."""
    def __init__(self, verbose=False):
        """Initialize a DCT forward simulation.

        :param bool verbose: activate verbose mode (False by default).
        """
        super(DctForwardSimulation, self).__init__('dct', verbose=verbose)
        self.hkl_planes = []  # HklPlane instances considered for diffraction
        self.check = 1  # grain id to display infos in verbose mode
        self.omegas = None  # omega angle grid, populated by setup()
        self.reflections = []  # per omega bin: list of [gid, (h, k, l)] entries
    def set_hkl_planes(self, hkl_planes):
        """Set the list of `HklPlane` instances used for the forward simulation.

        :param list hkl_planes: the lattice planes allowed to diffract.
        """
        self.hkl_planes = hkl_planes
    def set_diffracting_famillies(self, hkl_list):
        """Set the list of diffracting hkl planes using a set of families.

        Each family name in `hkl_list` is expanded to its full set of planes
        for the material's crystal symmetry, and every plane is attached to
        the sample's lattice. (Method name keeps its historical spelling for
        backward compatibility.)

        :param list hkl_list: a list of plane family identifiers (e.g. '111').
        """
        symmetry = self.exp.get_sample().get_material().get_symmetry()
        hkl_planes = []
        for hkl in hkl_list:
            # include_friedel_pairs=True: both (h, k, l) and (-h, -k, -l) are
            # returned by the family expansion.
            # NOTE(review): setup() also adds the +180 deg Friedel-pair angles
            # explicitly — verify the pairs are not double counted.
            planes = HklPlane.get_family(hkl, include_friedel_pairs=True, crystal_structure=symmetry)
            for plane in planes:  # fix the lattice
                plane.set_lattice(self.exp.get_sample().get_material())
            hkl_planes.extend(planes)
        self.set_hkl_planes(hkl_planes)
    def setup(self, omega_step, grain_ids=None):
        """Setup the forward simulation.

        Builds `self.omegas` (a regular grid over [0, 360) degrees) and
        `self.reflections`, a list with one bin per omega value where each
        bin holds ``[grain_id, (h, k, l)]`` entries for the reflections that
        occur between this omega value and the next.

        :param float omega_step: the angular integration step (in degrees) used to compute the diffraction conditions.
        :param list grain_ids: a list of grain ids to restrict the forward simulation (use all grains by default).
        """
        assert self.exp.source.min_energy == self.exp.source.max_energy  # monochromatic case
        lambda_keV = self.exp.source.max_energy
        self.omegas = np.linspace(0.0, 360.0, num=int(360.0 / omega_step), endpoint=False)
        self.reflections = []
        for omega in self.omegas:
            self.reflections.append([])
        if grain_ids:
            # make a list of the grains selected for the forward simulation
            grains = [self.exp.sample.microstructure.get_grain(gid) for gid in grain_ids]
        else:
            grains = self.exp.sample.microstructure.grains
        for g in grains:
            for plane in self.hkl_planes:
                # four-index (Bravais-Miller) form, used for display only
                (h, k, i, l) = HklPlane.three_to_four_indices(*plane.miller_indices())
                try:
                    (w1, w2) = g.dct_omega_angles(plane, lambda_keV, verbose=False)
                except ValueError:
                    # this plane never satisfies Bragg's law for this grain
                    if self.verbose:
                        print('plane {} does not fulfil the Bragg condition for grain {:d}'.format((h, k, i, l), g.id))
                    continue
                # add angles for Friedel pairs
                w3 = (w1 + 180.) % 360
                w4 = (w2 + 180.) % 360
                if self.verbose and g.id == self.check:
                    print('grain %d, angles for plane %d%d%d: w1=%.3f and w2=%.3f | delta=%.1f' % (g.id, h, k, l, w1, w2, w1-w2))
                    print('(%3d, %3d, %3d, %3d) -- %6.2f & %6.2f' % (h, k, i, l, w1, w2))
                # bin each angle: int(w / omega_step) maps an angle in
                # [0, 360) to its omega interval index
                # NOTE(review): assumes w1/w2 returned in [0, 360) — confirm
                # against Grain.dct_omega_angles
                self.reflections[int(w1 / omega_step)].append([g.id, (h, k, l)])
                self.reflections[int(w2 / omega_step)].append([g.id, (h, k, l)])
                self.reflections[int(w3 / omega_step)].append([g.id, (-h, -k, -l)])
                self.reflections[int(w4 / omega_step)].append([g.id, (-h, -k, -l)])
def load_grain(self, gid=1):
print('loading grain from file 4_grains/phase_01/grain_%04d.mat' % gid)
with h5py.File(os.path.join(self.exp.get_sample().data_dir, '4_grains/phase_01/grain_%04d.mat' % gid)) as gmat:
g = Grain(gid, Orientation.from_rodrigues(gmat['R_vector'][()]))
g.om_exp = gmat['om_exp'][0, :]
g.uv_exp = gmat['uv_exp'][:, :]
g.center = gmat['center'][:, 0]
try:
ref_included = gmat['proj/included'][0][0]
g.included = gmat[ref_included][0, :]
ref_ondet = gmat['proj/ondet'][0][0]
g.ondet = gmat[ref_ondet][0, :]
# grab the projection stack
ref_stack = gmat['proj']['stack'][0][0]
g.stack_exp = gmat[ref_stack][()].transpose(1, 2, 0) # now in [ndx, u, v] form
g.hklsp = gmat['allblobs/hklsp'][:, :]
except AttributeError:
# classic file organization
g.included = gmat['proj/included'][0, :]
g.ondet = gmat['proj/ondet'][0, :]
g.stack_exp = gmat['proj/stack'][()].transpose(1, 2, 0) # now in [ndx, u, v] form
# for the Ti7AL data set, we have to hack around the DCT + TT work in progress
#ref_hklsp = gmat['allblobs/hklsp'][()][0][0]
#g.hklsp = gmat[ref_hklsp][:, :]
g.hklsp = gmat['allblobs/hklsp'][:, :]
self.grain = g
if self.verbose:
print('experimental proj stack shape: {}'.format(g.stack_exp.shape))
def grain_projection_image(self, g_uv, g_proj):
"""Produce a 2D image placing all diffraction spots of a given grain at their respective position on the detector.
Spots outside the detector are are skipped while those only partially on the detector are cropped accordingly.
:param g_proj: image stack of the diffraction spots. The first axis is so that g_proj[0] is the first spot \
the second axis is the horizontal coordinate of the detector (u) and the third axis the vertical coordinate \
of the detector (v).
:param g_uv: list or array of the diffraction spot position.
:returns: a 2D composite image of all the diffraction spots.
"""
print(len(g_proj), g_uv.shape[1])
assert len(g_proj) == g_uv.shape[1]
image = np.zeros(self.exp.get_active_detector().get_size_px())
for i in range(len(g_proj)):
spot = g_proj[i]
if self.verbose:
print('i={0}, size of spot: {1}'.format(i, spot.shape))
print('placing diffraction spot at location {0}'.format(g_uv[:, i]))
add_to_image(image, spot, g_uv[:, i], self.verbose)
return image
def grain_projection_exp(self, gid=1):
"""Produce a composite image with all the experimental diffraction spots of this grain on the detector.
:param int gid: the number of the selected grain.
:returns: a 2D composite image of all the diffraction spots.
"""
#self.grain = self.exp.get_sample().get_microstructure().get_grain(gid)
if not hasattr(self, 'grain') or self.grain.id != gid:
# load the corresponding grain
self.load_grain(gid=gid)
return self.grain_projection_image(self.grain.uv_exp, self.grain.stack_exp)
def grain_projections(self, omegas, gid=1, data=None, hor_flip=False, ver_flip=False):
"""Compute the projections of a grain at different rotation angles.
The method compute each projection and concatenate them into a single 3D array in the form [n, u, v]
with n the number of angles.
:param list omegas: the list of omega angles to use (in degrees).
:param int gid: the id of the grain to project (1 default).
:param ndarray data: the data array representing the grain.
:param bool hor_flip: a flag to apply a horizontal flip.
:param bool ver_flip: a flag to apply a vertical flip.
:return: a 3D array containing the n projections.
"""
from scipy import ndimage
if data is None:
grain_ids = self.exp.get_sample().get_grain_ids()
print('binarizing grain %d' % gid)
data = np.where(grain_ids[ndimage.find_objects(grain_ids == gid)[0]] == gid, 1, 0)
print('shape of binary grain is {}'.format(data.shape))
stack_sim = radiographs(data, omegas)
stack_sim = stack_sim.transpose(2, 0, 1)[:, ::-1, ::-1]
# here we need to account for the detector flips (detector is always supposed to be perpendicular to the beam)
# by default (u, v) correspond to (-Y, -Z)
if hor_flip:
print('applying horizontal flip to the simulated image stack')
stack_sim = stack_sim[:, ::-1, :]
if ver_flip:
print('applying vertical flip to the simulated image stack')
stack_sim = stack_sim[:, :, ::-1]
return stack_sim
def grain_projection_simulation(self, gid=1):
"""Function to compute all the grain projection in DCT geometry and create a composite image.
:param int gid: the id of the grain to project (1 default).
"""
print('forward simulation of grain %d' % gid)
detector = self.exp.get_active_detector()
lambda_keV = self.exp.source.max_energy
lambda_nm = lambda_keV_to_nm(lambda_keV)
X = np.array([1., 0., 0.]) / lambda_nm
lattice = self.exp.get_sample().get_material()
if not hasattr(self, 'grain'):
# load the corresponding grain
self.load_grain(gid=gid)
# compute all the omega values
print('simulating diffraction spot positions on the detector')
omegas = np.zeros(2 * len(self.hkl_planes))
g_uv = np.zeros((2, 2 * len(self.hkl_planes)))
for i, plane in enumerate(self.hkl_planes):
#print(plane.miller_indices())
try:
w1, w2 = self.grain.dct_omega_angles(plane, lambda_keV, verbose=False)
except ValueError:
# plane does not fulfil the Bragg condition
continue
omegas[2 * i] = w1
omegas[2 * i + 1] = w2
for j in range(2):
omega = omegas[2 * i + j]
omegar = omega * np.pi / 180
R = np.array([[np.cos(omegar), -np.sin(omegar), 0], [np.sin(omegar), np.cos(omegar), 0], [0, 0, 1]])
gt = self.grain.orientation_matrix().transpose()
G = np.dot(R, np.dot(gt, plane.scattering_vector()))
K = X + G
# position of the grain at this rotation angle
g_pos_rot = np.dot(R, self.grain.center)
pg = detector.project_along_direction(K, g_pos_rot)
(up, vp) = detector.lab_to_pixel(pg)[0]
g_uv[:, 2 * i + j] = up, vp
# check detector flips
hor_flip = np.dot(detector.u_dir, [0, -1, 0]) < 0
ver_flip = np.dot(detector.v_dir, [0, 0, -1]) < 0
if self.verbose:
print(detector.u_dir)
print(detector.v_dir)
print('detector horizontal flip: %s' % hor_flip)
print('detector vertical flip: %s' % ver_flip)
# compute the projections
stack_sim = self.grain_projections(omegas, gid, hor_flip=hor_flip, ver_flip=ver_flip)
return self.grain_projection_image(g_uv, stack_sim)
def dct_projection(self, omega, include_direct_beam=True, att=5):
    """Compute a full DCT projection at a given omega angle.

    The projection combines the (attenuated) direct beam — the radiograph of
    the sample minus the diffracting grains — with the diffraction spot of
    every grain fulfilling the Bragg condition in this omega interval.

    :param float omega: rotation angle in degrees.
    :param bool include_direct_beam: flag to compute the transmission through the sample.
    :param float att: an attenuation factor used to limit the gray levels in the direct beam.
    :return: the dct projection as a 2D numpy array, or None if the
        reflections have not been computed yet.
    """
    if len(self.reflections) == 0:
        print('empty list of reflections, you should run the setup function first')
        return None
    grain_ids = self.exp.get_sample().get_grain_ids()
    detector = self.exp.get_active_detector()
    lambda_keV = self.exp.source.max_energy
    lattice = self.exp.get_sample().get_material()
    index = np.argmax(self.omegas > omega)
    dif_grains = self.reflections[index - 1]  # grains diffracting between omegas[index - 1] and omegas[index]
    # initialize image result; `np.float` was removed in numpy 1.24, use the builtin float
    full_proj = np.zeros(detector.get_size_px(), dtype=float)
    lambda_nm = lambda_keV_to_nm(lambda_keV)
    omegar = omega * np.pi / 180
    # rotation matrix about the vertical axis for this omega angle
    R = np.array([[np.cos(omegar), -np.sin(omegar), 0], [np.sin(omegar), np.cos(omegar), 0], [0, 0, 1]])
    if include_direct_beam:
        # add the direct beam part by computing the radiograph of the sample without the diffracting grains
        data_abs = np.where(grain_ids > 0, 1, 0)
        for (gid, (h, k, l)) in dif_grains:
            mask_dif = (grain_ids == gid)
            data_abs[mask_dif] = 0  # remove this grain from the absorption
        proj = radiograph(data_abs, omega)[:, ::-1]  # (u, v) axes correspond to (Y, -Z) for DCT detector
        add_to_image(full_proj, proj / att, np.array(full_proj.shape) // 2)
    # add diffraction spots
    X = np.array([1., 0., 0.]) / lambda_nm  # incident beam wave vector
    for (gid, (h, k, l)) in dif_grains:
        grain_data = np.where(grain_ids == gid, 1, 0)
        if np.sum(grain_data) < 1:
            print('skipping grain %d' % gid)
            continue
        # NOTE(review): ndimage.measurements is a deprecated scipy namespace; confirm
        # the project's scipy version before switching to ndimage.center_of_mass
        local_com = np.array(ndimage.measurements.center_of_mass(grain_data, grain_ids))
        print('local center of mass (voxel): {0}'.format(local_com))
        g_center_mm = detector.get_pixel_size() * (local_com - 0.5 * np.array(grain_ids.shape))
        print('center of mass (voxel): {0}'.format(local_com - 0.5 * np.array(grain_ids.shape)))
        print('center of mass (mm): {0}'.format(g_center_mm))
        # compute scattering vector rotated into the lab frame
        gt = self.exp.get_sample().get_microstructure().get_grain(gid).orientation_matrix().transpose()
        p = HklPlane(h, k, l, lattice)
        G = np.dot(R, np.dot(gt, p.scattering_vector()))
        K = X + G
        # position of the grain at this rotation angle
        g_pos_rot = np.dot(R, g_center_mm)
        pg = detector.project_along_direction(K, g_pos_rot)
        up, vp = detector.lab_to_pixel(pg)[0]
        if self.verbose:
            print('\n* gid=%d, (%d,%d,%d) plane, angle=%.1f' % (gid, h, k, l, omega))
            print('diffraction vector:', K)
            print('postion of the grain at omega=%.1f is ' % omega, g_pos_rot)
            print('up=%d, vp=%d for plane (%d,%d,%d)' % (up, vp, h, k, l))
        data_dif = grain_data[ndimage.find_objects(grain_ids == gid)[0]]
        proj_dif = radiograph(data_dif, omega)  # (Y, Z) coordinate system
        add_to_image(full_proj, proj_dif[:, ::-1], (up, vp), self.verbose)  # (u, v) axes correspond to (Y, -Z)
    return full_proj
def add_to_image(image, inset, uv, verbose=False):
    """Add an inset image to a master image at a specified position.

    The inset may be of any size and may overlap the master image only
    partially depending on the requested location; in that case it is
    cropped accordingly. Spots entirely outside the image are skipped.

    :param np.array image: the master image that will be modified in place.
    :param np.array inset: the inset to add to the image.
    :param tuple uv: the location (center) where to add the inset in the form (u, v).
    :param bool verbose: activate verbose mode (False by default).
    """
    # round the center to the closest integer value
    u, v = int(uv[0]), int(uv[1])
    su, sv = inset.shape
    # top-left corner of the inset in master image coordinates
    u0 = u - su // 2
    v0 = v - sv // 2
    # completely outside the detector area?
    if (u + su // 2 < 0) or (u0 > image.shape[0] - 1) or \
            (v + sv // 2 < 0) or (v0 > image.shape[1] - 1):
        if verbose:
            print('skipping this spot which is outside the detector area')
        return None  # spot is completely outside the detector area
    # crop the inset where it sticks out of the image bounds
    u_start, u_end = 0, su
    v_start, v_end = 0, sv
    if u0 < 0:
        u_start = int(1 - u0)
    elif u0 + su > image.shape[0] - 1:
        u_end = int(image.shape[0] - u0)
    if v0 < 0:
        v_start = int(1 - v0)
    elif v0 + sv > image.shape[1] - 1:
        v_end = int(image.shape[1] - v0)
    # now add the (possibly cropped) inset to the image
    image[u0 + u_start:u0 + u_end, v0 + v_start:v0 + v_end] += inset[u_start:u_end, v_start:v_end]
def merge_dct_scans(scan_list, samtz_list, use_mask=False, overlap=-1, root_dir='.', write_to_h5=True):
    """Merge two DCT scans.

    This function builds a `Microstructure` instance for each DCT scan and
    calls `merge_microstructures`. The overlap can be deduced from the samtz
    values or specified directly.

    :param list scan_list: a list with the two DCT scan names.
    :param list samtz_list: a list with the two samtz values (the order should match the scan names).
    :param bool use_mask: a flag to also merge the absorption masks.
    :param int overlap: the value to use for the overlap if not computed automatically.
    :param str root_dir: the root data folder.
    :param bool write_to_h5: flag to write the result of the merging operation to an HDF5 file.
    :return: A new `Microstructure` instance of the 2 merged scans.
    """
    from pymicro.crystal.microstructure import Microstructure
    import numpy as np
    import os
    import h5py
    scan_shapes = []  # warning, shapes will be in (z, y, x) form
    micros = []
    for scan in scan_list:
        scan_path = os.path.join(root_dir, scan, '5_reconstruction', 'phase_01_vol.mat')
        # open read-only (an explicit mode is required by recent h5py versions)
        with h5py.File(scan_path, 'r') as f:
            scan_shapes.append(f['vol'].shape)
            print(f['vol'].shape)
    # figure out the maximum cross section, reordered from (z, y, x) to (x, y, z)
    max_shape = np.array(scan_shapes).max(axis=0)[[2, 1, 0]]
    for scan in scan_list:
        # read microstructure for this scan
        dct_analysis_dir = os.path.join(root_dir, scan)
        print('processing scan %s' % scan)
        micro = Microstructure.from_dct(data_dir=dct_analysis_dir)
        print('voxel_size is {}'.format(micro.voxel_size))
        # pad both grain map and mask
        print('max shape is {}'.format(max_shape))
        print('vol shape is {}'.format(micro.grain_map.shape))
        offset = max_shape - micro.grain_map.shape
        offset[2] = 0  # do not pad along Z
        # bug fix: pad each axis by its own offset; the original computed the
        # trailing pad with the X axis sizes (max_shape[0] - shape[0]) for every axis
        padding = [(o // 2, o - o // 2) for o in offset]
        print('padding is {}'.format(padding))
        micro.grain_map = np.pad(micro.grain_map, padding, mode='constant')
        print('has mask ? {}'.format(hasattr(micro, 'mask')))
        if use_mask:
            micro.mask = np.pad(micro.mask, padding, mode='constant')
        elif hasattr(micro, 'mask'):
            print('deleting mask attribute since we do not want to use it')
            delattr(micro, 'mask')
        micros.append(micro)
    # find out the overlap region (based on the difference in samtz)
    overlap_from_samtz = int((samtz_list[1] + scan_shapes[1][0] // 2 * micros[1].voxel_size) / micros[1].voxel_size
                             - (samtz_list[0] - scan_shapes[0][0] // 2 * micros[0].voxel_size) / micros[0].voxel_size)
    print('vertical overlap deduced from samtz positions is %d voxels' % overlap_from_samtz)
    if overlap < 0:
        overlap = overlap_from_samtz
    print('using an actual overlap of %d voxels' % overlap)
    # we have prepared the 2 microstructures, now merge them
    merged_micro = Microstructure.merge_microstructures(micros, overlap, plot=True)
    if write_to_h5:
        # write the result
        merged_micro.to_h5()
    return merged_micro
def all_dif_spots(g_proj, g_uv, verbose=False):
    """Build a 2D image placing all diffraction spots at their respective
    positions on the detector.

    Spots outside the detector are skipped while those only partially on the
    detector are cropped accordingly.

    :param g_proj: image stack of the diffraction spots; g_proj[i] is the i-th
        spot, the second axis is the horizontal detector coordinate (u) and
        the third axis the vertical one (v).
    :param g_uv: list or array of the diffraction spot positions.
    :param bool verbose: activate verbose mode (False by default).
    :returns: a 2D composite image of all the diffraction spots.
    """
    # TODO add a detector object to account for the image size and the position of the direct beam
    composite = np.zeros((2048, 2048), dtype=g_proj.dtype)
    print(g_proj.shape[0], len(g_uv))
    assert g_proj.shape[0] == len(g_uv)
    for i, spot in enumerate(g_proj):
        if verbose:
            print('i={0}, size of spot: {1}'.format(i, spot.shape))
            print('placing diffraction spot at location {0}'.format(g_uv[i]))
        add_to_image(composite, spot, g_uv[i], verbose)
    return composite
def plot_all_dif_spots(gid, detector, hkl_miller=None, uv=None, lattice=None, lambda_keV=None,
                       spots=True, dif_spots_image=None, max_value=None, positions=True, debye_rings=True, suffix=''):
    """Plot the diffraction spot positions of one grain on the detector,
    grouped by hkl family, and save the figure as a pdf file.

    :param int gid: the grain id, used in the plot title and the output file name.
    :param detector: the detector instance (ref_pos, pixel_size and size are read).
    :param hkl_miller: list of the (h, k, l) indices of each reflection;
        required when `positions` or `debye_rings` is True.
    :param uv: array of the (u, v) spot positions, indexed like `hkl_miller`.
    :param lattice: the crystal lattice, needed to draw the Debye rings.
    :param float lambda_keV: the X-ray energy in keV, needed to draw the Debye rings.
    :param bool spots: overlay the composite diffraction spot image.
    :param dif_spots_image: the composite image displayed when `spots` is True.
    :param max_value: gray level cutoff for the image display (image maximum by default).
    :param bool positions: plot the spot positions as markers, one color per family.
    :param bool debye_rings: draw the Debye ring of each hkl family.
    :param str suffix: an optional suffix for the output pdf file name.
    """
    plt.figure(figsize=(13, 8))
    # plt.figure()
    if spots and dif_spots_image is not None:
        if not max_value:
            max_value = dif_spots_image.max()
        plt.imshow(dif_spots_image.T, cmap=cm.gray, vmin=0, vmax=max_value)
    families = []
    indices = []
    colors = 'crbgmy'  # crbgmycrbgmycrbgmycrbgmy' # use a cycler here
    # angular parameter used to draw the Debye ring circles
    t = np.linspace(0.0, 2 * np.pi, num=37)
    if positions or debye_rings:
        if hkl_miller is None:
            raise ValueError(
                'The list of miller indices of each reflection must be provided using variable g_hkl_miller')
        # group consecutive reflections into families based on absolute miller indices
        for i, (h, k, l) in enumerate(hkl_miller):
            l = [abs(h), abs(k), abs(l)]
            # l.sort() # miller indices are now sorted, should use the lattice symmetry here
            family_name = '%d%d%d' % (l[0], l[1], l[2])
            if families.count(family_name) == 0:
                families.append(family_name)
                indices.append(i)
        indices.append(len(hkl_miller))
        print(families, indices)
        # now plot each family
        for i in range(len(families)):
            family = families[i]
            c = colors[i % len(colors)]
            if positions and uv is not None:
                plt.plot(uv[indices[i]:indices[i + 1], 0], uv[indices[i]:indices[i + 1], 1],
                         's', color=c, label=family)
            if debye_rings and lattice is not None and lambda_keV:
                # the Bragg angle of this family gives the ring radius on the detector
                theta = HklPlane(int(family[0]), int(family[1]), int(family[2]), lattice).bragg_angle(lambda_keV)
                L = detector.ref_pos[0] / detector.pixel_size * np.tan(2 * theta)  # 2 theta distance on the detector
                print('2theta = %g, L = %g' % (2 * theta * 180 / np.pi, L))
                plt.plot(0.5 * detector.size[0] + L * np.cos(t), 0.5 * detector.size[1] + L * np.sin(t), '--', color=c)
    plt.title('grain %d diffraction spot locations on the detector' % gid)
    # plt.legend(numpoints=1, loc='center')
    plt.legend(numpoints=1, ncol=2, bbox_to_anchor=(1.02, 1), loc=2)
    # plt.axis('equal')
    plt.axis([0, 2047, 2047, 0])
    plt.savefig('g%d%s_difspot_positions.pdf' % (gid, suffix))
def output_tikzpicture(proj_dif, omegas, gid=1, d_uv=None, suffix=''):
    """Save each diffraction projection as a png image and write a LaTeX/tikz
    file laying all the miniatures out on a 4:3 page with their omega angles.

    :param proj_dif: stack of the diffraction projections, shape (N, u, v).
    :param omegas: the omega angle (degrees) associated with each projection.
    :param int gid: the grain id, used to name the output files.
    :param d_uv: target (u, v) size of the output images; a zero entry is
        replaced by the corresponding projection size. ``None`` (the default)
        keeps the projection size on both axes.
    :param str suffix: an optional suffix for the output file names.
    """
    # bug fix: the previous mutable default argument ([0, 0]) was modified in
    # place, leaking the computed sizes into subsequent calls; copy instead
    d_uv = [0, 0] if d_uv is None else list(d_uv)
    axes = ['horizontal', 'vertical']
    for i, d in enumerate(d_uv):
        if d:
            # pad the corresponding axis if needed
            p = (d - proj_dif.shape[i + 1]) // 2
            if p > 0:
                print('padding %s axis with %d zeros' % (axes[i], p))
                # bug fix: the padded array was previously assigned to an
                # unused local (proj_dif_pad), so the padding never applied
                if i == 0:
                    proj_dif = np.pad(proj_dif, ((0, 0), (p, p), (0, 0)), 'constant')
                else:
                    proj_dif = np.pad(proj_dif, ((0, 0), (0, 0), (p, p)), 'constant')
        else:
            d_uv[i] = proj_dif.shape[i + 1]
    print('output proj images will be {0}'.format(d_uv))
    N = proj_dif.shape[0]
    for i in range(N):
        proj_path = os.path.join('proj_dif', 'g%d_proj%s_%02d.png' % (gid, suffix, i))
        # proj_dif is in (u, v) form so as usual, we need to save the transposed array with imsave
        plt.imsave(proj_path,
                   proj_dif[i, (proj_dif.shape[1] - d_uv[0]) // 2:(proj_dif.shape[1] + d_uv[0]) // 2,
                   (proj_dif.shape[2] - d_uv[1]) // 2:(proj_dif.shape[2] + d_uv[1]) // 2].T,
                   cmap=cm.gray, origin='upper')
    # image ratio is 4/3 so we will use an n x m grid large enough for N miniatures
    L = 10.0  # cm
    n = 0
    while (3. / 4 * n ** 2) < N:
        n += 1
    m = int(3. * n / 4)
    # size of a picture (assuming square)
    l = L / n
    tex_path = os.path.join('proj_dif', 'g%d_proj%s.tex' % (gid, suffix))
    print('building a tikzpicture with {0}x{1} miniatures, output file {2}'.format(n, m, tex_path))
    # backslashes are doubled everywhere below: single '\P', '\i', '\c' are
    # invalid escape sequences (SyntaxWarning on modern Python); the written
    # bytes are unchanged
    with open(tex_path, 'w') as f:
        f.write('\\documentclass{article}')
        f.write('\\usepackage[latin1]{inputenc}\n')
        f.write('\\usepackage{tikz}\n')
        f.write('\\usetikzlibrary{shapes,arrows}\n')
        f.write('\\usepackage{xcolor}\n')
        f.write('\\pagecolor{black}\n')
        f.write('\\usepackage[active,tightpage]{preview}\n')
        f.write('\\PreviewEnvironment{tikzpicture}\n')
        f.write('\\setlength\\PreviewBorder{0pt}\n')
        f.write('\\begin{document}\n')
        f.write('\\pagestyle{empty}\n')
        f.write('\\sffamily\n')
        f.write('\\fontsize{5}{6}\n')
        f.write('\\begin{tikzpicture}[white]\n')
        for j in range(m):
            Y = -j * l
            for i in range(n):
                X = i * l
                index = j * n + i
                if index >= N:
                    continue  # skip last incomplete line
                f.write('\\node at (%.2f,%.2f) {\\includegraphics[width=%.2fcm]{g%d_proj%s_%02d.png}};\n' % (
                    X, Y, l, gid, suffix, index))
                f.write('\\node at (%.2f,%.2f) {$%.1f^\\circ$};\n' % (X, Y - 0.5 * l, omegas[index]))
        f.write('\\end{tikzpicture}\n')
        f.write('\\end{document}\n')
def tt_rock(scan_name, data_dir='.', n_topo=-1, mask=None, dark_factor=1.):
    """Compute the rocking curve of each topograph of a topotomography scan.

    Each of the TOMO_N topographs is made of `n_topo` frames; for every frame
    the dark-corrected intensity is summed over the (optionally masked) area.

    :param str scan_name: the name of the scan to process.
    :param str data_dir: the path to the data folder.
    :param int n_topo: the number of frames per topograph (deduced from the
        total frame count when negative).
    :param mask: optional binary mask of shape (Dim_1, Dim_2, TOMO_N)
        restricting the summed area of each topograph.
    :param float dark_factor: a multiplicative factor for the dark image.
    :return: a (TOMO_N, n_topo) numpy array with the summed intensities.
    """
    from pymicro.file.file_utils import edf_read
    # parse the info file, converting each value to int/float when possible
    infos = dict()
    with open(os.path.join(data_dir, scan_name, '%s.info' % scan_name)) as f:
        for line in f.readlines():
            tokens = line.split('=')
            try:
                value = int(tokens[1].strip())
            except ValueError:
                try:
                    value = float(tokens[1].strip())
                except ValueError:
                    value = tokens[1].strip()
            infos[tokens[0]] = value
    print(infos)
    if n_topo < 0:
        import glob
        # figure out the number of frames per topograph n_topo
        n_frames = len(glob.glob(os.path.join(data_dir, scan_name, '%s*.edf' % scan_name)))
        n_topo = int(n_frames / infos['TOMO_N'])
    print('number of frames to sum for a topograph = %d' % n_topo)
    # handle mask
    if mask is None:
        mask = np.ones((infos['Dim_1'], infos['Dim_2'], infos['TOMO_N']), dtype=np.uint8)
    else:
        # double check mask size
        if not (mask.shape[0] == infos['Dim_1'] and mask.shape[1] == infos['Dim_2']
                and mask.shape[2] == infos['TOMO_N']):
            print('wrong mask size: {}, should be ({}, {}, {})'.format(
                mask.shape, infos['Dim_1'], infos['Dim_2'], infos['TOMO_N']))
            mask = np.ones((infos['Dim_1'], infos['Dim_2'], infos['TOMO_N']), dtype=np.uint8)
    # load dark image
    dark = dark_factor * edf_read(os.path.join(data_dir, scan_name, 'darkend0000.edf'))
    # bug fix: the format string had no conversion specifier ('dark average: ' % x),
    # which raises TypeError at runtime
    print('dark average: %g' % np.mean(dark))
    # build the rocking curves by summing the masked intensity of each frame
    tt_rock = np.empty((infos['TOMO_N'], n_topo), dtype=float)
    for n in range(int(infos['TOMO_N'])):
        print('computing rocking curve %d' % (n + 1), end='\r')
        offset = n_topo * n
        for i in range(n_topo):
            index = offset + i + 1
            frame_path = os.path.join(data_dir, scan_name, '%s%04d.edf' % (scan_name, index))
            im = edf_read(frame_path) - dark
            tt_rock[n, i] = np.sum(im * mask[:, :, n])
    print('\ndone')
    return tt_rock
def tt_stack(scan_name, data_dir='.', save_edf=False, n_topo=-1, dark_factor=1.):
    """Build a topotomography stack from raw detector images.

    The number of images to sum for a topograph can be determined automatically
    from the total number of images present in `data_dir` or directly specified
    using the variable `n_topo`.

    :param str scan_name: the name of the scan to process.
    :param str data_dir: the path to the data folder.
    :param bool save_edf: flag to save the tt stack as an EDF file.
    :param int n_topo: the number of images to sum for a topograph.
    :param float dark_factor: a multiplicative factor for the dark image.
    :return: the stack as a (Dim_1, Dim_2, TOMO_N) numpy array.
    """
    from pymicro.file.file_utils import edf_read, edf_write
    # parse the info file first so TOMO_N is available to deduce n_topo
    infos = dict()
    with open(os.path.join(data_dir, scan_name, '%s.info' % scan_name)) as f:
        for line in f.readlines():
            tokens = line.split('=')
            # convert the value into int/float/str depending on the case
            try:
                value = int(tokens[1].strip())
            except ValueError:
                try:
                    value = float(tokens[1].strip())
                except ValueError:
                    value = tokens[1].strip()
            infos[tokens[0]] = value
    print(infos)
    if n_topo < 0:
        import glob
        # figure out the number of frames per topograph n_topo; fix: use the
        # actual number of topographs (TOMO_N, as tt_rock does and as the
        # docstring states) instead of a hard-coded 90
        n_frames = len(glob.glob(os.path.join(data_dir, scan_name, '%s*.edf' % scan_name)))
        n_topo = int(n_frames / infos['TOMO_N'])
    print('number of frames to sum for a topograph = %d' % n_topo)
    # load dark image
    dark = dark_factor * edf_read(os.path.join(data_dir, scan_name, 'darkend0000.edf'))
    # build the stack by combining individual images
    tt_stack = np.empty((infos['TOMO_N'], infos['Dim_1'], infos['Dim_2']))
    print(tt_stack[0].shape)
    for n in range(int(infos['TOMO_N'])):
        print('building topograph %d' % (n + 1), end='\r')
        topograph = np.zeros((infos['Dim_1'], infos['Dim_2']))
        offset = n_topo * n
        for i in range(n_topo):
            index = offset + i + 1
            frame_path = os.path.join(data_dir, scan_name, '%s%04d.edf' % (scan_name, index))
            im = edf_read(frame_path) - dark
            topograph += im
        tt_stack[n] = topograph
    tt_stack = tt_stack.transpose((1, 2, 0))
    print('\ndone')
    # save the data as edf if needed
    if save_edf:
        edf_write(tt_stack, os.path.join(data_dir, '%sstack.edf' % scan_name))
    return tt_stack
| |
# -*- coding: utf-8 -*-
"""`PEP 3101`_ introduced the :meth:`str.format` method, and what
would later be called "new-style" string formatting. For the sake of
explicit correctness, it is probably best to refer to Python's dual
string formatting capabilities as *bracket-style* and
*percent-style*. There is overlap, but one does not replace the
other.
* Bracket-style is more pluggable, slower, and uses a method.
* Percent-style is simpler, faster, and uses an operator.
Bracket-style formatting brought with it a much more powerful toolbox,
but it was far from a full one. :meth:`str.format` uses `more powerful
syntax`_, but `the tools and idioms`_ for working with
that syntax are not well-developed nor well-advertised.
``formatutils`` adds several functions for working with bracket-style
format strings:
* :class:`DeferredValue`: Defer fetching or calculating a value
until format time.
* :func:`get_format_args`: Parse the positional and keyword
arguments out of a format string.
* :func:`tokenize_format_str`: Tokenize a format string into
literals and :class:`BaseFormatField` objects.
* :func:`construct_format_field_str`: Assists in programmatic
construction of format strings.
* :func:`infer_positional_format_args`: Converts anonymous
references in 2.7+ format strings to explicit positional arguments
suitable for usage with Python 2.6.
.. _more powerful syntax: https://docs.python.org/2/library/string.html#format-string-syntax
.. _the tools and idioms: https://docs.python.org/2/library/string.html#string-formatting
.. _PEP 3101: https://www.python.org/dev/peps/pep-3101/
"""
# TODO: also include percent-formatting utils?
# TODO: include lithoxyl.formatters.Formatter (or some adaptation)?
from __future__ import print_function
import re
from string import Formatter
__all__ = ['DeferredValue', 'get_format_args', 'tokenize_format_str',
'construct_format_field_str', 'infer_positional_format_args',
'BaseFormatField']
_pos_farg_re = re.compile('({{)|' # escaped open-brace
'(}})|' # escaped close-brace
'({[:!.\[}])') # anon positional format arg
def construct_format_field_str(fname, fspec, conv):
    """Assemble a bracket-style format field from its parts: the field name
    (``fname``), spec (``fspec``) and conversion character (``conv``). See
    Python String Formatting for more info. Returns '' when *fname* is None.
    """
    if fname is None:
        return ''
    conv_part = '!' + conv if conv else ''
    spec_part = ':' + fspec if fspec else ''
    return '{' + fname + conv_part + spec_part + '}'
def split_format_str(fstr):
    """Do a very basic split of a format string into a list of
    (literal, field_string) pairs, where field_string is ``None`` for a
    trailing literal. For full tokenization, see :func:`tokenize_format_str`.
    """
    pairs = []
    for literal, fname, fspec, conv in Formatter().parse(fstr):
        if fname is None:
            pairs.append((literal, None))
        else:
            # rebuild the bracketed field from its parsed parts
            field = '{' + fname
            if conv:
                field += '!' + conv
            if fspec:
                field += ':' + fspec
            pairs.append((literal, field + '}'))
    return pairs
def infer_positional_format_args(fstr):
    """Takes format strings with anonymous positional arguments (e.g.,
    "{}" and "{:d}") and converts them into numbered ones for explicitness
    and compatibility with Python 2.6.

    Returns a string with the inferred positional arguments.
    """
    # TODO: memoize
    out = []
    next_anon = 0
    cursor = 0
    # look for {: or {! or {. or {[ or {}, plus escaped braces
    for match in _pos_farg_re.finditer(fstr):
        out.append(fstr[cursor:match.start()])
        cursor = match.end()
        token = match.group()
        if token in ('{{', '}}'):
            # escaped braces pass through untouched
            out.append(token)
        else:
            # number the anonymous field, keeping whatever followed the brace
            out.append('{%s%s' % (next_anon, token[1:]))
            next_anon += 1
    out.append(fstr[cursor:])
    return ''.join(out)
# This approach is hardly exhaustive but it works for most builtins
_INTCHARS = 'bcdoxXn'
_FLOATCHARS = 'eEfFgGn%'
_TYPE_MAP = dict([(x, int) for x in _INTCHARS] +
[(x, float) for x in _FLOATCHARS])
_TYPE_MAP['s'] = str
def get_format_args(fstr):
    """Turn a format string into two lists of arguments referenced by the
    format string: positional arguments and named arguments. Each list
    element is a pair of the argument name and its nominal type.

    >>> get_format_args("{noun} is {1:d} years old{punct}") == \
    ([(1, int)], [('noun', str), ('punct', str)])
    True
    """
    # TODO: memoize
    formatter = Formatter()
    positional, named, seen = [], [], set()

    def _record(name, type_char='s'):
        # record each referenced argument once, with its nominal type
        if name in seen:
            return
        seen.add(name)
        nominal_type = _TYPE_MAP.get(type_char, str)  # TODO: unicode
        try:
            positional.append((int(name), nominal_type))
        except ValueError:
            named.append((name, nominal_type))

    for _, fname, fspec, _ in formatter.parse(fstr):
        if fname is None:
            continue
        # reject compound ('a.b', 'a[0]') and anonymous ('') references
        parts = re.split('[.[]', fname)
        if len(parts) > 1:
            raise ValueError('encountered compound format arg: %r' % fname)
        if not parts[0]:
            raise ValueError('encountered anonymous positional argument')
        _record(fname, fspec[-1:])
        for _, subfname, _, _ in formatter.parse(fspec):
            # TODO: positional and anon args not allowed here.
            if subfname is not None:
                _record(subfname)
    return positional, named
def tokenize_format_str(fstr, resolve_pos=True):
    """Takes a format string and turns it into a list of alternating string
    literals and :class:`BaseFormatField` tokens.

    By default, anonymous positional references are inferred into explicit,
    numbered ones; set *resolve_pos* to ``False`` to disable this behavior.
    """
    if resolve_pos:
        fstr = infer_positional_format_args(fstr)
    tokens = []
    for literal, fname, fspec, conv in Formatter().parse(fstr):
        if literal:
            tokens.append(literal)
        if fname is not None:
            tokens.append(BaseFormatField(fname, fspec, conv))
    return tokens
class BaseFormatField(object):
    """A class representing a reference to an argument inside of a
    bracket-style format string. For instance, in ``"{greeting},
    world!"``, there is a field named "greeting".

    These fields can have many options applied to them. See the
    Python docs on `Format String Syntax`_ for the full details.

    .. _Format String Syntax: https://docs.python.org/2/library/string.html#string-formatting
    """
    def __init__(self, fname, fspec='', conv=None):
        self.set_fname(fname)
        self.set_fspec(fspec)
        self.set_conv(conv)

    def set_fname(self, fname):
        "Set the field name."
        # split off attribute access ('.') and indexing ('[') subpaths
        path_list = re.split('[.[]', fname)  # TODO
        self.base_name = path_list[0]
        self.fname = fname
        self.subpath = path_list[1:]
        # anonymous ('{}') or numbered ('{0}') references are positional
        self.is_positional = not self.base_name or self.base_name.isdigit()

    def set_fspec(self, fspec):
        "Set the field spec."
        fspec = fspec or ''
        subfields = []
        # a spec may itself contain nested fields, e.g. '{0:{width}}'
        for sublit, subfname, _, _ in Formatter().parse(fspec):
            if subfname is not None:
                subfields.append(subfname)
        self.subfields = subfields
        self.fspec = fspec
        # the last character of the spec is the presentation type, if any
        self.type_char = fspec[-1:]
        self.type_func = _TYPE_MAP.get(self.type_char, str)

    def set_conv(self, conv):
        """There are only two built-in converters: ``s`` and ``r``. They are
        somewhat rare and appear like ``"{ref!r}"``."""
        # TODO
        self.conv = conv
        self.conv_func = None  # TODO

    @property
    def fstr(self):
        "The current state of the field in string format."
        return construct_format_field_str(self.fname, self.fspec, self.conv)

    def __repr__(self):
        cn = self.__class__.__name__
        args = [self.fname]
        # fspec is only shown when it carries information (or conv forces it)
        if self.conv is not None:
            args.extend([self.fspec, self.conv])
        elif self.fspec != '':
            args.append(self.fspec)
        args_repr = ', '.join([repr(a) for a in args])
        return '%s(%s)' % (cn, args_repr)

    def __str__(self):
        return self.fstr
_UNSET = object()  # sentinel: distinguishes "not yet computed" from a computed None


class DeferredValue(object):
    """:class:`DeferredValue` is a wrapper type, used to defer computing
    values which would otherwise be expensive to stringify and
    format. This is most valuable in areas like logging, where one
    would not want to waste time formatting a value for a log message
    which will subsequently be filtered because the message's log
    level was DEBUG and the logger was set to only emit CRITICAL
    messages.

    The :class:`DeferredValue` is initialized with a callable that
    takes no arguments and returns the value, which can be of any
    type. By default DeferredValue only calls that callable once, and
    future references will get a cached value. This behavior can be
    disabled by setting *cache_value* to ``False``.

    Args:
        func (function): A callable that takes no arguments and
            computes the value being represented.
        cache_value (bool): Whether subsequent usages will call *func*
            again. Defaults to ``True``.

    >>> import sys
    >>> dv = DeferredValue(lambda: len(sys._current_frames()))
    >>> output = "works great in all {0} threads!".format(dv)

    PROTIP: To keep lines shorter, use: ``from formatutils import
    DeferredValue as DV``
    """
    def __init__(self, func, cache_value=True):
        self.func = func
        # bug fix: the constructor previously assigned True unconditionally,
        # silently ignoring cache_value=False
        self.cache_value = cache_value
        self._value = _UNSET

    def get_value(self):
        """Computes, optionally caches, and returns the value of the
        *func*. If ``get_value()`` has been called before, a cached
        value may be returned depending on the *cache_value* option
        passed to the constructor.
        """
        if self._value is _UNSET or not self.cache_value:
            value = self.func()
            if self.cache_value:
                self._value = value
        else:
            # bug fix: return the cached value on the cached path (previously
            # `value` was left unbound here)
            value = self._value
        return value

    def __int__(self):
        return int(self.get_value())

    def __float__(self):
        return float(self.get_value())

    def __str__(self):
        return str(self.get_value())

    def __unicode__(self):
        # Python 2 only; never invoked on Python 3
        return unicode(self.get_value())

    def __repr__(self):
        return repr(self.get_value())

    def __format__(self, fmt):
        value = self.get_value()
        pt = fmt[-1:]  # presentation type
        type_conv = _TYPE_MAP.get(pt, str)
        try:
            return value.__format__(fmt)
        except (ValueError, TypeError):
            # TODO: this may be overkill
            return type_conv(value).__format__(fmt)
# end formatutils.py
| |
import fileinput
from typing import Union
from dataclasses import dataclass
from copy import deepcopy
@dataclass
class Pair:
    """A snailfish number (AoC 2021 day 18): a binary tree node whose
    children are either nested ``Pair`` nodes or plain ints."""
    left: Union['Pair', int] = None
    right: Union['Pair', int] = None
    parent: 'Pair' = None

    def __str__(self) -> str:
        # children that are sub-pairs are abbreviated as 'node'; see tos() for the full form
        l = 'node' if type(self.left) is Pair else str(self.left)
        r = 'node' if type(self.right) is Pair else str(self.right)
        return f'[{l},{r}]'

    def addchild(self, c: Union['Pair', int]):
        """Attach `c` into the first free slot (left first, then right);
        asserts if both slots are already taken."""
        if type(c) is Pair:
            c.parent = self
        if self.left is None:
            self.left = c
        elif self.right is None:
            self.right = c
        else:
            assert False

    def _inorder(self):
        """Return the list of Pair nodes of this subtree in in-order
        (left subtree, self, right subtree)."""
        l = self.left._inorder() if type(self.left) is Pair else []
        r = self.right._inorder() if type(self.right) is Pair else []
        return l + [self] + r

    def _explode(self):
        """Explode this pair (assumed to have two int children): add its left
        and right values to the nearest int-bearing nodes on either side of
        the whole tree, then replace this pair by 0 in its parent."""
        print('exploding', self)
        # walk up to the root of the whole snailfish number
        root = self
        while root.parent:
            root = root.parent
        o = root._inorder()
        lval, rval = self.left, self.right
        # find the last node before self (in in-order) that holds an int child
        node = None
        for p in o:
            if p is self:
                break
            if type(p.right) is int or type(p.left) is int:
                node = p
        if node:
            # the right child is checked first
            if type(node.right) is int:
                node.right += lval
            elif type(node.left) is int:
                node.left += lval
        print('left', node)
        node = None
        # find the last node after self (popping from the end) that holds an int child
        while o:
            p = o.pop()
            if p is self:
                break
            if type(p.right) is int or type(p.left) is int:
                node = p
        print('right', node)
        if node:
            # the left child is checked first
            if type(node.left) is int:
                node.left += rval
            elif type(node.right) is int:
                node.right += rval
        # finally replace the exploded pair with the regular number 0
        if self.parent and self.parent.left is self:
            self.parent.left = 0
        elif self.parent and self.parent.right is self:
            self.parent.right = 0

    def _split(self, val):
        """Split a regular number into the pair [floor(val/2), ceil(val/2)]."""
        return Pair(val // 2, (val + 1) // 2)

    def _reduce(self, dep=0):
        """Try one reduction action (explode at depth 4, else split) and
        return True if one fired.
        NOTE(review): not used by reduce(), which applies _rec_explode and
        _rec_split so that every explode happens before any split."""
        # explode
        if dep == 4:
            self._explode()
            return True
        if type(self.left) is Pair:
            if self.left._reduce(dep + 1):
                return True
        if type(self.right) is Pair:
            if self.right._reduce(dep + 1):
                return True
        # split
        if type(self.left) is int and self.left >= 10:
            self.left = self._split(self.left)
            self.left.parent = self
            return True
        if type(self.right) is int and self.right >= 10:
            self.right = self._split(self.right)
            self.right.parent = self
            return True
        return False

    def _rec_explode(self, dep=0):
        """Explode the leftmost pair nested at depth 4; return True if one exploded."""
        if dep == 4:
            self._explode()
            return True
        if type(self.left) is Pair:
            if self.left._rec_explode(dep + 1):
                return True
        if type(self.right) is Pair:
            if self.right._rec_explode(dep + 1):
                return True
        return False

    def _rec_split(self):
        """Split the leftmost regular number >= 10; return True if one split."""
        if type(self.left) is int and self.left >= 10:
            self.left = self._split(self.left)
            self.left.parent = self
            return True
        elif type(self.left) is Pair:
            if self.left._rec_split():
                return True
        if type(self.right) is int and self.right >= 10:
            self.right = self._split(self.right)
            self.right.parent = self
            return True
        elif type(self.right) is Pair:
            if self.right._rec_split():
                return True
        return False

    def reduce(self):
        """Repeatedly apply reduction actions (explodes take priority over
        splits) until none applies; return self."""
        print('-----------------')
        print('reducing', self.tos())
        while True:
            print(self.tos())
            if self._rec_explode():
                continue
            elif self._rec_split():
                continue
            else:
                break
        return self

    def mag(self):
        """Return the magnitude 3 * left + 2 * right, computed recursively.
        NOTE: destructive — sub-pairs are collapsed into ints in place."""
        if type(self.left) is Pair:
            self.left = self.left.mag()
        if type(self.right) is Pair:
            self.right = self.right.mag()
        return self.left * 3 + self.right * 2

    def tos(self):
        """Return the full bracketed string form, e.g. '[[1,2],3]'."""
        if type(self.left) is int:
            l = str(self.left)
        else:
            l = self.left.tos()
        if type(self.right) is int:
            r = str(self.right)
        else:
            r = self.right.tos()
        return f'[{l},{r}]'
def toi(c: str):
    """Convert a single character to an int when it is a digit; otherwise
    return the character unchanged (brackets stay as tokens)."""
    return int(c) if c.isdigit() else c
def parse(l):
    """Parse one snailfish-number line (e.g. '[[1,2],3]') into a `Pair` tree
    and return its root node."""
    root = None
    stack = []
    # commas are separators only; every other character is a token
    for tok in (toi(c) for c in l if c != ','):
        if tok == '[':
            node = Pair()
            if stack:
                stack[-1].addchild(node)
            stack.append(node)
        elif tok == ']':
            # the pair on top of the stack is complete
            root = stack.pop()
        else:
            assert len(stack) > 0
            stack[-1].addchild(tok)
    return root
lines = [parse(l.strip()) for l in fileinput.input()]
def reducer(a, b):
    """Add two snailfish numbers without mutating the originals and return
    the fully reduced sum."""
    # deep-copy so reduce()/mag() cannot mutate the caller's trees
    left, right = deepcopy(a), deepcopy(b)
    total = Pair(left, right)
    left.parent = total
    right.parent = total
    reduced = total.reduce()
    print(reduced.tos())
    return reduced
print(max(reducer(a, b).mag() for a in lines for b in lines if a is not b))
| |
"""
Search indexing classes to index into Elasticsearch.
Django settings that should be defined:
`ES_HOSTS`: A list of hosts where Elasticsearch lives. E.g.
['192.168.1.1:9200', '192.168.2.1:9200']
`ES_DEFAULT_NUM_REPLICAS`: An integer of the number of replicas.
`ES_DEFAULT_NUM_SHARDS`: An integer of the number of shards.
TODO: Handle page removal case in Page.
"""
import datetime
from elasticsearch import Elasticsearch, exceptions
from elasticsearch.helpers import bulk_index
from django.conf import settings
class Index(object):
"""
Base class to define some common methods across indexes.
"""
# The _index and _type define the URL path to Elasticsearch, e.g.:
# http://localhost:9200/{_index}/{_type}/_search
_index = 'readthedocs'
_type = None
def __init__(self):
    # one Elasticsearch client per index instance; ES_HOSTS lists the cluster nodes
    self.es = Elasticsearch(settings.ES_HOSTS)
def get_settings(self, settings_override=None):
    """Return the settings dict to be passed to ES create_index.

    If `settings_override` is provided, its entries override the defaults
    defined here.
    """
    index_settings = {
        'number_of_replicas': settings.ES_DEFAULT_NUM_REPLICAS,
        'number_of_shards': settings.ES_DEFAULT_NUM_SHARDS,
        'refresh_interval': '5s',
        'store.compress.tv': True,
        'store.compress.stored': True,
        'analysis': self.get_analysis(),
    }
    index_settings.update(settings_override or {})
    return index_settings
def get_analysis(self):
"""
Returns the analysis dict to be used in settings for create_index.
For languages that ES supports we define either the minimal or light
stemming, which isn't as aggresive as the snowball stemmer. We also
define the stopwords for that language.
For all languages we've customized we're using the ICU plugin.
"""
analyzers = {}
filters = {}
# The default is used for fields that need ICU but are composed of
# many languages.
analyzers['default_icu'] = {
'type': 'custom',
'tokenizer': 'icu_tokenizer',
'filter': ['word_delimiter', 'icu_folding', 'icu_normalizer'],
}
# Customize the word_delimiter filter to set various options.
filters['custom_word_delimiter'] = {
'type': 'word_delimiter',
'preserve_original': True,
}
return {
'analyzer': analyzers,
'filter': filters,
}
def timestamped_index(self):
return '{0}-{1}'.format(
self._index, datetime.datetime.now().strftime('%Y%m%d%H%M%S'))
def create_index(self, index=None):
"""
Creates index.
This uses `get_settings` and `get_mappings` to define the index.
"""
index = index or self._index
body = {
'settings': self.get_settings(),
}
self.es.indices.create(index=index, body=body)
def put_mapping(self, index=None):
index = index or self._index
self.es.indices.put_mapping(self._type, self.get_mapping(), index)
def bulk_index(self, data, index=None, chunk_size=500, parent=None,
routing=None):
"""
Given a list of documents, uses Elasticsearch bulk indexing.
For each doc this calls `extract_document`, then indexes.
`chunk_size` defaults to the elasticsearch lib's default. Override per
your document size as needed.
"""
index = index or self._index
docs = []
for d in data:
source = self.extract_document(d)
doc = {
'_index': index,
'_type': self._type,
'_id': source['id'],
'_source': source,
}
if parent:
doc['_parent'] = parent
if routing:
doc['_routing'] = routing
docs.append(doc)
# TODO: This doesn't work with the new ES setup.
bulk_index(self.es, docs, chunk_size=chunk_size)
def index_document(self, data, index=None, parent=None, routing=None):
doc = self.extract_document(data)
kwargs = {
'index': index or self._index,
'doc_type': self._type,
'body': doc,
'id': doc['id']
}
if parent:
kwargs['parent'] = parent
if routing:
kwargs['routing'] = routing
self.es.index(**kwargs)
def delete_document(self, body, index=None, parent=None, routing=None):
kwargs = {
'index': index or self._index,
'doc_type': self._type,
'body': body,
}
if parent:
kwargs['parent'] = parent
if routing:
kwargs['routing'] = routing
return self.es.delete_by_query(**kwargs)
def get_mapping(self):
"""
Returns the mapping for this _index and _type.
"""
raise NotImplementedError()
def extract_document(self, data):
"""
Extracts the Elasticsearch document for this object instance.
"""
raise NotImplementedError()
def update_aliases(self, new_index, delete=True):
"""
Points `_index` to `new_index` and deletes `_index` if delete=True.
The ES `update_aliases` is atomic.
"""
old_index = None
# Get current alias, if any.
try:
aliases = self.es.indices.get_alias(name=self._index)
if aliases and aliases.keys():
old_index = aliases.keys()[0]
except exceptions.NotFoundError:
pass
actions = []
if old_index:
actions.append({'remove': {'index': old_index,
'alias': self._index}})
actions.append({'add': {'index': new_index, 'alias': self._index}})
self.es.indices.update_aliases(body={'actions': actions})
# Delete old index if any and if specified.
if delete and old_index:
self.es.indices.delete(index=old_index)
def search(self, body, **kwargs):
return self.es.search(index=self._index, doc_type=self._type,
body=body, **kwargs)
class ProjectIndex(Index):

    """Search index configuration for project documents."""

    _type = 'project'

    def get_mapping(self):
        """Return the ES mapping for the project doc type."""
        keyword = {'type': 'string', 'index': 'not_analyzed'}
        analyzed = {'type': 'string', 'analyzer': 'default_icu'}
        properties = {
            'id': {'type': 'long'},
            'name': dict(analyzed),
            'description': dict(analyzed),
            'slug': dict(keyword),
            'lang': dict(keyword),
            'tags': dict(keyword),
            'privacy': dict(keyword),
            'author': {
                'type': 'string',
                'analyzer': 'default_icu',
                'fields': {
                    'raw': {
                        'type': 'string',
                        'index': 'not_analyzed',
                    },
                },
            },
            'url': dict(keyword),
            # Weight field enhances relevancy scoring.
            'weight': {'type': 'float'},
        }
        return {
            self._type: {
                # Disable _all field to reduce index size.
                '_all': {'enabled': False},
                'properties': properties,
            }
        }

    def extract_document(self, data):
        """Build the ES document for one project from a data dict."""
        attrs = ('id', 'name', 'slug', 'description', 'lang', 'tags',
                 'author', 'url')
        doc = {attr: data.get(attr, '') for attr in attrs}
        # Add project boost.
        doc['weight'] = data.get('weight', 1.0)
        return doc
class PageIndex(Index):

    """Search index configuration for page documents (children of projects)."""

    _type = 'page'
    _parent = 'project'

    def get_mapping(self):
        """Return the ES mapping for the page doc type."""
        keyword = {'type': 'string', 'index': 'not_analyzed'}
        analyzed = {'type': 'string', 'analyzer': 'default_icu'}
        properties = {
            'id': dict(keyword),
            'sha': dict(keyword),
            'project': dict(keyword),
            'version': dict(keyword),
            'path': dict(keyword),
            'taxonomy': dict(keyword),
            'commit': dict(keyword),
            'title': dict(analyzed),
            'headers': dict(analyzed),
            'content': dict(analyzed),
            # Weight field enhances relevancy scoring.
            'weight': {'type': 'float'},
        }
        return {
            self._type: {
                # Disable _all field to reduce index size.
                '_all': {'enabled': False},
                # Associate a page with a project.
                '_parent': {'type': self._parent},
                'properties': properties,
            }
        }

    def extract_document(self, data):
        """Build the ES document for one page from a data dict."""
        attrs = ('id', 'project', 'title', 'headers', 'version', 'path',
                 'content', 'taxonomy', 'commit')
        doc = {attr: data.get(attr, '') for attr in attrs}
        # Add page boost.
        doc['weight'] = data.get('weight', 1.0)
        return doc
class SectionIndex(Index):

    """Search index configuration for section documents (children of pages)."""

    _type = 'section'
    _parent = 'page'

    def get_mapping(self):
        """Return the ES mapping for the section doc type."""
        keyword = {'type': 'string', 'index': 'not_analyzed'}
        analyzed = {'type': 'string', 'analyzer': 'default_icu'}
        properties = {
            'id': dict(keyword),
            'project': dict(keyword),
            'version': dict(keyword),
            'path': dict(keyword),
            'page_id': dict(keyword),
            'commit': dict(keyword),
            'title': dict(analyzed),
            'content': dict(analyzed),
            'blocks': {
                'type': 'object',
                'properties': {
                    'code': dict(analyzed),
                },
            },
            # Weight field enhances relevancy scoring.
            'weight': {'type': 'float'},
        }
        # A 'suggest' completion field used to live in this mapping;
        # re-add it here if/when suggestion support is needed again.
        return {
            self._type: {
                # Disable _all field to reduce index size.
                '_all': {'enabled': False},
                # Associate a section with a page.
                '_parent': {'type': self._parent},
                'properties': properties,
            }
        }

    def extract_document(self, data):
        """Build the ES document for one section from a data dict."""
        attrs = ('id', 'project', 'title', 'page_id', 'version', 'path',
                 'content', 'commit')
        doc = {attr: data.get(attr, '') for attr in attrs}
        # Add page boost.
        doc['weight'] = data.get('weight', 1.0)
        return doc
| |
"""
This module presents an interface to use the glm implemented in
nistats.regression.
It contains the GLM and contrast classes that are meant to be the main objects
of fMRI data analyses.
Author: Bertrand Thirion, Martin Perez-Guevara, 2016
"""
import glob
import json
import os
import sys
import time
from warnings import warn
import numpy as np
import pandas as pd
from nibabel import Nifti1Image
from nibabel.onetime import setattr_on_read
from sklearn.base import (BaseEstimator,
clone,
TransformerMixin,
)
from sklearn.externals.joblib import Memory
from nilearn.input_data import NiftiMasker
from nilearn._utils import CacheMixin
from nilearn._utils.niimg_conversions import check_niimg
from sklearn.externals.joblib import (Parallel,
delayed,
)
from .contrasts import (_compute_fixed_effect_contrast,
expression_to_contrast_vector)
from .design_matrix import make_first_level_design_matrix
from .regression import (ARModel,
OLSModel,
SimpleRegressionResults,
RegressionResults
)
from .utils import (_basestring,
_check_run_tables,
_check_events_file_uses_tab_separators,
get_bids_files,
parse_bids_filename,
get_data
)
from nistats._utils.helpers import replace_parameters
def mean_scaling(Y, axis=0):
    """Scaling of the data to have percent of baseline change along the
    specified axis

    Parameters
    ----------
    Y : array of shape (n_time_points, n_voxels)
        The input data.

    axis : int, optional
        Axis along which the mean is computed. Defaults to 0 (over time).

    Returns
    -------
    Y : array of shape (n_time_points, n_voxels),
        The data after mean-scaling, de-meaning and multiplication by 100.

    mean : array of shape (n_voxels,)
        The data mean.
    """
    mean = Y.mean(axis=axis)
    if (mean == 0).any():
        # Fix: the concatenated fragments previously ran together without
        # spaces ("observed.The data..."), producing an unreadable warning.
        warn('Mean values of 0 observed. '
             'The data have probably been centered. '
             'Scaling might not work as expected.')
    # Clamp the baseline away from zero to avoid division blow-ups.
    mean = np.maximum(mean, 1)
    Y = 100 * (Y / mean - 1)
    return Y, mean
def _ar_model_fit(X, val, Y):
    """Wrapper for fit method of ARModel to allow joblib parallelization"""
    # Kept as a module-level function (not a method) so joblib can pickle it
    # when dispatching one job per AR coefficient value.
    return ARModel(X, val).fit(Y)
def run_glm(Y, X, noise_model='ar1', bins=100, n_jobs=1, verbose=0):
    """ GLM fit for an fMRI data matrix

    Parameters
    ----------
    Y : array of shape (n_time_points, n_voxels)
        The fMRI data.

    X : array of shape (n_time_points, n_regressors)
        The design matrix.

    noise_model : {'ar1', 'ols'}, optional
        The temporal variance model. Defaults to 'ar1'.

    bins : int, optional
        Maximum number of discrete bins for the AR(1) coef histogram.

    n_jobs : int, optional
        The number of CPUs to use to do the computation. -1 means
        'all CPUs'.

    verbose : int, optional
        The verbosity level. Default is 0.

    Returns
    -------
    labels : array of shape (n_voxels,),
        A map of values on voxels used to identify the corresponding model.

    results : dict,
        Keys correspond to the different labels values
        values are RegressionResults instances corresponding to the voxels.
    """
    acceptable_noise_models = ['ar1', 'ols']
    if noise_model not in acceptable_noise_models:
        raise ValueError(
            "Acceptable noise models are {0}. You provided "
            "'noise_model={1}'".format(acceptable_noise_models,
                                       noise_model)
        )
    if Y.shape[0] != X.shape[0]:
        raise ValueError('The number of rows of Y '
                         'should match the number of rows of X.'
                         ' You provided X with shape {0} '
                         'and Y with shape {1}'.
                         format(X.shape, Y.shape))

    # Create the model
    ols_result = OLSModel(X).fit(Y)
    if noise_model == 'ar1':
        # compute and discretize the AR1 coefs
        ar1 = (
            (ols_result.residuals[1:]
             * ols_result.residuals[:-1]).sum(axis=0)
            / (ols_result.residuals ** 2).sum(axis=0)
        )
        del ols_result
        # Fix: `np.int` was deprecated and removed in NumPy >= 1.24; the
        # builtin `int` is the exact equivalent here.
        ar1 = (ar1 * bins).astype(int) * 1. / bins
        # Fit the AR model according to current AR(1) estimates
        results = {}
        labels = ar1
        # Parallelize by creating a job per ARModel
        vals = np.unique(ar1)
        ar_result = Parallel(n_jobs=n_jobs, verbose=verbose)(
            delayed(_ar_model_fit)(X, val, Y[:, labels == val])
            for val in vals)
        for val, result in zip(vals, ar_result):
            results[val] = result
        del vals
        del ar_result
    else:
        labels = np.zeros(Y.shape[1])
        results = {0.0: ols_result}
    return labels, results
class FirstLevelModel(BaseEstimator, TransformerMixin, CacheMixin):
    """ Implementation of the General Linear Model
    for single session fMRI data.
    Parameters
    ----------
    t_r : float
        This parameter indicates repetition times of the experimental runs.
        In seconds. It is necessary to correctly consider times in the design
        matrix. This parameter is also passed to nilearn.signal.clean.
        Please see the related documentation for details.
    slice_time_ref : float, optional (default 0.)
        This parameter indicates the time of the reference slice used in the
        slice timing preprocessing step of the experimental runs. It is
        expressed as a percentage of the t_r (time repetition), so it can have
        values between 0. and 1.
    hrf_model : {'spm', 'spm + derivative', 'spm + derivative + dispersion',
        'glover', 'glover + derivative', 'glover + derivative + dispersion',
        'fir', None}
        String that specifies the hemodynamic response function.
        Defaults to 'glover'.
    drift_model : string, optional
        This parameter specifies the desired drift model for the design
        matrices. It can be 'polynomial', 'cosine' or None.
    high_pass : float, optional
        This parameter specifies the cut frequency of the high-pass filter in
        Hz for the design matrices. Used only if drift_model is 'cosine'.
    drift_order : int, optional
        This parameter specifices the order of the drift model (in case it is
        polynomial) for the design matrices.
    fir_delays : array of shape(n_onsets) or list, optional
        In case of FIR design, yields the array of delays used in the FIR
        model, in scans.
    min_onset : float, optional
        This parameter specifies the minimal onset relative to the design
        (in seconds). Events that start before (slice_time_ref * t_r +
        min_onset) are not considered.
    mask_img : Niimg-like, NiftiMasker object or False, optional
        Mask to be used on data. If an instance of masker is passed,
        then its mask will be used. If no mask is given,
        it will be computed automatically by a NiftiMasker with default
        parameters. If False is given then the data will not be masked.
    target_affine : 3x3 or 4x4 matrix, optional
        This parameter is passed to nilearn.image.resample_img.
        Please see the related documentation for details.
    target_shape : 3-tuple of integers, optional
        This parameter is passed to nilearn.image.resample_img.
        Please see the related documentation for details.
    smoothing_fwhm : float, optional
        If smoothing_fwhm is not None, it gives the size in millimeters of
        the spatial smoothing to apply to the signal.
    memory : string, optional
        Path to the directory used to cache the masking process and the glm
        fit. By default, no caching is done.
        Creates instance of joblib.Memory.
    memory_level : integer, optional
        Rough estimator of the amount of memory used by caching. Higher value
        means more memory for caching.
    standardize : boolean, optional
        If standardize is True, the time-series are centered and normed:
        their variance is put to 1 in the time dimension.
    signal_scaling : False, int or (int, int), optional,
        If not False, fMRI signals are
        scaled to the mean value of scaling_axis given,
        which can be 0, 1 or (0, 1).
        0 refers to mean scaling each voxel with respect to time,
        1 refers to mean scaling each time point with respect to all voxels &
        (0, 1) refers to scaling with respect to voxels and time,
        which is known as grand mean scaling.
        Incompatible with standardize (standardize=False is enforced when
        signal_scaling is not False).
    noise_model : {'ar1', 'ols'}, optional
        The temporal variance model. Defaults to 'ar1'
    verbose : integer, optional
        Indicate the level of verbosity. By default, nothing is printed.
        If 0 prints nothing. If 1 prints progress by computation of
        each run. If 2 prints timing details of masker and GLM. If 3
        prints masker computation details.
    n_jobs : integer, optional
        The number of CPUs to use to do the computation. -1 means
        'all CPUs', -2 'all CPUs but one', and so on.
    minimize_memory : boolean, optional
        Gets rid of some variables on the model fit results that are not
        necessary for contrast computation and would only be useful for
        further inspection of model details. This has an important impact
        on memory consumption. True by default.
    subject_label : string, optional
        This id will be used to identify a `FirstLevelModel` when passed to
        a `SecondLevelModel` object.
    Attributes
    ----------
    labels_ : array of shape (n_voxels,),
        a map of values on voxels used to identify the corresponding model
    results_ : dict,
        with keys corresponding to the different labels values.
        Values are SimpleRegressionResults corresponding to the voxels,
        if minimize_memory is True,
        RegressionResults if minimize_memory is False
    """
    @replace_parameters({'mask': 'mask_img'}, end_version='next')
    def __init__(self, t_r=None, slice_time_ref=0., hrf_model='glover',
                 drift_model='cosine', high_pass=.01, drift_order=1,
                 fir_delays=[0], min_onset=-24, mask_img=None,
                 target_affine=None, target_shape=None, smoothing_fwhm=None,
                 memory=Memory(None), memory_level=1, standardize=False,
                 signal_scaling=0, noise_model='ar1', verbose=0, n_jobs=1,
                 minimize_memory=True, subject_label=None):
        # design matrix parameters
        self.t_r = t_r
        self.slice_time_ref = slice_time_ref
        self.hrf_model = hrf_model
        self.drift_model = drift_model
        self.high_pass = high_pass
        self.drift_order = drift_order
        self.fir_delays = fir_delays
        self.min_onset = min_onset
        # glm parameters
        self.mask_img = mask_img
        self.target_affine = target_affine
        self.target_shape = target_shape
        self.smoothing_fwhm = smoothing_fwhm
        # A string `memory` is a cache directory path; wrap it in a Memory.
        if isinstance(memory, _basestring):
            self.memory = Memory(memory)
        else:
            self.memory = memory
        self.memory_level = memory_level
        self.standardize = standardize
        # signal_scaling=False disables scaling; 0, 1 or (0, 1) select the
        # scaling axis and force standardize off (the two are incompatible).
        if signal_scaling is False:
            self.signal_scaling = signal_scaling
        elif signal_scaling in [0, 1, (0, 1)]:
            self.scaling_axis = signal_scaling
            self.signal_scaling = True
            self.standardize = False
        else:
            raise ValueError('signal_scaling must be "False", "0", "1"'
                             ' or "(0, 1)"')
        self.noise_model = noise_model
        self.verbose = verbose
        self.n_jobs = n_jobs
        self.minimize_memory = minimize_memory
        # attributes
        self.labels_ = None
        self.results_ = None
        self.subject_label = subject_label
    def fit(self, run_imgs, events=None, confounds=None,
            design_matrices=None):
        """ Fit the GLM
        For each run:
        1. create design matrix X
        2. do a masker job: fMRI_data -> Y
        3. fit regression to (Y, X)
        Parameters
        ----------
        run_imgs: Niimg-like object or list of Niimg-like objects,
            See http://nilearn.github.io/manipulating_images/input_output.html#inputing-data-file-names-or-image-objects # noqa:E501
            Data on which the GLM will be fitted. If this is a list,
            the affine is considered the same for all.
        events: pandas Dataframe or string or list of pandas DataFrames or
            strings
            fMRI events used to build design matrices. One events object
            expected per run_img. Ignored in case designs is not None.
            If string, then a path to a csv file is expected.
        confounds: pandas Dataframe or string or list of pandas DataFrames or
            strings
            Each column in a DataFrame corresponds to a confound variable
            to be included in the regression model of the respective run_img.
            The number of rows must match the number of volumes in the
            respective run_img. Ignored in case designs is not None.
            If string, then a path to a csv file is expected.
        design_matrices: pandas DataFrame or list of pandas DataFrames,
            Design matrices that will be used to fit the GLM. If given it
            takes precedence over events and confounds.
        """
        # Check arguments
        # Check imgs type
        if events is not None:
            _check_events_file_uses_tab_separators(events_files=events)
        if not isinstance(run_imgs, (list, tuple)):
            run_imgs = [run_imgs]
        if design_matrices is None:
            if events is None:
                raise ValueError('events or design matrices must be provided')
            if self.t_r is None:
                raise ValueError('t_r not given to FirstLevelModel object'
                                 ' to compute design from events')
        else:
            design_matrices = _check_run_tables(run_imgs, design_matrices,
                                                'design_matrices')
        # Check that number of events and confound files match number of runs
        # Also check that events and confound files can be loaded as DataFrame
        if events is not None:
            events = _check_run_tables(run_imgs, events, 'events')
        if confounds is not None:
            confounds = _check_run_tables(run_imgs, confounds, 'confounds')
        # Learn the mask
        if self.mask_img is False:
            # We create a dummy mask to preserve functionality of api
            ref_img = check_niimg(run_imgs[0])
            self.mask_img = Nifti1Image(np.ones(ref_img.shape[:3]),
                                        ref_img.affine)
        if not isinstance(self.mask_img, NiftiMasker):
            self.masker_ = NiftiMasker(mask_img=self.mask_img,
                                       smoothing_fwhm=self.smoothing_fwhm,
                                       target_affine=self.target_affine,
                                       standardize=self.standardize,
                                       mask_strategy='epi',
                                       t_r=self.t_r,
                                       memory=self.memory,
                                       verbose=max(0, self.verbose - 2),
                                       target_shape=self.target_shape,
                                       memory_level=self.memory_level
                                       )
            self.masker_.fit(run_imgs[0])
        else:
            # NOTE(review): `self.masker_` is never initialized in __init__,
            # so reading it here may raise AttributeError on a fresh instance
            # when a NiftiMasker is passed as mask_img — confirm upstream.
            if self.mask_img.mask_img_ is None and self.masker_ is None:
                self.masker_ = clone(self.mask_img)
                # Our own non-None parameters override the passed masker's.
                for param_name in ['target_affine', 'target_shape',
                                   'smoothing_fwhm', 't_r', 'memory',
                                   'memory_level']:
                    our_param = getattr(self, param_name)
                    if our_param is None:
                        continue
                    if getattr(self.masker_, param_name) is not None:
                        warn('Parameter %s of the masker'
                             ' overriden' % param_name)
                    setattr(self.masker_, param_name, our_param)
                self.masker_.fit(run_imgs[0])
            else:
                self.masker_ = self.mask_img
        # For each run fit the model and keep only the regression results.
        self.labels_, self.results_, self.design_matrices_ = [], [], []
        n_runs = len(run_imgs)
        t0 = time.time()
        for run_idx, run_img in enumerate(run_imgs):
            # Report progress
            if self.verbose > 0:
                percent = float(run_idx) / n_runs
                percent = round(percent * 100, 2)
                dt = time.time() - t0
                # We use a max to avoid a division by zero
                if run_idx == 0:
                    remaining = 'go take a coffee, a big one'
                else:
                    remaining = (100. - percent) / max(0.01, percent) * dt
                    remaining = '%i seconds remaining' % remaining
                sys.stderr.write(
                    "Computing run %d out of %d runs (%s)\n"
                    % (run_idx + 1, n_runs, remaining))
            # Build the experimental design for the glm
            run_img = check_niimg(run_img, ensure_ndim=4)
            if design_matrices is None:
                n_scans = get_data(run_img).shape[3]
                if confounds is not None:
                    confounds_matrix = confounds[run_idx].values
                    if confounds_matrix.shape[0] != n_scans:
                        raise ValueError('Rows in confounds does not match'
                                         'n_scans in run_img at index %d'
                                         % (run_idx,))
                    confounds_names = confounds[run_idx].columns.tolist()
                else:
                    confounds_matrix = None
                    confounds_names = None
                # Frame times span [slice_time_ref*t_r, (n-1+ref)*t_r].
                start_time = self.slice_time_ref * self.t_r
                end_time = (n_scans - 1 + self.slice_time_ref) * self.t_r
                frame_times = np.linspace(start_time, end_time, n_scans)
                design = make_first_level_design_matrix(frame_times,
                                                        events[run_idx],
                                                        self.hrf_model,
                                                        self.drift_model,
                                                        self.high_pass,
                                                        self.drift_order,
                                                        self.fir_delays,
                                                        confounds_matrix,
                                                        confounds_names,
                                                        self.min_onset
                                                        )
            else:
                design = design_matrices[run_idx]
            self.design_matrices_.append(design)
            # Mask and prepare data for GLM
            if self.verbose > 1:
                t_masking = time.time()
                sys.stderr.write('Starting masker computation \r')
            Y = self.masker_.transform(run_img)
            del run_img  # Delete unmasked image to save memory
            if self.verbose > 1:
                t_masking = time.time() - t_masking
                sys.stderr.write('Masker took %d seconds \n'
                                 % t_masking)
            if self.signal_scaling:
                Y, _ = mean_scaling(Y, self.scaling_axis)
            if self.memory:
                mem_glm = self.memory.cache(run_glm, ignore=['n_jobs'])
            else:
                mem_glm = run_glm
            # compute GLM
            if self.verbose > 1:
                t_glm = time.time()
                sys.stderr.write('Performing GLM computation\r')
            labels, results = mem_glm(Y, design.values,
                                      noise_model=self.noise_model,
                                      bins=100, n_jobs=self.n_jobs)
            if self.verbose > 1:
                t_glm = time.time() - t_glm
                sys.stderr.write('GLM took %d seconds \n' % t_glm)
            self.labels_.append(labels)
            # We save memory if inspecting model details is not necessary
            if self.minimize_memory:
                for key in results:
                    results[key] = SimpleRegressionResults(results[key])
            self.results_.append(results)
            del Y
        # Report progress
        if self.verbose > 0:
            sys.stderr.write("\nComputation of %d runs done in %i seconds\n\n"
                             % (n_runs, time.time() - t0))
        return self
    def compute_contrast(self, contrast_def, stat_type=None,
                         output_type='z_score'):
        """Generate different outputs corresponding to
        the contrasts provided e.g. z_map, t_map, effects and variance.
        In multi-session case, outputs the fixed effects map.
        Parameters
        ----------
        contrast_def : str or array of shape (n_col) or list of (string or
            array of shape (n_col))
            where ``n_col`` is the number of columns of the design matrix,
            (one array per run). If only one array is provided when there
            are several runs, it will be assumed that the same contrast is
            desired for all runs. The string can be a formula compatible with
            `pandas.DataFrame.eval`. Basically one can use the name of the
            conditions as they appear in the design matrix of the fitted model
            combined with operators +- and combined with numbers
            with operators +-`*`/.
        stat_type : {'t', 'F'}, optional
            type of the contrast
        output_type : str, optional
            Type of the output map. Can be 'z_score', 'stat', 'p_value',
            'effect_size', 'effect_variance' or 'all'
        Returns
        -------
        output : Nifti1Image or dict
            The desired output image(s). If ``output_type == 'all'``, then
            the output is a dictionary of images, keyed by the type of image.
        """
        if self.labels_ is None or self.results_ is None:
            raise ValueError('The model has not been fit yet')
        if isinstance(contrast_def, (np.ndarray, str)):
            con_vals = [contrast_def]
        elif isinstance(contrast_def, (list, tuple)):
            con_vals = contrast_def
        else:
            raise ValueError('contrast_def must be an array or str or list of'
                             ' (array or str)')
        # Translate formulas to vectors
        for cidx, (con, design_mat) in enumerate(zip(con_vals,
                                                     self.design_matrices_)
                                                 ):
            design_columns = design_mat.columns.tolist()
            if isinstance(con, _basestring):
                con_vals[cidx] = expression_to_contrast_vector(
                    con, design_columns)
        n_runs = len(self.labels_)
        # A single contrast is broadcast to every run.
        if len(con_vals) != n_runs:
            warn('One contrast given, assuming it for all %d runs' % n_runs)
            con_vals = con_vals * n_runs
        valid_types = ['z_score', 'stat', 'p_value', 'effect_size',
                       'effect_variance']
        valid_types.append('all')  # ensuring 'all' is the final entry.
        if output_type not in valid_types:
            raise ValueError(
                'output_type must be one of {}'.format(valid_types))
        contrast = _compute_fixed_effect_contrast(self.labels_, self.results_,
                                                  con_vals, stat_type)
        output_types = (valid_types[:-1]
                        if output_type == 'all' else [output_type])
        outputs = {}
        for output_type_ in output_types:
            estimate_ = getattr(contrast, output_type_)()
            # Prepare the returned images
            output = self.masker_.inverse_transform(estimate_)
            contrast_name = str(con_vals)
            output.header['descrip'] = (
                '%s of contrast %s' % (output_type_, contrast_name))
            outputs[output_type_] = output
        return outputs if output_type == 'all' else output
    def _get_voxelwise_model_attribute(self, attribute,
                                       result_as_time_series):
        """Transform RegressionResults instances within a dictionary
        (whose keys represent the autoregressive coefficient under the 'ar1'
        noise model or only 0.0 under 'ols' noise_model and values are the
        RegressionResults instances) into input nifti space.
        Parameters
        ----------
        attribute : str
            an attribute of a RegressionResults instance.
            possible values include: resid, norm_resid, predicted,
            SSE, r_square, MSE.
        result_as_time_series : bool
            whether the RegressionResult attribute has a value
            per timepoint of the input nifti image.
        Returns
        -------
        output : list
            a list of Nifti1Image(s)
        """
        # check if valid attribute is being accessed.
        all_attributes = dict(vars(RegressionResults)).keys()
        possible_attributes = [prop
                               for prop in all_attributes
                               if '__' not in prop
                               ]
        if attribute not in possible_attributes:
            msg = ("attribute must be one of: "
                   "{attr}".format(attr=possible_attributes)
                   )
            raise ValueError(msg)
        if self.minimize_memory:
            # NOTE(review): "there attributes" in the message below should
            # read "their attributes" (runtime string left unchanged here).
            raise ValueError(
                'To access voxelwise attributes like '
                'R-squared, residuals, and predictions, '
                'the `FirstLevelModel`-object needs to store '
                'there attributes. '
                'To do so, set `minimize_memory` to `False` '
                'when initializing the `FirstLevelModel`-object.')
        if self.labels_ is None or self.results_ is None:
            raise ValueError('The model has not been fit yet')
        output = []
        for design_matrix, labels, results in zip(self.design_matrices_,
                                                  self.labels_,
                                                  self.results_
                                                  ):
            if result_as_time_series:
                voxelwise_attribute = np.zeros((design_matrix.shape[0],
                                                len(labels))
                                               )
            else:
                voxelwise_attribute = np.zeros((1, len(labels)))
            # Each label selects the voxels fitted by one RegressionResults.
            for label_ in results:
                label_mask = labels == label_
                voxelwise_attribute[:, label_mask] = getattr(results[label_],
                                                             attribute)
            output.append(self.masker_.inverse_transform(voxelwise_attribute))
        return output
    @setattr_on_read
    def residuals(self):
        """Transform voxelwise residuals to the same shape
        as the input Nifti1Image(s)
        Returns
        -------
        output : list
            a list of Nifti1Image(s)
        """
        return self._get_voxelwise_model_attribute('resid',
                                                   result_as_time_series=True)
    @setattr_on_read
    def predicted(self):
        """Transform voxelwise predicted values to the same shape
        as the input Nifti1Image(s)
        Returns
        -------
        output : list
            a list of Nifti1Image(s)
        """
        return self._get_voxelwise_model_attribute('predicted',
                                                   result_as_time_series=True)
    @setattr_on_read
    def r_square(self):
        """Transform voxelwise r-squared values to the same shape
        as the input Nifti1Image(s)
        Returns
        -------
        output : list
            a list of Nifti1Image(s)
        """
        return self._get_voxelwise_model_attribute('r_square',
                                                   result_as_time_series=False
                                                   )
@replace_parameters({'mask': 'mask_img'}, end_version='next')
def first_level_models_from_bids(dataset_path, task_label, space_label=None,
                                 img_filters=None, t_r=None, slice_time_ref=0.,
                                 hrf_model='glover', drift_model='cosine',
                                 high_pass=.01, drift_order=1, fir_delays=[0],
                                 min_onset=-24, mask_img=None,
                                 target_affine=None, target_shape=None,
                                 smoothing_fwhm=None, memory=Memory(None),
                                 memory_level=1, standardize=False,
                                 signal_scaling=0, noise_model='ar1',
                                 verbose=0, n_jobs=1,
                                 minimize_memory=True,
                                 derivatives_folder='derivatives'):
    """Create FirstLevelModel objects and fit arguments from a BIDS dataset.

    If t_r is not specified this function will attempt to load it from a
    bold.json file alongside slice_time_ref. Otherwise t_r and slice_time_ref
    are taken as given.

    Parameters
    ----------
    dataset_path: str
        Directory of the highest level folder of the BIDS dataset. Should
        contain subject folders and a derivatives folder.
    task_label: str
        Task_label as specified in the file names like _task-<task_label>_.
    space_label: str, optional
        Specifies the space label of the preprocessed bold.nii images.
        As they are specified in the file names like _space-<space_label>_.
    img_filters: list of tuples (str, str), optional (default: None)
        Filters are of the form (field, label). Only one filter per field
        allowed. A file that does not match a filter will be discarded.
        Possible filters are 'acq', 'ce', 'dir', 'rec', 'run', 'echo', 'res',
        'den', and 'desc'. Filter examples would be ('desc', 'preproc'),
        ('dir', 'pa') and ('run', '10').
    derivatives_folder: str, optional
        derivatives and app folder path containing preprocessed files.
        Like "derivatives/FMRIPREP". default is simply "derivatives".
    All other parameters correspond to a `FirstLevelModel` object, which
    contains their documentation. The subject label of the model will be
    determined directly from the BIDS dataset.

    Returns
    -------
    models: list of `FirstLevelModel` objects
        Each FirstLevelModel object corresponds to a subject. All runs from
        different sessions are considered together for the same subject to run
        a fixed effects analysis on them.
    models_run_imgs: list of list of Niimg-like objects,
        Items for the FirstLevelModel fit function of their respective model.
    models_events: list of list of pandas DataFrames,
        Items for the FirstLevelModel fit function of their respective model.
    models_confounds: list of list of pandas DataFrames or None,
        Items for the FirstLevelModel fit function of their respective model.
    """
    # check arguments
    img_filters = img_filters if img_filters else []
    if not isinstance(dataset_path, str):
        raise TypeError(
            # Bug fix: previously reported type(task_label) here.
            'dataset_path must be a string, instead %s was given' %
            type(dataset_path))
    if not os.path.exists(dataset_path):
        raise ValueError('given path does not exist: %s' % dataset_path)
    if not isinstance(task_label, str):
        raise TypeError('task_label must be a string, instead %s was given' %
                        type(task_label))
    if space_label is not None and not isinstance(space_label, str):
        raise TypeError('space_label must be a string, instead %s was given' %
                        type(space_label))
    if not isinstance(img_filters, list):
        raise TypeError('img_filters must be a list, instead %s was given' %
                        type(img_filters))
    for img_filter in img_filters:
        if (not isinstance(img_filter[0], str)
                or not isinstance(img_filter[1], str)):
            raise TypeError('filters in img filters must be (str, str), '
                            'instead %s was given' % type(img_filter))
        if img_filter[0] not in ['acq', 'ce', 'dir', 'rec', 'run',
                                 'echo', 'desc', 'res', 'den',
                                 ]:
            raise ValueError(
                "field %s is not a possible filter. Only "
                "'acq', 'ce', 'dir', 'rec', 'run', 'echo', "
                "'desc', 'res', 'den' are allowed." % img_filter[0])

    # check derivatives folder is present
    derivatives_path = os.path.join(dataset_path, derivatives_folder)
    if not os.path.exists(derivatives_path):
        raise ValueError('derivatives folder does not exist in given dataset')

    # Get acq specs for models. RepetitionTime and SliceTimingReference.
    # Throw warning if no bold.json is found
    if t_r is not None:
        # %g instead of %d: t_r / slice_time_ref are floats and %d would
        # silently truncate them (e.g. 2.5 -> 2).
        warn('RepetitionTime given in model_init as %g' % t_r)
        warn('slice_time_ref is %g percent of the repetition '
             'time' % slice_time_ref)
    else:
        filters = [('task', task_label)]
        for img_filter in img_filters:
            if img_filter[0] in ['acq', 'rec', 'run']:
                filters.append(img_filter)

        img_specs = get_bids_files(derivatives_path, modality_folder='func',
                                   file_tag='bold', file_type='json',
                                   filters=filters)
        # If we dont find the parameter information in the derivatives folder
        # we try to search in the raw data folder
        if not img_specs:
            img_specs = get_bids_files(dataset_path, modality_folder='func',
                                       file_tag='bold', file_type='json',
                                       filters=filters)
        if not img_specs:
            warn('No bold.json found in derivatives folder or '
                 'in dataset folder. t_r can not be inferred and will need to'
                 ' be set manually in the list of models, otherwise their fit'
                 ' will throw an exception')
        else:
            # Use a context manager so the json file handle is closed
            # (the previous json.load(open(...)) leaked it).
            with open(img_specs[0], 'r') as specs_file:
                specs = json.load(specs_file)
            if 'RepetitionTime' in specs:
                t_r = float(specs['RepetitionTime'])
            else:
                warn('RepetitionTime not found in file %s. t_r can not be '
                     'inferred and will need to be set manually in the '
                     'list of models. Otherwise their fit will throw an '
                     ' exception' % img_specs[0])
            if 'SliceTimingRef' in specs:
                slice_time_ref = float(specs['SliceTimingRef'])
            else:
                warn('SliceTimingRef not found in file %s. It will be assumed'
                     ' that the slice timing reference is 0.0 percent of the '
                     'repetition time. If it is not the case it will need to '
                     'be set manually in the generated list of models' %
                     img_specs[0])

    # Infer subjects in dataset
    sub_folders = glob.glob(os.path.join(derivatives_path, 'sub-*/'))
    sub_labels = [os.path.basename(s[:-1]).split('-')[1] for s in sub_folders]
    sub_labels = sorted(list(set(sub_labels)))

    # Build fit_kwargs dictionaries to pass to their respective models fit
    # Events and confounds files must match number of imgs (runs)
    models = []
    models_run_imgs = []
    models_events = []
    models_confounds = []
    for sub_label in sub_labels:
        # Create model
        model = FirstLevelModel(
            t_r=t_r, slice_time_ref=slice_time_ref, hrf_model=hrf_model,
            drift_model=drift_model, high_pass=high_pass,
            drift_order=drift_order, fir_delays=fir_delays,
            min_onset=min_onset, mask_img=mask_img,
            target_affine=target_affine, target_shape=target_shape,
            smoothing_fwhm=smoothing_fwhm, memory=memory,
            memory_level=memory_level, standardize=standardize,
            signal_scaling=signal_scaling, noise_model=noise_model,
            verbose=verbose, n_jobs=n_jobs,
            minimize_memory=minimize_memory, subject_label=sub_label)
        models.append(model)

        # Get preprocessed imgs
        if space_label is None:
            filters = [('task', task_label)] + img_filters
        else:
            filters = [('task', task_label),
                       ('space', space_label)] + img_filters
        imgs = get_bids_files(derivatives_path, modality_folder='func',
                              file_tag='bold', file_type='nii*',
                              sub_label=sub_label, filters=filters)
        # If there is more than one file for the same (ses, run), likely we
        # have an issue of underspecification of filters.
        run_check_list = []
        # If more than one run is present the run field is mandatory in BIDS
        # as well as the ses field if more than one session is present.
        if len(imgs) > 1:
            for img in imgs:
                img_dict = parse_bids_filename(img)
                if (
                    '_ses-' in img_dict['file_basename']
                    and '_run-' in img_dict['file_basename']
                ):
                    if (img_dict['ses'], img_dict['run']) in run_check_list:
                        raise ValueError(
                            'More than one nifti image found '
                            'for the same run %s and session %s. '
                            'Please verify that the '
                            'desc_label and space_label labels '
                            'corresponding to the BIDS spec '
                            'were correctly specified.' %
                            (img_dict['run'], img_dict['ses']))
                    else:
                        run_check_list.append((img_dict['ses'],
                                               img_dict['run']))
                elif '_ses-' in img_dict['file_basename']:
                    if img_dict['ses'] in run_check_list:
                        raise ValueError(
                            'More than one nifti image '
                            'found for the same ses %s, while '
                            'no additional run specification present'
                            '. Please verify that the desc_label and '
                            'space_label labels '
                            'corresponding to the BIDS spec '
                            'were correctly specified.' %
                            img_dict['ses'])
                    else:
                        run_check_list.append(img_dict['ses'])
                elif '_run-' in img_dict['file_basename']:
                    if img_dict['run'] in run_check_list:
                        raise ValueError(
                            'More than one nifti image '
                            'found for the same run %s. '
                            'Please verify that the desc_label and '
                            'space_label labels '
                            'corresponding to the BIDS spec '
                            'were correctly specified.' %
                            img_dict['run'])
                    else:
                        run_check_list.append(img_dict['run'])
        models_run_imgs.append(imgs)

        # Get events and extra confounds
        filters = [('task', task_label)]
        for img_filter in img_filters:
            if img_filter[0] in ['acq', 'rec', 'run']:
                filters.append(img_filter)

        # Get events files
        events = get_bids_files(dataset_path, modality_folder='func',
                                file_tag='events', file_type='tsv',
                                sub_label=sub_label, filters=filters)
        if events:
            if len(events) != len(imgs):
                raise ValueError('%d events.tsv files found for %d bold '
                                 'files. Same number of event files as '
                                 'the number of runs is expected' %
                                 (len(events), len(imgs)))
            events = [pd.read_csv(event, sep='\t', index_col=None)
                      for event in events]
            models_events.append(events)
        else:
            raise ValueError('No events.tsv files found')

        # Get confounds. If not found it will be assumed there are none.
        # If there are confounds, they are assumed to be present for all runs.
        confounds = get_bids_files(derivatives_path, modality_folder='func',
                                   file_tag='desc-confounds_regressors',
                                   file_type='tsv', sub_label=sub_label,
                                   filters=filters)
        if confounds:
            if len(confounds) != len(imgs):
                # Bug fix: the message previously reported len(events)
                # instead of the number of confound files found.
                raise ValueError('%d confounds.tsv files found for %d bold '
                                 'files. Same number of confound files as '
                                 'the number of runs is expected' %
                                 (len(confounds), len(imgs)))
            confounds = [pd.read_csv(c, sep='\t', index_col=None)
                         for c in confounds]
            models_confounds.append(confounds)

    return models, models_run_imgs, models_events, models_confounds
| |
from __future__ import unicode_literals
from moto.core import BaseBackend
from .utils import random_job_id, random_instance_group_id
DEFAULT_JOB_FLOW_ROLE = 'EMRJobflowDefault'
class FakeInstanceGroup(object):
    """In-memory stand-in for an EMR instance group."""

    def __init__(self, id, instance_count, instance_role, instance_type,
                 market, name, bid_price=None):
        # Attribute names mirror what the EMR response templates expect.
        self.id = id
        self.name = name
        self.market = market
        self.bid_price = bid_price
        self.num_instances = instance_count
        self.role = instance_role
        self.type = instance_type

    def set_instance_count(self, instance_count):
        """Resize the group (used by ModifyInstanceGroups)."""
        self.num_instances = instance_count
class Cluster(object):
    """In-memory model of an EMR cluster for the cluster-centric API calls.

    Most fields are static defaults mirroring what a freshly started
    cluster reports; tags are mutable via :meth:`add_tags` /
    :meth:`remove_tags`.
    """

    def __init__(self, id, name, availability_zone, ec2_key_name, subnet_id,
                 ec2_iam_profile, log_uri):
        self.id = id
        self.name = name
        self.applications = []
        self.auto_terminate = "false"
        self.availability_zone = availability_zone
        # Bug fix: ec2_key_name was accepted but never stored.
        self.ec2_key_name = ec2_key_name
        self.subnet_id = subnet_id
        self.ec2_iam_profile = ec2_iam_profile
        self.log_uri = log_uri
        self.master_public_dns_name = ""
        self.normalized_instance_hours = 0
        self.requested_ami_version = "2.4.2"
        self.running_ami_version = "2.4.2"
        self.service_role = "my-service-role"
        self.state = "RUNNING"
        self.tags = {}
        self.termination_protected = "false"
        self.visible_to_all_users = "false"

    def add_tags(self, tags):
        """Merge *tags* (dict of key -> value) into the cluster tags."""
        self.tags.update(tags)

    def remove_tags(self, tag_keys):
        """Remove each key in *tag_keys*; missing keys are ignored."""
        for key in tag_keys:
            self.tags.pop(key, None)
class FakeStep(object):
    """In-memory model of one EMR job-flow step.

    The constructor consumes the flattened query-string style kwargs that
    the request parser produces, e.g.:
      # 'Steps.member.1.HadoopJarStep.Jar': ['/home/hadoop/contrib/streaming/hadoop-streaming.jar'],
      # 'Steps.member.1.HadoopJarStep.Args.member.1': ['-mapper'],
      # 'Steps.member.1.ActionOnFailure': ['TERMINATE_JOB_FLOW'],
      # 'Steps.member.1.Name': ['My wordcount example']}
    """

    def __init__(self, state, **kwargs):
        self.state = state
        self.name = kwargs['name']
        self.action_on_failure = kwargs['action_on_failure']
        self.jar = kwargs['hadoop_jar_step._jar']
        # Collect the 1-based, consecutively numbered step arguments until
        # the first missing (or falsy) member.
        args = []
        member = 1
        arg = kwargs.get('hadoop_jar_step._args.member.{0}'.format(member))
        while arg:
            args.append(arg)
            member += 1
            arg = kwargs.get('hadoop_jar_step._args.member.{0}'.format(member))
        self.args = args
class FakeJobFlow(object):
    """In-memory model of an EMR job flow (the legacy job-flow API)."""

    def __init__(self, job_id, name, log_uri, job_flow_role,
                 visible_to_all_users, steps, instance_attrs):
        self.id = job_id
        self.name = name
        self.log_uri = log_uri
        self.role = job_flow_role or DEFAULT_JOB_FLOW_ROLE
        self.state = "STARTING"
        self.steps = []
        self.add_steps(steps)
        # Initial instance configuration from the request; superseded once
        # instance groups are attached (see the properties below).
        self.initial_instance_count = instance_attrs.get('instance_count', 0)
        self.initial_master_instance_type = instance_attrs.get('master_instance_type')
        self.initial_slave_instance_type = instance_attrs.get('slave_instance_type')
        self.set_visibility(visible_to_all_users)
        self.normalized_instance_hours = 0
        self.ec2_key_name = instance_attrs.get('ec2_key_name')
        self.availability_zone = instance_attrs.get('placement.availability_zone')
        self.subnet_id = instance_attrs.get('ec2_subnet_id')
        self.keep_job_flow_alive_when_no_steps = instance_attrs.get('keep_job_flow_alive_when_no_steps')
        self.termination_protected = instance_attrs.get('termination_protected')
        self.instance_group_ids = []

    def create_cluster(self):
        """Build the cluster-API view of this job flow."""
        return Cluster(
            id=self.id,
            name=self.name,
            availability_zone=self.availability_zone,
            ec2_key_name=self.ec2_key_name,
            subnet_id=self.subnet_id,
            ec2_iam_profile=self.role,
            log_uri=self.log_uri,
        )

    def terminate(self):
        self.state = 'TERMINATED'

    def set_visibility(self, visibility):
        # The request carries the flag as the string 'true'/'false'.
        self.visible_to_all_users = visibility == 'true'

    def add_steps(self, steps):
        """Append steps; only the very first step starts immediately."""
        for step_kwargs in steps:
            state = 'PENDING' if self.steps else 'STARTING'
            self.steps.append(FakeStep(state=state, **step_kwargs))

    def add_instance_group(self, instance_group_id):
        self.instance_group_ids.append(instance_group_id)

    @property
    def instance_groups(self):
        # Resolved lazily through the module-level backend singleton.
        return emr_backend.get_instance_groups(self.instance_group_ids)

    @property
    def master_instance_type(self):
        groups = self.instance_groups
        return groups[0].type if groups else self.initial_master_instance_type

    @property
    def slave_instance_type(self):
        groups = self.instance_groups
        return groups[0].type if groups else self.initial_slave_instance_type

    @property
    def instance_count(self):
        """Total instances across all groups, or the initial request count
        when no groups have been attached yet."""
        groups = self.instance_groups
        if not groups:
            return self.initial_instance_count
        return sum(int(group.num_instances) for group in groups)
class ElasticMapReduceBackend(BaseBackend):
    """In-memory backend holding all fake EMR state (job flows, clusters
    and instance groups), keyed by their generated ids."""

    def __init__(self):
        self.job_flows = {}
        self.clusters = {}
        self.instance_groups = {}

    def run_job_flow(self, name, log_uri, job_flow_role, visible_to_all_users, steps, instance_attrs):
        """Create a new job flow plus its cluster-API twin and return it."""
        job_id = random_job_id()
        job_flow = FakeJobFlow(job_id, name, log_uri, job_flow_role, visible_to_all_users, steps, instance_attrs)
        self.job_flows[job_id] = job_flow
        cluster = job_flow.create_cluster()
        self.clusters[cluster.id] = cluster
        return job_flow

    def add_job_flow_steps(self, job_flow_id, steps):
        """Append steps to an existing job flow and return the flow."""
        job_flow = self.job_flows[job_flow_id]
        job_flow.add_steps(steps)
        return job_flow

    def describe_job_flows(self, job_flow_ids=None):
        """Return all job flows, or only those whose id is listed."""
        jobs = self.job_flows.values()
        if job_flow_ids:
            return [job for job in jobs if job.id in job_flow_ids]
        else:
            return jobs

    def terminate_job_flows(self, job_ids):
        """Terminate and return the job flows matching *job_ids*."""
        flows = [flow for flow in self.describe_job_flows() if flow.id in job_ids]
        for flow in flows:
            flow.terminate()
        return flows

    def list_clusters(self):
        return self.clusters.values()

    def get_cluster(self, cluster_id):
        # Raises KeyError for unknown ids, matching dict semantics.
        return self.clusters[cluster_id]

    def get_instance_groups(self, instance_group_ids):
        """Return the stored group objects matching the given ids."""
        return [
            group for group_id, group
            in self.instance_groups.items()
            if group_id in instance_group_ids
        ]

    def add_instance_groups(self, job_flow_id, instance_groups):
        """Create instance groups, attach them to the job flow and return
        the created group objects."""
        job_flow = self.job_flows[job_flow_id]
        result_groups = []
        for instance_group in instance_groups:
            instance_group_id = random_instance_group_id()
            group = FakeInstanceGroup(instance_group_id, **instance_group)
            self.instance_groups[instance_group_id] = group
            job_flow.add_instance_group(instance_group_id)
            result_groups.append(group)
        return result_groups

    def modify_instance_groups(self, instance_groups):
        """Resize the given instance groups and return the modified objects.

        Bug fix: the previous implementation never appended to
        result_groups and therefore always returned an empty list.
        """
        result_groups = []
        for instance_group in instance_groups:
            group = self.instance_groups[instance_group['instance_group_id']]
            group.set_instance_count(instance_group['instance_count'])
            result_groups.append(group)
        return result_groups

    def set_visible_to_all_users(self, job_ids, visible_to_all_users):
        for job_id in job_ids:
            job = self.job_flows[job_id]
            job.set_visibility(visible_to_all_users)

    def add_tags(self, cluster_id, tags):
        cluster = self.get_cluster(cluster_id)
        cluster.add_tags(tags)

    def remove_tags(self, cluster_id, tag_keys):
        cluster = self.get_cluster(cluster_id)
        cluster.remove_tags(tag_keys)
emr_backend = ElasticMapReduceBackend()
| |
# Copyright 2016 Infoblox Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import mock
import testtools
from heat_infoblox import object_manipulator
# Canned WAPI 'member' object with one anycast loopback IPv4 address.
# Shared by the tests below both as input fixture and expected baseline.
MEMBER_WITH_ANYCAST_IP = {
    '_ref': u'member/b25lLnZpcnR1YWxfbm9kZSQw:master-113.ft-ac.com',
    'additional_ip_list': [{
        'anycast': True,
        'enable_bgp': False,
        'enable_ospf': False,
        'interface': u'LOOPBACK',
        'ipv4_network_setting': {
            'address': u'172.252.5.2',
            'dscp': 0,
            'primary': False,
            'subnet_mask': u'255.255.255.255',
            'use_dscp': False}}]}
class TestObjectManipulator(testtools.TestCase):
    def _test_create_anycast_loopback(self, ip, expected_anycast_dict,
                                      old_ip=None, ip_list=None):
        """Drive create_anycast_loopback against a mocked connector and
        verify the member update: the expected additional_ip_list is the
        baseline fixture list, minus the entry matching *old_ip* (if any),
        plus *expected_anycast_dict*.
        """
        member_name = 'member_name'
        connector = mock.Mock()
        # NOTE(review): .copy() is shallow, so the member still shares
        # 'additional_ip_list' with the module fixture unless overridden.
        member = MEMBER_WITH_ANYCAST_IP.copy()
        if ip_list:
            member['additional_ip_list'] = ip_list
        connector.get_object.return_value = [member]
        om = object_manipulator.InfobloxObjectManipulator(connector)
        om.create_anycast_loopback(member_name, ip, enable_bgp=True,
                                   enable_ospf=True, old_ip=old_ip)
        connector.get_object.assert_called_once_with(
            'member', {'host_name': member_name},
            ['additional_ip_list'], extattrs=None)
        expected_ip_list = MEMBER_WITH_ANYCAST_IP['additional_ip_list'][:]
        if old_ip:
            # Remove the first entry whose v4 address / v6 virtual_ip
            # matches old_ip (':' distinguishes IPv6 from IPv4).
            for idx, val in enumerate(expected_ip_list):
                if ':' in old_ip:
                    if 'ipv6_network_setting' in val:
                        check_ip = val['ipv6_network_setting']['virtual_ip']
                    else:
                        continue
                else:
                    if 'ipv4_network_setting' in val:
                        check_ip = val['ipv4_network_setting']['address']
                    else:
                        continue
                if check_ip == old_ip:
                    expected_ip_list.pop(idx)
                    break
        expected_ip_list.append(expected_anycast_dict)
        connector.update_object.assert_called_once_with(
            MEMBER_WITH_ANYCAST_IP['_ref'],
            {'additional_ip_list': expected_ip_list})
def test_create_anycast_loopback_v4(self):
ip = '172.23.25.25'
expected_anycast_dict = {'anycast': True,
'ipv4_network_setting':
{'subnet_mask': '255.255.255.255',
'address': ip},
'enable_bgp': True,
'interface': 'LOOPBACK',
'enable_ospf': True}
self._test_create_anycast_loopback(ip, expected_anycast_dict)
def test_create_anycast_loopback_v6(self):
ip = 'fffe::5'
expected_anycast_dict = {'anycast': True,
'ipv6_network_setting':
{'virtual_ip': ip,
'cidr_prefix': 128},
'enable_bgp': True,
'interface': 'LOOPBACK',
'enable_ospf': True}
self._test_create_anycast_loopback(ip, expected_anycast_dict)
    def test_create_anycast_loopback_old_ip_v4(self):
        """Replacing an existing IPv4 anycast IP removes the old entry
        and appends the new one."""
        ip = '172.23.25.26'
        old_ip = '172.252.5.2'
        ip_list = MEMBER_WITH_ANYCAST_IP['additional_ip_list'][:]
        ip_list.append(
            {
                'anycast': True,
                'ipv4_network_setting':
                    {'subnet_mask': '255.255.255.255',
                     'address': old_ip},
                'enable_bgp': True,
                'interface': 'LOOPBACK',
                'enable_ospf': True
            })
        expected_anycast_dict = {'anycast': True,
                                 'ipv4_network_setting':
                                     {'subnet_mask': '255.255.255.255',
                                      'address': ip},
                                 'enable_bgp': True,
                                 'interface': 'LOOPBACK',
                                 'enable_ospf': True}
        self._test_create_anycast_loopback(ip, expected_anycast_dict,
                                           old_ip=old_ip, ip_list=ip_list)
    def test_create_anycast_loopback_old_ip_v6(self):
        """Replacing an existing IPv6 anycast IP removes the old entry
        and appends the new one."""
        ip = 'fffe::5'
        old_ip = 'fffe::4'
        ip_list = MEMBER_WITH_ANYCAST_IP['additional_ip_list'][:]
        ip_list.append(
            {
                'anycast': True,
                # NOTE(review): the old entry uses 'subnet_mask' where v6
                # normally uses 'cidr_prefix'; matching only inspects
                # 'virtual_ip', so the test still works — confirm intent.
                'ipv6_network_setting':
                    {'subnet_mask': 128,
                     'virtual_ip': old_ip},
                'enable_bgp': True,
                'interface': 'LOOPBACK',
                'enable_ospf': True
            })
        expected_anycast_dict = {'anycast': True,
                                 'ipv6_network_setting':
                                     {'cidr_prefix': 128,
                                      'virtual_ip': ip},
                                 'enable_bgp': True,
                                 'interface': 'LOOPBACK',
                                 'enable_ospf': True}
        self._test_create_anycast_loopback(ip, expected_anycast_dict,
                                           old_ip=old_ip, ip_list=ip_list)
def _test_delete_anycast_loopback(self, ip, members, expected_calls):
connector = mock.Mock()
connector.get_object.return_value = members
om = object_manipulator.InfobloxObjectManipulator(connector)
om.delete_anycast_loopback(ip)
connector.get_object.assert_called_once_with(
'member', return_fields=['additional_ip_list'])
connector.update_object.assert_has_calls(expected_calls)
    def test_delete_anycast_loopback_single_anycast(self):
        """Deleting an anycast IP updates only the members that carry it."""
        ip = '172.23.25.25'
        members = [
            {'_ref': u'member/a25lL2ecnR1YWxfbm9kZSQw:master-113.ft-ac.com',
             'additional_ip_list': [{'anycast': True,
                                     'ipv4_network_setting':
                                         {'subnet_mask': '255.255.255.255',
                                          'address': ip},
                                     'enable_bgp': True,
                                     'interface': 'LOOPBACK',
                                     'enable_ospf': True}]},
            {'_ref': u'member/cnR1YWxf:master-host.infoblox.com',
             'additional_ip_list': [{'anycast': True,
                                     'ipv4_network_setting':
                                         {'subnet_mask': '255.255.255.255',
                                          'address': ip},
                                     'enable_bgp': True,
                                     'interface': 'LOOPBACK',
                                     'enable_ospf': True}]},
            MEMBER_WITH_ANYCAST_IP]
        # update should be called only for the first two members,
        # where anycast ip matches
        expected_calls = [
            mock.call(members[0]['_ref'], {'additional_ip_list': []}),
            mock.call(members[1]['_ref'], {'additional_ip_list': []})]
        self._test_delete_anycast_loopback(ip, members, expected_calls)
    def test_delete_anycast_loopback_multiple_anycast(self):
        """Only the matching entry is removed when a member carries more
        than one anycast IP; unrelated entries survive."""
        ip = '172.23.25.25'
        members = [
            {'_ref': u'member/a25lLnZpcnR1YWxfbm9kZSQw:master-113.ft-ac.com',
             'additional_ip_list': [
                 {'anycast': True,
                  'ipv4_network_setting':
                      {'subnet_mask': '255.255.255.255',
                       'address': ip},
                  'enable_bgp': True,
                  'interface': 'LOOPBACK',
                  'enable_ospf': True},
                 MEMBER_WITH_ANYCAST_IP['additional_ip_list'][0]]},
            {'_ref': u'member/cnR1YWxf:master-host.infoblox.com',
             'additional_ip_list': [{'anycast': True,
                                     'ipv4_network_setting':
                                         {'subnet_mask': '255.255.255.255',
                                          'address': ip},
                                     'enable_bgp': True,
                                     'interface': 'LOOPBACK',
                                     'enable_ospf': True}]}]
        expected_calls = [
            mock.call(members[0]['_ref'],
                      {'additional_ip_list':
                          MEMBER_WITH_ANYCAST_IP['additional_ip_list']}),
            mock.call(members[1]['_ref'], {'additional_ip_list': []})]
        self._test_delete_anycast_loopback(ip, members, expected_calls)
def test__copy_fields_or_raise(self):
fields = ['field-one', 'field-two']
source = {'field-one': 1,
'field-two': 'text',
'non-copy': 12}
dest = {}
object_manipulator.InfobloxObjectManipulator._copy_fields_or_raise(
source, dest, fields)
self.assertEqual(2, len(dest))
self.assertEqual(1, dest['field-one'])
self.assertEqual('text', dest['field-two'])
def test__copy_fields_or_raise_raises_value_error(self):
fields = ['field-one']
source = {'non-copy': 12}
dest = {}
objm = object_manipulator.InfobloxObjectManipulator
self.assertRaises(ValueError,
objm._copy_fields_or_raise,
source,
dest,
fields)
    def _test_configuration_update(self, method_name, object_type, field,
                                   members, create_options, expected_options,
                                   **kwargs):
        """Generic helper: call *method_name* on the manipulator with
        *create_options* and verify a single get/update round trip on
        *field* of *object_type*.

        NOTE(review): **kwargs is accepted but never forwarded to
        method_to_call; test_update_bgp_as passes old_neighbor_ip through
        here and it is silently dropped — confirm whether create_bgp_as
        should receive it.
        """
        member_name = 'my_member'
        connector = mock.Mock()
        connector.get_object.return_value = members
        om = object_manipulator.InfobloxObjectManipulator(connector)
        method_to_call = getattr(om, method_name)
        method_to_call(member_name, create_options)
        connector.get_object.assert_called_once_with(
            object_type, {'host_name': member_name},
            [field], extattrs=None)
        connector.update_object.assert_called_once_with(
            members[0]['_ref'], {field: expected_options})
def _test_create_ospf(self, members, ospf_options, expected_options):
self._test_configuration_update('create_ospf', 'member', 'ospf_list',
members, ospf_options,
expected_options)
    def test_create_ospf(self):
        """With authentication NONE, key fields are stripped from the
        stored OSPF options."""
        members = [
            {'_ref': u'member/b35lLnZpcnR1YWxa3fskZSQw:master-113.ft-ac.com',
             'ospf_list': []}]
        ospf_options = dict(advertise_interface_vlan=10,
                            area_id='1',
                            area_type='STANDARD',
                            authentication_key='12',
                            authentication_type='NONE',
                            interface='IP',
                            is_ipv4=True,
                            key_id=12,
                            auto_calc_cost_enabled=True)
        expected_option = ospf_options.copy()
        # Remove fields that are not used in current conditions
        del expected_option['authentication_key']
        del expected_option['key_id']
        self._test_create_ospf(members, ospf_options, [expected_option])
    def test_create_ospf_mesage_digest_and_ha(self):
        """MESSAGE_DIGEST auth on LAN_HA keeps the key fields but drops
        VLAN/cost settings."""
        members = [
            {'_ref': u'member/b35lLnZpcnR1YWxa3fskZSQw:master-113.ft-ac.com',
             'ospf_list': []}]
        ospf_options = dict(advertise_interface_vlan=10,
                            area_id='1',
                            area_type='STANDARD',
                            authentication_key='12',
                            authentication_type='MESSAGE_DIGEST',
                            interface='LAN_HA',
                            is_ipv4=True,
                            key_id=12,
                            cost=5,
                            auto_calc_cost_enabled=True)
        expected_option = ospf_options.copy()
        # Remove fields that are not used in current conditions
        del expected_option['advertise_interface_vlan']
        del expected_option['cost']
        self._test_create_ospf(members, ospf_options, [expected_option])
    def test_create_ospf_simple(self):
        """SIMPLE auth keeps the authentication key but drops key_id and
        the VLAN setting."""
        members = [
            {'_ref': u'member/b35lLnZpcnR1YWxa3fskZSQw:master-113.ft-ac.com',
             'ospf_list': []}]
        ospf_options = dict(advertise_interface_vlan=10,
                            area_id='1',
                            area_type='STANDARD',
                            authentication_key='12',
                            authentication_type='SIMPLE',
                            interface='LAN_HA',
                            is_ipv4=True,
                            key_id=12,
                            cost=5,
                            auto_calc_cost_enabled=False)
        expected_option = ospf_options.copy()
        # Remove fields that are not used in current conditions
        del expected_option['advertise_interface_vlan']
        del expected_option['key_id']
        self._test_create_ospf(members, ospf_options, [expected_option])
    def test_create_ospf_with_existent_settings(self):
        """A new OSPF area is appended after existing ospf_list entries."""
        members = [
            {'_ref': u'member/b35lLnZpcnR1YWxa3fskZSQw:master-113.ft-ac.com',
             'ospf_list': [{'area_id': '5',
                            'area_type': 'STANDARD',
                            'authentication_type': 'NONE',
                            'interface': 'IP',
                            'is_ipv4': 'true'}]}]
        ospf_options = dict(advertise_interface_vlan=10,
                            area_id='1',
                            area_type='STANDARD',
                            authentication_type='NONE',
                            interface='IP',
                            is_ipv4=True,
                            auto_calc_cost_enabled=True)
        expected_option = copy.deepcopy(members[0]['ospf_list'])
        expected_option.append(ospf_options)
        self._test_create_ospf(members, ospf_options, expected_option)
def _test_delete_ospf(self, members, expected_options):
area_id = '5'
member_name = 'my_member'
connector = mock.Mock()
connector.get_object.return_value = members
om = object_manipulator.InfobloxObjectManipulator(connector)
om.delete_ospf(area_id, member_name)
connector.get_object.assert_called_once_with(
'member', {'host_name': 'my_member'},
['ospf_list'], extattrs=None)
connector.update_object.assert_called_once_with(
members[0]['_ref'], expected_options)
def test_delete_ospf_single(self):
members = [
{'_ref': u'member/b35lLnZpcnR1YWxa3fskZSQw:master-113.ft-ac.com',
'ospf_list': [{'area_id': '5',
'area_type': 'STANDARD',
'authentication_type': 'NONE',
'interface': 'IP',
'is_ipv4': 'true'}]}]
expected_options = {'ospf_list': []}
self._test_delete_ospf(members, expected_options)
    def test_delete_ospf_multiple(self):
        """Only the OSPF entry for area '5' is removed; other areas stay."""
        members = [
            {'_ref': u'member/b35lLnZpcnR1YWxa3fskZSQw:master-113.ft-ac.com',
             'ospf_list': [{'area_id': '5',
                            'area_type': 'STANDARD',
                            'authentication_type': 'NONE',
                            'interface': 'IP',
                            'is_ipv4': 'true'},
                           {'area_id': '2',
                            'area_type': 'STANDARD',
                            'authentication_type': 'NONE',
                            'interface': 'IP',
                            'is_ipv4': 'true'}]}]
        expected_options = {'ospf_list': [{'area_id': '2',
                                           'area_type': 'STANDARD',
                                           'authentication_type': 'NONE',
                                           'interface': 'IP',
                                           'is_ipv4': 'true'}]}
        self._test_delete_ospf(members, expected_options)
def _test_bgp_as(self, members, bgp_options, expected_options, **kwargs):
self._test_configuration_update('create_bgp_as', 'member', 'bgp_as',
members, bgp_options, expected_options,
**kwargs)
def _test_bgp_neighbor(self, members, bgp_options, expected_options):
self._test_configuration_update('create_bgp_neighbor', 'member',
'bgp_as', members, bgp_options,
expected_options)
    def test_create_bgp_as(self):
        """Flat create options are split into AS-level fields plus a
        single entry in the nested 'neighbors' list."""
        members = [
            {'_ref': u'member/b35lLnZpcnR1YWxa3fskZSQw:master-113.ft-ac.com',
             'bgp_as': []}]
        bgp_options = {'as': 2,
                       'holddown': 15,
                       'keepalive': 20,
                       'link_detect': True,
                       'authentication_mode': 'MD5',
                       'bgp_neighbor_pass': 'somepass',
                       'comment': 'comment',
                       'interface': 'LAN_HA',
                       'neighbor_ip': '192.168.1.10',
                       'remote_as': 20}
        expected_options = {'as': 2,
                            'holddown': 15,
                            'keepalive': 20,
                            'link_detect': True,
                            'neighbors': [
                                {'authentication_mode': 'MD5',
                                 'bgp_neighbor_pass': 'somepass',
                                 'comment': 'comment',
                                 'interface': 'LAN_HA',
                                 'neighbor_ip': '192.168.1.10',
                                 'remote_as': 20}]
                            }
        self._test_bgp_as(members, bgp_options, [expected_options])
    def test_update_bgp_as(self):
        """Creating a BGP AS over an existing one replaces the whole
        bgp_as entry, including its neighbors list."""
        members = [
            {'_ref': u'member/b35lLnZpcnR1YWxa3fskZSQw:master-113.ft-ac.com',
             'bgp_as': [{'as': 2,
                         'holddown': 15,
                         'keepalive': 20,
                         'link_detect': True,
                         'neighbors': [
                             {'authentication_mode': 'MD5',
                              'bgp_neighbor_pass': 'somepass',
                              'comment': 'comment',
                              'interface': 'LAN_HA',
                              'neighbor_ip': '192.168.1.10',
                              'remote_as': 20}]
                         }
                        ]}]
        bgp_options = {'as': 10,
                       'holddown': 40,
                       'keepalive': 60,
                       'link_detect': False,
                       'authentication_mode': 'MD5',
                       'bgp_neighbor_pass': 'newpass',
                       'comment': 'new_comment',
                       'interface': 'LAN_HA',
                       'neighbor_ip': '192.168.1.25',
                       'remote_as': 30
                       }
        expected_options = {'as': 10,
                            'holddown': 40,
                            'keepalive': 60,
                            'link_detect': False,
                            'neighbors': [
                                {'authentication_mode': 'MD5',
                                 'bgp_neighbor_pass': 'newpass',
                                 'comment': 'new_comment',
                                 'interface': 'LAN_HA',
                                 'neighbor_ip': '192.168.1.25',
                                 'remote_as': 30}]
                            }
        # NOTE(review): old_neighbor_ip is swallowed by
        # _test_configuration_update's unused **kwargs — see note there.
        self._test_bgp_as(members, bgp_options, [expected_options],
                          old_neighbor_ip='192.168.1.10')
    def test_delete_bgp_as(self):
        """delete_bgp_as clears the member's bgp_as list entirely."""
        members = [
            {'_ref': u'member/b35lLnZpcnR1YWxa3fskZSQw:master-113.ft-ac.com',
             'bgp_as': [{'as': 2,
                         'holddown': 15,
                         'keepalive': 20,
                         'link_detect': True}
                        ]}]
        member_name = 'my_member'
        expected_options = {'bgp_as': []}
        connector = mock.Mock()
        connector.get_object.return_value = members
        om = object_manipulator.InfobloxObjectManipulator(connector)
        om.delete_bgp_as(member_name)
        connector.get_object.assert_called_once_with(
            'member', {'host_name': 'my_member'},
            ['bgp_as'], extattrs=None)
        connector.update_object.assert_called_once_with(
            members[0]['_ref'], expected_options)
    def test_create_bgp_neighbor(self):
        """A neighbor is added to the existing AS's empty neighbors list;
        the AS-level fields are preserved."""
        members = [
            {'_ref': u'member/b35lLnZpcnR1YWxa3fskZSQw:master-113.ft-ac.com',
             'bgp_as': [{'as': 2,
                         'holddown': 15,
                         'keepalive': 20,
                         'link_detect': True,
                         'neighbors': []}
                        ]}]
        bgp_options = {'authentication_mode': 'MD5',
                       'bgp_neighbor_pass': 'somepass',
                       'comment': 'comment',
                       'interface': 'LAN_HA',
                       'neighbor_ip': '192.168.1.10',
                       'remote_as': 20}
        expected_option = {'as': 2,
                           'holddown': 15,
                           'keepalive': 20,
                           'link_detect': True,
                           'neighbors': [{'authentication_mode': 'MD5',
                                          'bgp_neighbor_pass': 'somepass',
                                          'comment': 'comment',
                                          'interface': 'LAN_HA',
                                          'neighbor_ip': '192.168.1.10',
                                          'remote_as': 20,
                                          }]}
        self._test_bgp_neighbor(members, bgp_options, [expected_option])
    def test_create_bgp_neighbor_with_existent_neighbor(self):
        """A new neighbor is appended after the AS's existing neighbor."""
        members = [
            {'_ref': u'member/b35lLnZpcnR1YWxa3fskZSQw:master-113.ft-ac.com',
             'bgp_as': [{'as': 2,
                         'holddown': 15,
                         'keepalive': 20,
                         'link_detect': False,
                         'neighbors': [{'authentication_mode': 'MD5',
                                        'bgp_neighbor_pass': 'somepass',
                                        'comment': 'comment',
                                        'interface': 'LAN_HA',
                                        'neighbor_ip': '192.168.1.15',
                                        'remote_as': 20,
                                        }]}
                        ]}]
        bgp_options = {'authentication_mode': 'MD5',
                       'bgp_neighbor_pass': 'new_pass',
                       'comment': 'comment2',
                       'interface': 'LAN_HA',
                       'neighbor_ip': '172.23.2.10',
                       'remote_as': 15}
        expected_option = {'as': 2,
                           'holddown': 15,
                           'keepalive': 20,
                           'link_detect': False,
                           'neighbors': [{'authentication_mode': 'MD5',
                                          'bgp_neighbor_pass': 'somepass',
                                          'comment': 'comment',
                                          'interface': 'LAN_HA',
                                          'neighbor_ip': '192.168.1.15',
                                          'remote_as': 20,
                                          },
                                         {'authentication_mode': 'MD5',
                                          'bgp_neighbor_pass': 'new_pass',
                                          'comment': 'comment2',
                                          'interface': 'LAN_HA',
                                          'neighbor_ip': '172.23.2.10',
                                          'remote_as': 15,
                                          }]}
        self._test_bgp_neighbor(members, bgp_options, [expected_option])
def _test_delete_bgp(self, members, neighbor_ip, expected_options):
member_name = 'my_member'
connector = mock.Mock()
connector.get_object.return_value = members
om = object_manipulator.InfobloxObjectManipulator(connector)
om.delete_bgp_neighbor(member_name, neighbor_ip)
connector.get_object.assert_called_once_with(
'member', {'host_name': 'my_member'},
['bgp_as'], extattrs=None)
connector.update_object.assert_called_once_with(
members[0]['_ref'], expected_options)
def test_delete_bgp_single(self):
neighbor_ip = '192.168.1.15'
members = [
{'_ref': u'member/b35lLnZpcnR1YWxa3fskZSQw:master-113.ft-ac.com',
'bgp_as': [{'as': 2,
'holddown': 15,
'keepalive': 20,
'link_detect': False,
'neighbors': [{'authentication_mode': 'MD5',
'bgp_neighbor_pass': 'somepass',
'comment': 'comment',
'interface': 'LAN_HA',
'neighbor_ip': neighbor_ip,
'remote_as': 20,
}]}]}]
expected_options = {'bgp_as': [{'as': 2,
'holddown': 15,
'keepalive': 20,
'link_detect': False,
'neighbors': []}]}
self._test_delete_bgp(members, neighbor_ip, expected_options)
def test_delete_bgp_multiple(self):
neighbor_ip = '192.168.1.15'
members = [
{'_ref': u'member/b35lLnZpcnR1YWxa3fskZSQw:master-113.ft-ac.com',
'bgp_as': [{'as': 2,
'holddown': 15,
'keepalive': 20,
'link_detect': False,
'neighbors': [{'authentication_mode': 'MD5',
'bgp_neighbor_pass': 'somepass',
'comment': 'comment',
'interface': 'LAN_HA',
'neighbor_ip': neighbor_ip,
'remote_as': 20,
},
{'authentication_mode': 'MD5',
'bgp_neighbor_pass': 'new_pass',
'comment': 'comment2',
'interface': 'LAN_HA',
'neighbor_ip': '172.23.2.10',
'remote_as': 15,
}]}]}]
expected_options = {'bgp_as': [{'as': 2,
'holddown': 15,
'keepalive': 20,
'link_detect': False,
'neighbors': [
{'authentication_mode': 'MD5',
'bgp_neighbor_pass': 'new_pass',
'comment': 'comment2',
'interface': 'LAN_HA',
'neighbor_ip': '172.23.2.10',
'remote_as': 15,
}]}]}
self._test_delete_bgp(members, neighbor_ip, expected_options)
def _test_additional_ip_list(self, members, server_ip_list,
expected_ip_list):
self._test_configuration_update('add_member_dns_additional_ip',
'member:dns', 'additional_ip_list',
members, server_ip_list,
expected_ip_list)
def test_add_member_dns_additional_ip(self):
anycast_ip = '192.168.1.15'
members = [
{'_ref': u'member/b35lLnZpcnR1YWxa3fskZSQw:master-113.ft-ac.com',
'additional_ip_list': []}]
expected_options = [anycast_ip]
self._test_additional_ip_list(members, anycast_ip, expected_options)
def test_add_member_dns_additional_ip_existent_ips(self):
anycast_ip = '192.168.1.15'
members = [
{'_ref': u'member/b35lLnZpcnR1YWxa3fskZSQw:master-113.ft-ac.com',
'additional_ip_list': ['172.23.23.13']}]
expected_options = ['172.23.23.13', anycast_ip]
self._test_additional_ip_list(members, anycast_ip, expected_options)
def _test_remove_ip_list(self, members, server_ip_list,
expected_ip_list):
self._test_configuration_update('remove_member_dns_additional_ip',
'member:dns', 'additional_ip_list',
members, server_ip_list,
expected_ip_list)
def test_remove_member_dns_additional_ip(self):
anycast_ip = '192.168.1.15'
members = [
{'_ref': u'member/b35lLnZpcnR1YWxa3fskZSQw:master-113.ft-ac.com',
'additional_ip_list': [anycast_ip]}]
expected_options = []
self._test_remove_ip_list(members, anycast_ip, expected_options)
def test_remove_member_dns_additional_ip_multiple_ips(self):
anycast_ip = '192.168.1.15'
members = [
{'_ref': u'member/b35lLnZpcnR1YWxa3fskZSQw:master-113.ft-ac.com',
'additional_ip_list': ['14.53.23.3', anycast_ip]}]
expected_options = ['14.53.23.3']
self._test_remove_ip_list(members, anycast_ip, expected_options)
| |
from magma import *
from parts.lattice.ice40.primitives.PLB import *
from mantle.lattice.mantle40.LUT import LUT2, LUT3
from mantle.lattice.mantle40.logic import Not
__all__ = ['DFF', 'SRFF', 'RSFF', 'JKFF', 'TFF']
__all__ += ['FF', 'FFs']
#
# TODO: add async=True, edge=True (also negedge)
#
def DFF(init=0, ce=False, r=False, s=False, edge=True, sync=True, **kwargs):
    """Construct an iCE40 D flip-flop from the SB_DFF* primitive family.

    Args:
        init: initial value (0 or 1). iCE40 FFs always power up to 0, so
            init=1 is emulated by inverting the FF's input and output.
        ce: add a clock-enable input (CE).
        r: add a reset input (RESET) -- mutually exclusive with ``s``.
        s: add a set input (SET) -- mutually exclusive with ``r``.
        edge: True selects rising-edge clocking, False selects the
            falling-edge SB_DFFN* variants.
        sync: True selects synchronous reset/set, False asynchronous.

    Returns:
        AnonymousCircuit with ports I, CLK, O and optionally CE/RESET/SET.
    """
    assert not (r and s)

    # By default
    #   not connecting a wire to D defaults to 0
    #   not connecting a wire to C defaults to 0
    #   not connecting a wire to R defaults to 0
    #   not connecting a wire to E defaults to 1
    # this is better than connecting a 1 to E,
    # which causes that signal to be generated

    # Primitive names follow the pattern:
    #   SB_DFF + [N: negedge] + [E: clock enable]
    #          + [S: synchronous, only with R/S] + [R: reset | S: set]
    if edge:
        # rising edge
        if sync:
            # synchronous reset
            if ce:
                if r:
                    ff = SB_DFFESR(**kwargs)
                elif s:
                    ff = SB_DFFESS(**kwargs)
                else:
                    ff = SB_DFFE(**kwargs)
            else:
                if r:
                    ff = SB_DFFSR(**kwargs)
                elif s:
                    ff = SB_DFFSS(**kwargs)
                else:
                    ff = SB_DFF(**kwargs)
        else:
            # asynchronous reset
            if ce:
                if r:
                    ff = SB_DFFER(**kwargs)
                elif s:
                    ff = SB_DFFES(**kwargs)
                else:
                    # no R/S present: sync/async makes no difference,
                    # so the same primitive as the sync case is used
                    ff = SB_DFFE(**kwargs)
            else:
                if r:
                    ff = SB_DFFR(**kwargs)
                elif s:
                    ff = SB_DFFS(**kwargs)
                else:
                    ff = SB_DFF(**kwargs)
    else:
        # falling edge
        if sync:
            # synchronous reset
            if ce:
                if r:
                    ff = SB_DFFNESR(**kwargs)
                elif s:
                    ff = SB_DFFNESS(**kwargs)
                else:
                    ff = SB_DFFNE(**kwargs)
            else:
                if r:
                    ff = SB_DFFNSR(**kwargs)
                elif s:
                    ff = SB_DFFNSS(**kwargs)
                else:
                    ff = SB_DFFN(**kwargs)
        else:
            # asynchronous reset
            if ce:
                if r:
                    ff = SB_DFFNER(**kwargs)
                elif s:
                    ff = SB_DFFNES(**kwargs)
                else:
                    # no R/S present: same primitive as the sync case
                    ff = SB_DFFNE(**kwargs)
            else:
                if r:
                    ff = SB_DFFNR(**kwargs)
                elif s:
                    ff = SB_DFFNS(**kwargs)
                else:
                    ff = SB_DFFN(**kwargs)

    I = ff.D
    O = ff.Q

    # ice40 flip-flops are always initialized to 0
    if init:
        # if init=1, then insert Not before and after the flip-flop
        luti = Not()
        luto = Not()
        wire(luti.O, ff.D)
        wire(ff.Q, luto.I0)
        I = luti.I0
        O = luto.O

    # assemble the exported interface: I/CLK always, CE/RESET/SET on demand
    args = ['I', I, 'CLK', ff.C]
    if ce:
        args += ['CE', ff.E]
    if r:
        args += ['RESET', ff.R]
    if s:
        args += ['SET', ff.S]
    args += ['O', O]
    return AnonymousCircuit(args)

# short alias used throughout mantle
FF = DFF
def SRFF(init=0, ce=False, edge=True, sync=True, **kwargs):
    """A S-R flip-flop.

    The feedback LUT computes next = ~R & (S | Q), so R dominates when
    both S and R are asserted.
    """
    dff = FF( init=init, ce=ce, edge=edge, sync=sync, **kwargs)
    # I0 = Q (feedback), I1 = S, I2 = R
    lut = LUT3( (~I1&~I2&I0)|(I1&~I2), **kwargs )
    dff(lut)
    wire(dff.O, lut.I0)
    args = []
    if ce:
        # NOTE(review): 'input CE' differs from the plain 'CE' name used
        # by JKFF/TFF -- confirm which form AnonymousCircuit expects
        args += ['input CE', dff.CE]
    args += ["S", lut.I1, "R", lut.I2, 'CLK', dff.CLK, "O", dff.O]
    return AnonymousCircuit(args)
def RSFF(init=0, ce=False, edge=True, sync=True, **kwargs):
    """A R-S flip-flop.

    Same construction as SRFF (next = ~R & (S | Q)) with the R and S ports
    listed in the opposite order on the exported interface.
    NOTE(review): the LUT is identical to SRFF's, i.e. R dominates here
    too -- confirm this is the intended R-S (set-dominant?) semantics.
    """
    dff = FF( init=init, ce=ce, edge=edge, sync=sync, **kwargs)
    # I0 = Q (feedback), I1 = S, I2 = R
    lut = LUT3( (~I1&~I2&I0)|(I1&~I2), **kwargs )
    dff(lut)
    wire(dff.O, lut.I0)
    args = []
    if ce:
        # NOTE(review): 'input CE' differs from the plain 'CE' name used
        # by JKFF/TFF -- confirm which form AnonymousCircuit expects
        args += ['input CE', dff.CE]
    args += ["R", lut.I2, "S", lut.I1, 'CLK', dff.CLK, "O", dff.O]
    return AnonymousCircuit(args)
def JKFF(ce=False, s=False, r=False, edge=True, sync=True, **kwargs):
    """A J-K flip-flop.

    The feedback LUT computes next = (~Q & J) | (Q & ~K): J sets, K
    resets, and J=K=1 toggles the output.
    """
    dff = FF(ce=ce, s=s, r=r, edge=edge, sync=sync, **kwargs)
    # I0 = Q (feedback), I1 = J, I2 = K
    lut = LUT3( (~I0&I1)|(I0&~I2), **kwargs )
    dff(lut)
    wire(dff.O, lut.I0)
    args = ["J", lut.I1, "K", lut.I2, "O", dff.O, 'CLK', dff.CLK]
    if ce: args += ['CE', dff.CE]
    if r: args += ['RESET', dff.R]
    if s: args += ['SET', dff.S]
    # NOTE(review): unpacks the list, while DFF/SRFF/RSFF pass the list
    # itself -- confirm both calling conventions are accepted
    return AnonymousCircuit(*args)
def TFF(ce=False, s=False, r=False, edge=True, sync=True, **kwargs):
    """A T (toggle) flip-flop.

    The feedback LUT computes next = T ^ Q: the output toggles on every
    clock edge where T is asserted.
    """
    tff = FF(ce=ce, s=s, r=r, edge=edge, sync=sync, **kwargs)
    # I0 = Q (feedback), I1 = T
    lut = LUT2( I0^I1, **kwargs )
    tff(lut)
    wire(tff.O, lut.I0)
    args = ["I", lut.I1, "O", tff.O, "CLK", tff.CLK]
    # bug fix: these lines previously referenced the undefined name
    # ``dff`` (copied from JKFF) and raised NameError whenever ce/r/s
    # was requested; the flip-flop here is named ``tff``.
    if ce: args += ['CE', tff.CE]
    if r: args += ['RESET', tff.R]
    if s: args += ['SET', tff.S]
    return AnonymousCircuit(*args)
#
# Create a column of n FFs
#
# Each FF may have a ce, r, and s signal.
#
def FFs(n, init=0, ce=False, r=False, s=False, edge=True, sync=True):
    """Create a column of ``n`` flip-flops.

    ``init`` may be a sequence of per-bit initial values or an integer
    whose bit ``y`` initializes flip-flop ``y``. Each FF may have a ce,
    r, and s signal.
    """
    def f(y):
        if isinstance(init, Sequence):
            data = init[y]
        else:
            data = (init >> y) & 1
        # use integer division for the placement row: under Python 3,
        # ``y / 8`` yields a float, which is not a valid logic-cell
        # coordinate (8 FFs per logic block -> row y//8, slot y%8)
        return FF(init=data, ce=ce, r=r, s=s, edge=edge, sync=sync,
                  loc=(0, y // 8, y % 8))
    return col(f, n)
| |
# -*- coding: utf-8 -*-
import json
import base64
import binascii
import re
import abc
try:
from functools import singledispatch
except ImportError: # pragma: nocover
# future: remove when dropping support for Python 3.3
# compat: backport of singledispatch module introduced in Python 3.4
from singledispatch import singledispatch
from falcon import HTTPMissingHeader, HTTPBadRequest
class BaseUserStorage(metaclass=abc.ABCMeta):
    """Abstract base class defining the required user storage API.

    All built-in graceful authentication middleware classes expect user
    storages to expose a compatible interface. Custom authentication
    middlewares do not need to use storages.

    .. versionadded:: 0.4.0
    """

    @abc.abstractmethod
    def get_user(
        self, identified_with, identifier, req, resp, resource, uri_kwargs
    ):
        """Retrieve a user object from the storage.

        Args:
            identified_with (str): instance of the authentication middleware
                that provided the ``identifier`` value.
            identifier (str): string that identifies the user (it is specific
                for every authentication middleware implementation).
            req (falcon.Request): the request object.
            resp (falcon.Response): the response object.
            resource (object): the resource object.
            uri_kwargs (dict): keyword arguments from the URI template.

        Returns:
            the deserialized user object. Preferably a ``dict`` but it is
            application-specific.
        """
        raise NotImplementedError  # pragma: nocover

    @classmethod
    def __subclasshook__(cls, klass):
        """Accept any class providing ``get_user`` as an implicit subclass."""
        if cls is not BaseUserStorage:
            return NotImplemented
        for base in klass.__mro__:
            if "get_user" in base.__dict__:
                return True
        return NotImplemented
class DummyUserStorage(BaseUserStorage):
    """User storage stub that always returns the preconfigured user.

    This storage is part of the :any:`Anonymous` authentication middleware.
    It may also be useful for testing purposes or to disable specific
    authentication middlewares through app configuration.

    Args:
        user: user object to return on every lookup. The default of
            ``None`` means authentication will never succeed.

    .. versionadded:: 0.4.0
    """

    def __init__(self, user=None):
        """Remember the user object to hand out on every lookup."""
        self.user = user

    def get_user(
        self, identified_with, identifier, req, resp, resource, uri_kwargs
    ):
        """Return the preconfigured user object, ignoring all arguments."""
        return self.user
class IPRangeWhitelistStorage(BaseUserStorage):
    """Simple storage dedicated for :any:`XForwardedFor` authentication.

    This storage expects that the authentication middleware returns the
    client address from its ``identify()`` method. For example usage see
    :any:`XForwardedFor`.

    Because it is an IP range whitelist, this storage cannot distinguish
    different users' IPs and always returns the default user object. If you
    want to identify different users by their IP see
    :any:`KeyValueUserStorage`.

    Args:
        ip_range: any object that supports the ``in`` operator (i.e.
            implements the ``__contains__`` method). ``__contains__`` should
            return ``True`` if the identifier falls into the specified
            whitelist. Tip: use ``iptools``.
        user: default user object to return on successful authentication.

    .. versionadded:: 0.4.0
    """

    def __init__(self, ip_range, user):
        """Store the whitelist and the default user object."""
        self.ip_range = ip_range
        self.user = user

    def get_user(
        self, identified_with, identifier, req, resp, resource, uri_kwargs
    ):
        """Return the default user when ``identifier`` is whitelisted.

        .. note::
            This implementation expects that ``identifier`` is a user
            address.
        """
        return self.user if identifier in self.ip_range else None
class KeyValueUserStorage(BaseUserStorage):
    """Basic user storage using any key-value store as authentication backend.

    Client identities are stored as string under keys matching following
    template::

        <key_prefix>:<identified_with>:<identifier>

    Where:

    * ``<key_prefix>`` is the configured key prefix (same as the initialization
      argument),
    * ``<identified_with>`` is the name of authentication middleware that
      provided user identifier,
    * ``<identifier>`` is the identifier object that identifies the user.

    Note that this key scheme will work only for middlewares that return
    identifiers as single string objects. Also the ``<identifier>`` part
    of the key template is a plain text value without any hashing algorithm
    applied. It may not be secure enough to store user secrets that way.
    If you want to use this storage with middleware that uses more complex
    identifier format/objects (e.g. the :any:`Basic` class) you will have
    to register your own identifier format in the :any:`hash_identifier`
    method. For details see the :any:`hash_identifier` method docstring or
    the :ref:`practical example <auth-practical-example>` section of the
    documentation.

    Args:
        kv_store: Key-value store client instance (e.g. Redis client object).
            The ``kv_store`` must provide at least two methods: ``get(key)``
            and ``set(key, value)``. The arguments and return values of these
            methods must be strings.
        key_prefix: key prefix used to store client identities.
        serialization: serialization object/module that uses the
            ``dumps()``/``loads()`` protocol. Defaults to ``json``.

    .. versionadded:: 0.4.0
    """

    def __init__(self, kv_store, key_prefix='users', serialization=None):
        """Initialize kv_store user storage."""
        self.kv_store = kv_store
        self.key_prefix = key_prefix
        # default to the json module's dumps()/loads() protocol
        self.serialization = serialization or json

    def _get_storage_key(self, identified_with, identifier):
        """Get key string for given user identifier in consistent manner."""
        return ':'.join((
            self.key_prefix, identified_with.name,
            self.hash_identifier(identified_with, identifier),
        ))

    # note: @staticmethod wraps @singledispatch so the dispatch registry
    # is shared on the class while calls still work without an instance
    @staticmethod
    @singledispatch
    def hash_identifier(identified_with, identifier):
        """Create hash from identifier to be used as a part of user lookup.

        This method is a ``singledispatch`` function. It allows to register
        new implementations for specific authentication middleware classes:

        .. code-block:: python

            from hashlib import sha1

            from graceful.authentication import KeyValueUserStorage, Basic

            @KeyValueUserStorage.hash_identifier.register(Basic)
            def _(identified_with, identifier):
                return ":".join((
                    identifier[0],
                    sha1(identifier[1].encode()).hexdigest(),
                ))

        Args:
            identified_with (str): name of the authentication middleware used
                to identify the user.
            identifier (str): user identifier string

        Return:
            str: hashed identifier string
        """
        # base implementation: plain strings pass through unhashed;
        # anything else requires a registered overload
        if isinstance(identifier, str):
            return identifier
        else:
            raise TypeError(
                "User storage does not support this kind of identifier"
            )

    def get_user(
        self, identified_with, identifier, req, resp, resource, uri_kwargs
    ):
        """Get user object for given identifier.

        Args:
            identified_with (object): authentication middleware used
                to identify the user.
            identifier: middleware-specific user identifier (string or tuple
                in case of all built in authentication middleware classes).

        Returns:
            dict: user object stored in the key-value store if it exists,
            otherwise ``None``
        """
        stored_value = self.kv_store.get(
            self._get_storage_key(identified_with, identifier)
        )
        if stored_value is not None:
            user = self.serialization.loads(stored_value.decode())
        else:
            user = None
        return user

    def register(self, identified_with, identifier, user):
        """Register new key for given client identifier.

        This is only a helper method that allows to register new
        user objects for client identities (keys, tokens, addresses etc.).

        Args:
            identified_with (object): authentication middleware used
                to identify the user.
            identifier (str): user identifier.
            user: user object to be serialized and stored in the backend.
        """
        self.kv_store.set(
            self._get_storage_key(identified_with, identifier),
            self.serialization.dumps(user).encode(),
        )
class BaseAuthenticationMiddleware:
    """Base class for all authentication middleware classes.

    Args:
        user_storage (BaseUserStorage): a storage object used to retrieve
            user object using their identifier lookup.
        name (str): custom name of the authentication middleware useful
            for handling custom user storage backends. Defaults to middleware
            class name.

    .. versionadded:: 0.4.0
    """

    #: challenge returned in WWW-Authenticate header on non authorized
    #: requests.
    challenge = None

    #: defines if Authentication middleware requires valid storage
    #: object to identify users
    only_with_storage = False

    def __init__(self, user_storage=None, name=None):
        """Initialize authentication middleware."""
        self.user_storage = user_storage
        self.name = (
            name if name else self.__class__.__name__
        )
        # fail fast at configuration time rather than on first request
        if (
            self.only_with_storage and
            not isinstance(self.user_storage, BaseUserStorage)
        ):
            raise ValueError(
                "{} authentication middleware requires valid storage. Got {}."
                "".format(self.__class__.__name__, self.user_storage)
            )

    def process_resource(self, req, resp, resource, uri_kwargs=None):
        """Process resource after routing to it.

        This is basic falcon middleware handler. It identifies the client,
        resolves the user through the configured storage and either places
        the user in the request context or records this middleware's
        challenge for the WWW-Authenticate header.

        Args:
            req (falcon.Request): request object
            resp (falcon.Response): response object
            resource (object): resource object matched by falcon router
            uri_kwargs (dict): additional keyword argument from uri template.
                For ``falcon<1.0.0`` this is always ``None``
        """
        # an earlier authentication middleware already resolved the user
        if 'user' in req.context:
            return

        identifier = self.identify(req, resp, resource, uri_kwargs)
        user = self.try_storage(identifier, req, resp, resource, uri_kwargs)

        if user is not None:
            req.context['user'] = user

        # if did not succeed then we need to add this to list of available
        # challenges.
        elif self.challenge is not None:
            req.context.setdefault(
                'challenges', list()
            ).append(self.challenge)

    def identify(self, req, resp, resource, uri_kwargs):
        """Identify the user that made the request.

        Args:
            req (falcon.Request): request object
            resp (falcon.Response): response object
            resource (object): resource object matched by falcon router
            uri_kwargs (dict): additional keyword argument from uri template.
                For ``falcon<1.0.0`` this is always ``None``

        Returns:
            object: a user object (preferably a dictionary).
        """
        raise NotImplementedError  # pragma: nocover

    def try_storage(self, identifier, req, resp, resource, uri_kwargs):
        """Try to find user in configured user storage object.

        Args:
            identifier: User identifier.

        Returns:
            user object.
        """
        if identifier is None:
            user = None

        # note: if user_storage is defined, always use it in order to
        #       authenticate user.
        elif self.user_storage is not None:
            user = self.user_storage.get_user(
                self, identifier, req, resp, resource, uri_kwargs
            )

        # note: some authentication middleware classes may not require
        #       to be initialized with their own user_storage. In such
        #       case this will always authenticate with a "synthetic user"
        #       if there is a valid identity.
        elif self.user_storage is None and not self.only_with_storage:
            user = {
                'identified_with': self,
                'identifier': identifier
            }

        else:  # pragma: nocover
            # note: this should not happen if the base class is properly
            #       initialized. Still, user can skip super().__init__() call.
            user = None

        return user
class Basic(BaseAuthenticationMiddleware):
    """Authenticate user with Basic auth as specified by `RFC 7617`_.

    Basic authentication takes the form of an ``Authorization`` header::

        Authorization: Basic <credentials>

    Where ``<credentials>`` is a base64 encoded username and password
    separated by a single colon character (refer to the official RFC).
    Usernames must not contain colon characters!

    If the client fails to authenticate on a protected endpoint the
    response will include the following challenge::

        WWW-Authenticate: Basic realm="<realm>"

    Where ``<realm>`` is the value of the configured authentication realm.

    This middleware **must** be configured with ``user_storage`` that
    provides access to a database of client credentials and their
    identities. Additionally, the ``identifier`` received by the user
    storage in the ``get_user()`` method is a decoded
    ``<username>:<password>`` string. If you need to apply any hash function
    before hitting the database in your user storage handler, you should
    split it using the following code::

        username, _, password = identifier.partition(":")

    Args:
        realm (str): name of the protected realm. This can be only an
            alphanumeric string with spaces (see: the ``REALM_RE`` pattern).
        user_storage (BaseUserStorage): a storage object used to retrieve
            user object using their identifier lookup.
        name (str): custom name of the authentication middleware useful
            for handling custom user storage backends. Defaults to middleware
            class name.

    .. versionadded:: 0.4.0

    .. _RFC 7617: https://tools.ietf.org/html/rfc7617
    """

    only_with_storage = True

    #: regular expression used to validate configured realm
    REALM_RE = re.compile(r"^[\w ]+$")

    def __init__(self, user_storage=None, name=None, realm="api"):
        """Initialize middleware and validate realm string."""
        if not self.REALM_RE.match(realm):
            raise ValueError(
                "realm argument should match '{}' regular expression"
                "".format(self.REALM_RE.pattern)
            )
        # bug fix: the realm parameter of a WWW-Authenticate challenge is a
        # quoted-string (RFC 7235 section 2.2); the previous unquoted form
        # produced an invalid header value whenever the realm contained a
        # space (which REALM_RE explicitly allows) and contradicted the
        # quoted form documented in the class docstring.
        self.challenge = 'Basic realm="{}"'.format(realm)
        super(Basic, self).__init__(user_storage, name)

    def identify(self, req, resp, resource, uri_kwargs):
        """Identify user using Authorization header with Basic auth.

        Returns:
            tuple: ``(username, password)`` pair, or ``None`` when the
            header is absent or uses a different scheme.

        Raises:
            HTTPBadRequest: when the Basic credentials are malformed or not
                valid base64.
        """
        header = req.get_header("Authorization", False)
        auth = header.split(" ") if header else None

        if auth is None or auth[0].lower() != 'basic':
            return None

        if len(auth) != 2:
            raise HTTPBadRequest(
                "Invalid Authorization header",
                "The Authorization header for Basic auth should be in form:\n"
                "Authorization: Basic <base64-user-pass>"
            )

        user_pass = auth[1]
        try:
            # decode returns str; binascii.Error covers bad base64 padding
            decoded = base64.b64decode(user_pass).decode()
        except (TypeError, UnicodeDecodeError, binascii.Error):
            raise HTTPBadRequest(
                "Invalid Authorization header",
                "Credentials for Basic auth not correctly base64 encoded."
            )

        username, _, password = decoded.partition(":")
        return username, password
class XAPIKey(BaseAuthenticationMiddleware):
    """Authenticate user with the ``X-Api-Key`` header.

    The X-Api-Key authentication takes the form of an ``X-Api-Key`` header::

        X-Api-Key: <key_value>

    Where ``<key_value>`` is a secret string known to both client and
    server. Example of a valid header::

        X-Api-Key: 6fa459ea-ee8a-3ca4-894e-db77e160355e

    If the client fails to authenticate on a protected endpoint the
    response will include the following challenge::

        WWW-Authenticate: X-Api-Key

    .. note::
        This method is functionally equivalent to :any:`Token` and is
        included only to ease the migration of older applications that used
        such an authentication method in the past. If you're building a new
        API and require only simple token-based authentication you should
        prefer the :any:`Token` middleware.

    This middleware **must** be configured with ``user_storage`` that
    provides access to a database of client API keys and their identities.

    .. versionadded:: 0.4.0
    """

    challenge = 'X-Api-Key'
    only_with_storage = True

    def identify(self, req, resp, resource, uri_kwargs):
        """Return the ``X-Api-Key`` header value, or ``None`` when absent."""
        try:
            return req.get_header('X-Api-Key', True)
        except (KeyError, HTTPMissingHeader):
            return None
class Token(BaseAuthenticationMiddleware):
    """Authenticate user using Token authentication.

    Token authentication takes the form of an ``Authorization`` header::

        Authorization: Token <token_value>

    Where ``<token_value>`` is a secret string known to both client and
    server. Example of a valid header::

        Authorization: Token 6fa459ea-ee8a-3ca4-894e-db77e160355e

    If the client fails to authenticate on a protected endpoint the
    response will include the following challenge::

        WWW-Authenticate: Token

    This middleware **must** be configured with ``user_storage`` that
    provides access to a database of client tokens and their identities.

    .. versionadded:: 0.4.0
    """

    challenge = 'Token'
    only_with_storage = True

    def identify(self, req, resp, resource, uri_kwargs):
        """Identify user using Authenticate header with Token auth."""
        header = req.get_header('Authorization', False)
        if not header:
            return None

        parts = header.split(' ')
        if parts[0].lower() != 'token':
            return None

        if len(parts) != 2:
            raise HTTPBadRequest(
                "Invalid Authorization header",
                "The Authorization header for Token auth should be in form:\n"
                "Authorization: Token <token_value>"
            )

        return parts[1]
class XForwardedFor(BaseAuthenticationMiddleware):
    """Authenticate user with ``X-Forwarded-For`` header or remote address.

    Args:
        remote_address_fallback (bool): fall back to the ``REMOTE_ADDR``
            value from the WSGI environment dictionary when the
            ``X-Forwarded-For`` header is not available. Defaults to
            ``False``.

    This authentication middleware is usually used with the
    :any:`IPRangeWhitelistStorage` e.g:

    .. code-block:: python

        from iptools import IPRangeList
        import falcon

        from graceful import authentication

        IP_WHITELIST = IpRangeList(
            '127.0.0.1',
            # ...
        )

        auth_middleware = authentication.XForwardedFor(
            user_storage=authentication.IPRangeWhitelistStorage(
                IP_WHITELIST, user={"username": "internal"}
            )
        )

        api = application = falcon.API(middleware=[auth_middleware])

    .. note::
        Using this middleware class is **strongly discouraged** if you
        are not able to ensure that the contents of the ``X-Forwarded-For``
        header can be trusted. This requires proper reverse proxy and
        network configuration. It is also recommended to at least use the
        static :any:`IPRangeWhitelistStorage` as the user storage.

    .. versionadded:: 0.4.0
    """

    challenge = None
    only_with_storage = False

    def __init__(
        self, user_storage=None, name=None, remote_address_fallback=False
    ):
        """Initialize middleware and store the fallback flag."""
        super().__init__(user_storage, name)
        self.remote_address_fallback = remote_address_fallback

    def _get_client_address(self, req):
        """Return the client address for the given request.

        The first (client-most) entry of ``X-Forwarded-For`` wins; the
        remote address is consulted only when the header is missing and
        the fallback was enabled. Note that neither value is safe to
        depend on without a proper authorization backend.

        Args:
            req (falcon.Request): falcon.Request object.

        Returns:
            str: client address, or ``None`` when it cannot be determined.
        """
        try:
            forwarded_for = req.get_header('X-Forwarded-For', True)
        except (KeyError, HTTPMissingHeader):
            if self.remote_address_fallback:
                return req.env.get('REMOTE_ADDR')
            return None
        return forwarded_for.split(',')[0].strip()

    def identify(self, req, resp, resource, uri_kwargs):
        """Identify the client by its network address."""
        return self._get_client_address(req)
class Anonymous(BaseAuthenticationMiddleware):
    """Dummy authentication middleware that authenticates every request.

    It makes every request authenticated with the default value of the
    anonymous user. This authentication middleware may be used in order
    to simplify custom authorization code since it will ensure that
    every request context has the ``'user'`` variable defined.

    .. note::
        This middleware will always add the default user to the request
        context if no other previous authentication middleware resolved.
        So if this middleware is used it makes no sense to:

        * Use the :any:`authentication_required` decorator.
        * Define any other authentication middleware after this one.

    Args:
        user: Default anonymous user object.

    .. versionadded:: 0.4.0
    """

    challenge = None
    only_with_storage = True

    def __init__(self, user):
        """Initialize anonymous authentication middleware."""
        # DummyUserStorage always hands back the same user object that
        # was provided as its initialization value
        super().__init__(user_storage=DummyUserStorage(user))

    def identify(self, req, resp, resource, uri_kwargs):
        """Return a sentinel that always triggers a storage lookup."""
        # any non-None value works here; the dummy storage ignores it
        return ...
| |
import sys
import os
import string
import appdirs
import tempfile
import random
import sqlite3
import datetime
import logging
import platform
from pathlib import Path
from functools import lru_cache
import json
from configVar import config_vars
from pybatch import PythonBatchRuntime
from pyinstl.cmdOptions import CommandLineOptions, read_command_line_options
from pyinstl.instlException import InstlException
import utils
#utils.set_max_open_files(2048)
from utils.log_utils import config_logger
# root logger for the whole process; verbosity is refined later by
# config_logger() once command line options are known
log = logging.getLogger()
log.setLevel(logging.DEBUG)

# cache the platform names once at import time:
# current_os_names[0] is the OS family name (e.g. "Mac"/"Win"/"Linux");
# current_os_names[1], when present, is a more specific second name
current_os_names = utils.get_current_os_names()
os_family_name = current_os_names[0]
# fall back to the family name when no second name is reported
os_second_name = current_os_names[0]
if len(current_os_names) > 1:
    os_second_name = current_os_names[1]
@lru_cache(maxsize=None)
def get_path_to_instl_app():
    """Return the resolved path of the instl application.

    When running frozen (compiled) this is the executable itself; when
    running from source it is the 'instl' entry next to this file's
    grandparent folder. Cached, since the answer never changes during a run.
    """
    if getattr(sys, 'frozen', False):
        return Path(sys.executable).resolve()
    if __file__:
        return Path(__file__).resolve().parent.parent.joinpath('instl')
    return None
@lru_cache(maxsize=None)
def get_instl_launch_command():
    """Return a quoted shell command that re-launches this instl instance."""
    exec_path = get_path_to_instl_app()
    if getattr(sys, 'frozen', False):
        # frozen build: the executable alone is the whole command
        return utils.quoteme_double(os.fspath(exec_path))
    if __file__:
        quoted_script = utils.quoteme_double(os.fspath(exec_path))
        if os_family_name == "Win":
            # on Windows the interpreter must be named explicitly
            return " ".join((utils.quoteme_double(sys.executable), quoted_script))
        return quoted_script
    return None
@lru_cache(maxsize=None)
def get_data_folder():
    """Return the folder holding instl data folders such as defaults or help.

    The data folder is the instl folder containing either instl (when
    running directly from python) or instl.exe (when running frozen); in
    both cases that is the parent of the application path.
    """
    return Path(get_path_to_instl_app()).parent
class InvocationReporter(PythonBatchRuntime):
    """Runtime context that logs begin/end markers for one instl invocation.

    A random per-invocation tag makes the marker pair unique, so separate
    invocations can be told apart in shared log files.
    """
    def __init__(self, argv, **kwargs) -> None:
        super().__init__(name="InvocationReporter", **kwargs) #TODO: ask Shai about the name arg
        # wall-clock start; logged in enter_self and paired with the
        # end time in exit_self
        self.start_time = datetime.datetime.now()
        # random tag making this invocation's log markers unique
        self.random_invocation_name = ''.join(random.choice(string.ascii_lowercase) for i in range(16))
        self.argv = argv.copy()  # argument argv is usually sys.argv, which might change with recursive process calls
    def enter_self(self) -> None:
        """Log the invocation header; logging is best-effort and never raises."""
        try:
            # make sure the env vars used for log-file routing exist
            vendor_name = os.environ.setdefault("VENDOR_NAME", "Waves Audio")
            app_name = os.environ.setdefault("APPLICATION_NAME", "Waves Central")
            config_logger(argv=self.argv, config_vars=config_vars)
            log.debug(f"===== {self.random_invocation_name} =====")
            log.debug(f"Start: {self.start_time}")
            log.debug(f"instl: {self.argv[0]}")
            log.debug(f'argv: {" ".join(self.argv[1:])}')
        except Exception as e:
            log.warning(f'instl log file report start failed - {e}')
    def exit_self(self, exit_return) -> None:
        """Log the invocation footer; logging is best-effort and never raises."""
        # self.doing = self.doing if self.doing else utils.get_latest_action_from_stack()
        try:
            end_time = datetime.datetime.now()
            # command_time_sec presumably computed by PythonBatchRuntime -- TODO confirm
            log.debug(f"Run time: {self.command_time_sec}")
            log.debug(f"End: {end_time}")
            log.debug(f"===== {self.random_invocation_name} =====")
        except Exception as e:
            log.warning(f'InvocationReporter.__exit__ internal exception - {e}')
def instl_own_main(argv):
""" Main instl entry point. Reads command line options and decides if to go into interactive or client mode.
"""
with InvocationReporter(argv, report_own_progress=False):
argv = argv.copy() # argument argv is usually sys.argv, which might change with recursive process calls
options = CommandLineOptions()
command_names = read_command_line_options(options, argv[1:])
initial_vars = {"__INSTL_EXE_PATH__": get_path_to_instl_app(),
"__CURR_WORKING_DIR__": utils.safe_getcwd(), # the working directory when instl was launched
"__INSTL_LAUNCH_COMMAND__": get_instl_launch_command(),
"__INSTL_DATA_FOLDER__": get_data_folder(),
"__INSTL_DEFAULTS_FOLDER__": "$(__INSTL_DATA_FOLDER__)/defaults",
"__INSTL_COMPILED__": str(getattr(sys, 'frozen', False)),
"__PYTHON_VERSION__": sys.version_info,
"__PLATFORM_NODE__": platform.node(),
"__PYSQLITE3_VERSION__": sqlite3.version,
"__SQLITE_VERSION__": sqlite3.sqlite_version,
"__COMMAND_NAMES__": command_names,
"__CURRENT_OS__": os_family_name,
"__CURRENT_OS_SECOND_NAME__": os_second_name,
"__CURRENT_OS_NAMES__": current_os_names,
"__CURRENT_OS_DESCRIPTION__": utils.get_os_description(),
"__SITE_DATA_DIR__": os.path.normpath(appdirs.site_data_dir()),
"__SITE_CONFIG_DIR__": os.path.normpath(appdirs.site_config_dir()),
"__USER_DATA_DIR__": os.path.normpath(appdirs.user_data_dir()),
"__USER_CONFIG_DIR__": os.path.normpath(appdirs.user_config_dir()),
"__USER_HOME_DIR__": os.path.normpath(os.path.expanduser("~")),
"__USER_DESKTOP_DIR__": os.path.normpath("$(__USER_HOME_DIR__)/Desktop"),
"__USER_TEMP_DIR__": os.path.normpath(os.path.join(tempfile.gettempdir(), "$(SYNC_BASE_URL_MAIN_ITEM)/$(REPO_NAME)")),
"__SYSTEM_LOG_FILE_PATH__": utils.get_system_log_file_path(),
"__INVOCATION_RANDOM_ID__": ''.join(random.choice(string.ascii_lowercase) for _ in range(16)),
"__SUDO_USER__": os.environ.get("SUDO_USER", "no set"),
# VENDOR_NAME, APPLICATION_NAME need to be set so logging can be redirected to the correct folder
"VENDOR_NAME": os.environ.get("VENDOR_NAME", "Waves Audio"),
"APPLICATION_NAME": os.environ.get("APPLICATION_NAME", "Waves Central"),
"__ARGV__": argv,
"ACTING_UID": -1,
"ACTING_GID": -1,
}
if os_family_name != "Win":
initial_vars.update(
{"__USER_ID__": str(os.getuid()),
"__GROUP_ID__": str(os.getgid())})
else:
initial_vars.update(
{"__USER_ID__": -1,
"__GROUP_ID__": -1,
"__WHO_LOCKS_FILE_DLL_PATH__": "$(__INSTL_DATA_FOLDER__)/who_locks_file.dll"})
instance = None
if options.__MAIN_COMMAND__ == "command-list":
from pyinstl.instlCommandList import run_commands_from_file
run_commands_from_file(initial_vars, options)
elif options.mode == "client": #shai, maybe add a log here? before all imports
log.debug("begin, importing instl object") #added by oren
from pyinstl.instlClient import InstlClientFactory
instance = InstlClientFactory(initial_vars, options.__MAIN_COMMAND__)
instance.progress("welcome to instl", instance.get_version_str(short=True), options.__MAIN_COMMAND__)
instance.init_from_cmd_line_options(options)
instance.do_command()# after all preprartion is done, we execute the command itself
elif options.mode == "doit":
from pyinstl.instlDoIt import InstlDoIt
instance = InstlDoIt(initial_vars)
instance.progress("welcome to instl", instance.get_version_str(short=True), options.__MAIN_COMMAND__)
instance.init_from_cmd_line_options(options)
instance.do_command()
elif options.mode == "do_something":
from pyinstl.instlMisc import InstlMisc
instance = InstlMisc(initial_vars, options.__MAIN_COMMAND__)
instance.progress("welcome to instl", instance.get_version_str(short=True), options.__MAIN_COMMAND__)
instance.init_from_cmd_line_options(options)
instance.do_command()
elif not getattr(sys, 'frozen', False): # these modes are not available in compiled instl to avoid issues such as import errors for users
if options.mode == "admin":
if os_family_name not in ("Linux", "Mac"):
raise EnvironmentError("instl admin commands can only run under Mac or Linux")
from pyinstl.instlAdmin import InstlAdmin
instance = InstlAdmin(initial_vars)
instance.progress("welcome to instl", instance.get_version_str(short=True), options.__MAIN_COMMAND__)
instance.init_from_cmd_line_options(options)
instance.do_command()
elif options.mode == "interactive":
from pyinstl.instlClient import InstlClient
client = InstlClient(initial_vars)
client.init_from_cmd_line_options(options)
from pyinstl.instlAdmin import InstlAdmin
from pyinstl.instlInstanceBase_interactive import go_interactive
admin = InstlAdmin(initial_vars)
admin.init_from_cmd_line_options(options)
go_interactive(client, admin)
elif options.mode == "gui":
from pyinstl.instlGui import InstlGui
instance = InstlGui(initial_vars)
instance.init_from_cmd_line_options(options)
instance.do_command()
# make sure instance's dispose functions are called
if instance is not None:
instance.close()
| |
from jsonrpc import ServiceProxy
import sys
import string
# ===== BEGIN USER SETTINGS =====
# if you do not set these you will be prompted for a password for every command
rpcuser = ""
rpcpass = ""
# ====== END USER SETTINGS ======
if rpcpass == "":
access = ServiceProxy("http://127.0.0.1:9332")
else:
access = ServiceProxy("http://"+rpcuser+":"+rpcpass+"@127.0.0.1:9332")
cmd = sys.argv[1].lower()
if cmd == "backupwallet":
try:
path = raw_input("Enter destination path/filename: ")
print access.backupwallet(path)
except:
print "\n---An error occurred---\n"
elif cmd == "getaccount":
try:
addr = raw_input("Enter a Leapcoin address: ")
print access.getaccount(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "getaccountaddress":
try:
acct = raw_input("Enter an account name: ")
print access.getaccountaddress(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getaddressesbyaccount":
try:
acct = raw_input("Enter an account name: ")
print access.getaddressesbyaccount(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getbalance":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getbalance(acct, mc)
except:
print access.getbalance()
except:
print "\n---An error occurred---\n"
elif cmd == "getblockbycount":
try:
height = raw_input("Height: ")
print access.getblockbycount(height)
except:
print "\n---An error occurred---\n"
elif cmd == "getblockcount":
try:
print access.getblockcount()
except:
print "\n---An error occurred---\n"
elif cmd == "getblocknumber":
try:
print access.getblocknumber()
except:
print "\n---An error occurred---\n"
elif cmd == "getconnectioncount":
try:
print access.getconnectioncount()
except:
print "\n---An error occurred---\n"
elif cmd == "getdifficulty":
try:
print access.getdifficulty()
except:
print "\n---An error occurred---\n"
elif cmd == "getgenerate":
try:
print access.getgenerate()
except:
print "\n---An error occurred---\n"
elif cmd == "gethashespersec":
try:
print access.gethashespersec()
except:
print "\n---An error occurred---\n"
elif cmd == "getinfo":
try:
print access.getinfo()
except:
print "\n---An error occurred---\n"
elif cmd == "getnewaddress":
try:
acct = raw_input("Enter an account name: ")
try:
print access.getnewaddress(acct)
except:
print access.getnewaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaccount":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaccount(acct, mc)
except:
print access.getreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaddress":
try:
addr = raw_input("Enter a Leapcoin address (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaddress(addr, mc)
except:
print access.getreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "gettransaction":
try:
txid = raw_input("Enter a transaction ID: ")
print access.gettransaction(txid)
except:
print "\n---An error occurred---\n"
elif cmd == "getwork":
try:
data = raw_input("Data (optional): ")
try:
print access.gettransaction(data)
except:
print access.gettransaction()
except:
print "\n---An error occurred---\n"
elif cmd == "help":
try:
cmd = raw_input("Command (optional): ")
try:
print access.help(cmd)
except:
print access.help()
except:
print "\n---An error occurred---\n"
elif cmd == "listaccounts":
try:
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.listaccounts(mc)
except:
print access.listaccounts()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaccount":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaccount(mc, incemp)
except:
print access.listreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaddress":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaddress(mc, incemp)
except:
print access.listreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "listtransactions":
try:
acct = raw_input("Account (optional): ")
count = raw_input("Number of transactions (optional): ")
frm = raw_input("Skip (optional):")
try:
print access.listtransactions(acct, count, frm)
except:
print access.listtransactions()
except:
print "\n---An error occurred---\n"
elif cmd == "move":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.move(frm, to, amt, mc, comment)
except:
print access.move(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendfrom":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendfrom(frm, to, amt, mc, comment, commentto)
except:
print access.sendfrom(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendmany":
try:
frm = raw_input("From: ")
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.sendmany(frm,to,mc,comment)
except:
print access.sendmany(frm,to)
except:
print "\n---An error occurred---\n"
elif cmd == "sendtoaddress":
try:
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
amt = raw_input("Amount:")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendtoaddress(to,amt,comment,commentto)
except:
print access.sendtoaddress(to,amt)
except:
print "\n---An error occurred---\n"
elif cmd == "setaccount":
try:
addr = raw_input("Address: ")
acct = raw_input("Account:")
print access.setaccount(addr,acct)
except:
print "\n---An error occurred---\n"
elif cmd == "setgenerate":
try:
gen= raw_input("Generate? (true/false): ")
cpus = raw_input("Max processors/cores (-1 for unlimited, optional):")
try:
print access.setgenerate(gen, cpus)
except:
print access.setgenerate(gen)
except:
print "\n---An error occurred---\n"
elif cmd == "settxfee":
try:
amt = raw_input("Amount:")
print access.settxfee(amt)
except:
print "\n---An error occurred---\n"
elif cmd == "stop":
try:
print access.stop()
except:
print "\n---An error occurred---\n"
elif cmd == "validateaddress":
try:
addr = raw_input("Address: ")
print access.validateaddress(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrase":
try:
pwd = raw_input("Enter wallet passphrase: ")
access.walletpassphrase(pwd, 60)
print "\n---Wallet unlocked---\n"
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrasechange":
try:
pwd = raw_input("Enter old wallet passphrase: ")
pwd2 = raw_input("Enter new wallet passphrase: ")
access.walletpassphrasechange(pwd, pwd2)
print
print "\n---Passphrase changed---\n"
except:
print
print "\n---An error occurred---\n"
print
else:
print "Command not found or not supported"
| |
# Copyright 2011 Andrew Bogott for the Wikimedia Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import webob
from nova.api.openstack.compute.contrib import flavormanage
from nova.compute import instance_types
from nova import exception
from nova.openstack.common import jsonutils
from nova import test
from nova.tests.api.openstack import fakes
def fake_get_instance_type_by_flavor_id(flavorid, read_deleted='yes'):
if flavorid == 'failtest':
raise exception.NotFound("Not found sucka!")
elif not str(flavorid) == '1234':
raise Exception("This test expects flavorid 1234, not %s" % flavorid)
if read_deleted != 'no':
raise test.TestingException("Should not be reading deleted")
return {
'root_gb': 1,
'ephemeral_gb': 1,
'name': u'frob',
'deleted': False,
'created_at': datetime.datetime(2012, 1, 19, 18, 49, 30, 877329),
'updated_at': None,
'memory_mb': 256,
'vcpus': 1,
'flavorid': flavorid,
'swap': 0,
'rxtx_factor': 1.0,
'extra_specs': {},
'deleted_at': None,
'vcpu_weight': None,
'id': 7,
'is_public': True,
'disabled': False,
}
def fake_destroy(flavorname):
pass
def fake_create(name, memory_mb, vcpus, root_gb, ephemeral_gb,
flavorid, swap, rxtx_factor, is_public):
if flavorid is None:
flavorid = 1234
newflavor = fake_get_instance_type_by_flavor_id(flavorid,
read_deleted="no")
newflavor["name"] = name
newflavor["memory_mb"] = int(memory_mb)
newflavor["vcpus"] = int(vcpus)
newflavor["root_gb"] = int(root_gb)
newflavor["ephemeral_gb"] = int(ephemeral_gb)
newflavor["swap"] = swap
newflavor["rxtx_factor"] = float(rxtx_factor)
newflavor["is_public"] = bool(is_public)
return newflavor
class FlavorManageTest(test.TestCase):
def setUp(self):
super(FlavorManageTest, self).setUp()
self.stubs.Set(instance_types,
"get_instance_type_by_flavor_id",
fake_get_instance_type_by_flavor_id)
self.stubs.Set(instance_types, "destroy", fake_destroy)
self.stubs.Set(instance_types, "create", fake_create)
self.flags(
osapi_compute_extension=[
'nova.api.openstack.compute.contrib.select_extensions'],
osapi_compute_ext_list=['Flavormanage', 'Flavorextradata',
'Flavor_access', 'Flavor_rxtx', 'Flavor_swap'])
self.controller = flavormanage.FlavorManageController()
self.app = fakes.wsgi_app(init_only=('flavors',))
def test_delete(self):
req = fakes.HTTPRequest.blank('/v2/123/flavors/1234')
res = self.controller._delete(req, 1234)
self.assertEqual(res.status_int, 202)
# subsequent delete should fail
self.assertRaises(webob.exc.HTTPNotFound,
self.controller._delete, req, "failtest")
def test_create(self):
expected = {
"flavor": {
"name": "test",
"ram": 512,
"vcpus": 2,
"disk": 1,
"OS-FLV-EXT-DATA:ephemeral": 1,
"id": 1234,
"swap": 512,
"rxtx_factor": 1,
"os-flavor-access:is_public": True,
}
}
url = '/v2/fake/flavors'
req = webob.Request.blank(url)
req.headers['Content-Type'] = 'application/json'
req.method = 'POST'
req.body = jsonutils.dumps(expected)
res = req.get_response(self.app)
body = jsonutils.loads(res.body)
for key in expected["flavor"]:
self.assertEquals(body["flavor"][key], expected["flavor"][key])
def test_create_public_default(self):
flavor = {
"flavor": {
"name": "test",
"ram": 512,
"vcpus": 2,
"disk": 1,
"OS-FLV-EXT-DATA:ephemeral": 1,
"id": 1234,
"swap": 512,
"rxtx_factor": 1,
}
}
expected = {
"flavor": {
"name": "test",
"ram": 512,
"vcpus": 2,
"disk": 1,
"OS-FLV-EXT-DATA:ephemeral": 1,
"id": 1234,
"swap": 512,
"rxtx_factor": 1,
"os-flavor-access:is_public": True,
}
}
self.stubs.Set(instance_types, "create", fake_create)
url = '/v2/fake/flavors'
req = webob.Request.blank(url)
req.headers['Content-Type'] = 'application/json'
req.method = 'POST'
req.body = jsonutils.dumps(flavor)
res = req.get_response(self.app)
body = jsonutils.loads(res.body)
for key in expected["flavor"]:
self.assertEquals(body["flavor"][key], expected["flavor"][key])
def test_create_without_flavorid(self):
expected = {
"flavor": {
"name": "test",
"ram": 512,
"vcpus": 2,
"disk": 1,
"OS-FLV-EXT-DATA:ephemeral": 1,
"swap": 512,
"rxtx_factor": 1,
"os-flavor-access:is_public": True,
}
}
url = '/v2/fake/flavors'
req = webob.Request.blank(url)
req.headers['Content-Type'] = 'application/json'
req.method = 'POST'
req.body = jsonutils.dumps(expected)
res = req.get_response(self.app)
body = jsonutils.loads(res.body)
for key in expected["flavor"]:
self.assertEquals(body["flavor"][key], expected["flavor"][key])
def test_instance_type_exists_exception_returns_409(self):
expected = {
"flavor": {
"name": "test",
"ram": 512,
"vcpus": 2,
"disk": 1,
"OS-FLV-EXT-DATA:ephemeral": 1,
"id": 1235,
"swap": 512,
"rxtx_factor": 1,
"os-flavor-access:is_public": True,
}
}
def fake_create(name, memory_mb, vcpus, root_gb, ephemeral_gb,
flavorid, swap, rxtx_factor, is_public):
raise exception.InstanceTypeExists()
self.stubs.Set(instance_types, "create", fake_create)
url = '/v2/fake/flavors'
req = webob.Request.blank(url)
req.headers['Content-Type'] = 'application/json'
req.method = 'POST'
req.body = jsonutils.dumps(expected)
res = req.get_response(self.app)
self.assertEqual(res.status_int, 409)
| |
""" Wrappers for events around gtk.TreeView objects """
import baseevents, gtk, logging
from .. import treeviewextract
from storytext.definitions import UseCaseScriptError
class TreeColumnHelper:
@classmethod
def findColumn(cls, treeView, columnName):
for column in treeView.get_columns():
if cls.getColumnName(column) == columnName:
return column
raise UseCaseScriptError, "Could not find column with name '" + columnName + "'"
@staticmethod
def getColumnName(column):
name = column.get_data("name")
if name:
return name
else:
# PyGTK 2.16 has started returning None here...
return column.get_title() or ""
class TreeColumnClickEvent(baseevents.SignalEvent):
signalName = "clicked"
def __init__(self, name, widget, argumentParseData):
self.column = TreeColumnHelper.findColumn(widget, argumentParseData)
baseevents.SignalEvent.__init__(self, name, widget)
def connectRecord(self, method):
self._connectRecord(self.column, method)
@classmethod
def getClassWithSignal(cls):
return gtk.TreeViewColumn
def getChangeMethod(self):
return self.column.emit
@classmethod
def getAssociatedSignatures(cls, widget):
signatures = []
for column in widget.get_columns():
if column.get_clickable():
signatures.append(cls.signalName + "." + TreeColumnHelper.getColumnName(column))
return signatures
class TreeViewEvent(baseevents.GtkEvent):
def __init__(self, name, widget, *args):
baseevents.GtkEvent.__init__(self, name, widget)
self.indexer = TreeViewIndexer.getIndexer(widget)
def _outputForScript(self, iter, *args):
return self.name + " " + self.indexer.iter2string(iter)
def _outputForScriptFromPath(self, path, *args):
return self.name + " " + self.indexer.path2string(path)
def getGenerationArguments(self, argumentString):
return [ self.indexer.string2path(argumentString) ] + self.getTreeViewArgs()
def getTreeViewArgs(self):
return []
def isRelevantSelection(self, prevEvent):
return isinstance(prevEvent, TreeSelectionEvent) and self.widget is prevEvent.widget
class RowExpandEvent(TreeViewEvent):
signalName = "row-expanded"
def getChangeMethod(self):
return self.widget.expand_row
def getProgrammaticChangeMethods(self):
return [ self.widget.expand_to_path, self.widget.expand_all ]
def getTreeViewArgs(self):
# don't open all subtree parts
return [ False ]
class RowCollapseEvent(TreeViewEvent):
signalName = "row-collapsed"
def getChangeMethod(self):
return self.widget.collapse_row
def implies(self, prevLine, prevEvent, view, iter, path, *args):
return self.isRelevantSelection(prevEvent) and self.isDeselectionUnder(prevEvent, path)
def isDeselectionUnder(self, prevEvent, path):
for deselectName in prevEvent.prevDeselections:
deselectPath = self.indexer.string2path(deselectName)
if len(deselectPath) > len(path) and deselectPath[:len(path)] == path:
return True
return False
class RowActivationEvent(TreeViewEvent):
signalName = "row-activated"
def getChangeMethod(self):
return self.widget.row_activated
def _outputForScript(self, path, *args):
return self._outputForScriptFromPath(path)
def generate(self, argumentString):
# clear the selection before generating as that's what the real event does
self.widget.get_selection().unselect_all()
TreeViewEvent.generate(self, argumentString)
def getTreeViewArgs(self):
# We don't care which column right now
return [ self.widget.get_column(0) ]
def implies(self, prevLine, prevEvent, *args):
return self.isRelevantSelection(prevEvent)
class RowRightClickEvent(baseevents.RightClickEvent):
def __init__(self, name, widget, *args):
baseevents.RightClickEvent.__init__(self, name, widget)
self.indexer = TreeViewIndexer.getIndexer(widget)
def _outputForScript(self, event, *args):
pathInfo = self.widget.get_path_at_pos(int(event.x), int(event.y))
return self.name + " " + self.indexer.path2string(pathInfo[0])
def getAreaToClick(self, argumentString):
path = self.indexer.string2path(argumentString)
return self.widget.get_cell_area(path, self.widget.get_column(0))
class CellEvent(TreeViewEvent):
def __init__(self, name, widget, columnName, property):
column = TreeColumnHelper.findColumn(widget, columnName)
self.cellRenderer = self.findRenderer(column)
self.extractor = treeviewextract.getExtractor(column, self.cellRenderer, property)
TreeViewEvent.__init__(self, name, widget)
def getValue(self, renderer, path, *args):
model = self.widget.get_model()
iter = model.get_iter(path)
return self.extractor.getValue(model, iter)
def getChangeMethod(self):
return self.cellRenderer.emit
def connectRecord(self, method):
self._connectRecord(self.cellRenderer, method)
def _outputForScript(self, path, *args):
return self.name + " " + self.indexer.path2string(path)
def getPathAsString(self, path):
# For some reason, the treemodel access methods I use
# don't like the (3,0) list-type paths created by
# the above call, so we'll have to manually create a
# '3:0' string-type path instead ...
strPath = ""
for i in xrange(0, len(path)):
strPath += str(path[i])
if i < len(path) - 1:
strPath += ":"
return strPath
@classmethod
def findRenderer(cls, column):
for renderer in column.get_cell_renderers():
if isinstance(renderer, cls.getClassWithSignal()):
return renderer
@classmethod
def getAssociatedSignatures(cls, widget):
signatures = []
for column in widget.get_columns():
if cls.findRenderer(column):
rootName = cls.signalName + "." + TreeColumnHelper.getColumnName(column)
signatures += cls.getSignaturesFrom(rootName)
return signatures
@classmethod
def getSignaturesFrom(cls, rootName):
return [ rootName ]
class CellToggleEvent(CellEvent):
signalName = "toggled"
def __init__(self, name, widget, argumentParseData):
columnName, stateStr = argumentParseData.rsplit(".", 1)
self.relevantState = stateStr == "true"
CellEvent.__init__(self, name, widget, columnName, "active")
@classmethod
def getClassWithSignal(cls):
return gtk.CellRendererToggle
def shouldRecord(self, *args):
return TreeViewEvent.shouldRecord(self, *args) and self.getValue(*args) == self.relevantState
def getGenerationArguments(self, argumentString):
path = TreeViewEvent.getGenerationArguments(self, argumentString)[0]
return [ self.signalName, self.getPathAsString(path) ]
@classmethod
def getSignaturesFrom(cls, rootName):
return [ rootName + ".true", rootName + ".false" ]
class CellEditEvent(CellEvent):
signalName = "edited"
def __init__(self, *args, **kw):
CellEvent.__init__(self, property="text", *args, **kw)
@classmethod
def getClassWithSignal(cls):
return gtk.CellRendererText
def shouldRecord(self, renderer, path, new_text, *args):
value = self.getValue(renderer, path)
return TreeViewEvent.shouldRecord(self, renderer, path, *args) and new_text != str(value)
def _connectRecord(self, widget, method):
# Push our way to the front of the queue
# We need to be able to tell when things have changed
connectInfo = treeviewextract.cellRendererConnectInfo.get(widget, [])
allArgs = [ info[1] for info in connectInfo ]
for handler, args in connectInfo:
if widget.handler_is_connected(handler):
widget.disconnect(handler)
CellEvent._connectRecord(self, widget, method)
for args in allArgs:
widget.connect(*args)
def _outputForScript(self, path, new_text, *args):
return CellEvent._outputForScript(self, path, new_text, *args) + " = " + new_text
def getGenerationArguments(self, argumentString):
oldName, newName = argumentString.split(" = ")
path = TreeViewEvent.getGenerationArguments(self, oldName)[0]
return [ self.signalName, self.getPathAsString(path), newName ]
class TreeSelectionEvent(baseevents.StateChangeEvent):
def __init__(self, name, widget, *args):
self.indexer = TreeViewIndexer.getIndexer(widget)
self.selection = widget.get_selection()
# cache these before calling base class constructor, or they get intercepted...
self.unselect_iter = self.selection.unselect_iter
self.select_iter = self.selection.select_iter
self.prevSelected = []
self.prevDeselections = []
baseevents.StateChangeEvent.__init__(self, name, widget)
@classmethod
def getClassWithSignal(cls):
return gtk.TreeSelection
@classmethod
def getAssociatedSignatures(cls, widget):
return [ "changed.selection" ]
def connectRecord(self, method):
def selection_disallowed(path):
method(self.selection, path, self)
# Must record event if the selection is rejected
if isinstance(self.selection.set_select_function, treeviewextract.FunctionSelectWrapper):
self.selection.set_select_function.fail_method = selection_disallowed
self._connectRecord(self.selection, method)
def getChangeMethod(self):
return self.select_iter
def getModels(self):
model = self.widget.get_model()
if isinstance(model, gtk.TreeModelFilter):
return model, model.get_model()
else:
return None, model
def shouldRecord(self, *args):
if len(args) > 1 and isinstance(args[1], tuple):
# from selection_disallowed above
return not self.programmaticChange
ret = baseevents.StateChangeEvent.shouldRecord(self, *args)
if not ret:
self.getStateDescription() # update internal stores for programmatic changes
return ret
def getProgrammaticChangeMethods(self):
modelFilter, realModel = self.getModels()
methods = [ self.selection.unselect_all, self.selection.select_all, \
self.selection.select_iter, self.selection.unselect_iter, \
self.selection.select_path, self.selection.unselect_path,self.selection.set_mode,
self.widget.set_model, self.widget.row_activated, self.widget.collapse_row,
realModel.remove, realModel.clear ]
return methods
def eventIsRelevant(self):
return not self.isEmptyReselect() and not self.previousSelectionNowHidden()
def isEmptyReselect(self):
# The user has no way to re-select 0 rows, if this is generated, it's internal/programmatic
return len(self.prevSelected) == 0 and self.selection.count_selected_rows() == 0
def previousSelectionNowHidden(self):
# We assume any change here that involves previously selected rows ceasing to be visible
# implies that a row has been deselected by being hidden, i.e. programmatically
modelFilter, realModel = self.getModels()
if modelFilter:
for rowName in self.prevSelected:
try:
self.indexer.string2iter(rowName)
except UseCaseScriptError:
return True
return False
def getStateDescription(self, *args):
return self._getStateDescription(args, storeSelected=True)
def previousIndex(self, iter):
try:
return self.prevSelected.index(iter)
except ValueError:
return len(self.prevSelected)
def _getStateDescription(self, args, storeSelected=False):
if args and isinstance(args[0], tuple):
# selection function returned false...
return self.indexer.path2string(args[0])
newSelected = self.findSelectedIters()
newSelected.sort(key=self.previousIndex)
if storeSelected:
self.prevDeselections = filter(lambda i: i not in newSelected, self.prevSelected)
self.prevSelected = newSelected
return ",".join(newSelected)
def findSelectedIters(self):
iters = []
self.selection.selected_foreach(self.addSelIter, iters)
return iters
def addSelIter(self, model, path, iter, iters):
iters.append(self.indexer.iter2string(iter))
def generate(self, argumentString):
oldSelected = self.findSelectedIters()
newSelected = self.parseIterNames(argumentString)
toUnselect, toSelect = self.findChanges(oldSelected, newSelected)
if len(toSelect) > 0:
self.selection.unseen_changes = True
for iterName in toUnselect:
self.unselect_iter(self.indexer.string2iter(iterName))
if len(toSelect) > 0:
delattr(self.selection, "unseen_changes")
for iterName in newSelected:
self.select_iter(self.indexer.string2iter(iterName))
# In real life there is no way to do this without being in focus, force the focus over
self.widget.grab_focus()
def findChanges(self, oldSelected, newSelected):
if oldSelected == newSelected: # re-selecting should be recorded as clear-and-reselect, not do nothing
return oldSelected, newSelected
else:
oldSet = set(oldSelected)
newSet = set(newSelected)
if oldSet.issuperset(newSet):
return oldSet.difference(newSet), []
else:
index = self.findFirstDifferent(oldSelected, newSelected)
return oldSelected[index:], newSelected[index:]
def findFirstDifferent(self, oldSelected, newSelected):
for index in range(len(oldSelected)):
# Old set isn't a superset, so index cannot overflow "newSelected" here
if oldSelected[index] != newSelected[index]:
return index
return len(oldSelected)
def parseIterNames(self, argumentString):
if len(argumentString) == 0:
return []
else:
return argumentString.split(",")
def implies(self, prevLine, prevEvent, *args):
if not isinstance(prevEvent, TreeSelectionEvent) or \
not prevLine.startswith(self.name):
return False
prevStateDesc = prevLine[len(self.name) + 1:]
currStateDesc = self._getStateDescription(args[1:])
if len(currStateDesc) > len(prevStateDesc):
return currStateDesc.startswith(prevStateDesc)
elif len(currStateDesc) > 0:
oldSet = set(self.parseIterNames(prevStateDesc))
newSet = set(self.parseIterNames(currStateDesc))
return oldSet.issuperset(newSet)
else:
return False # always assume deselecting everything marks the beginning of a new conceptual action
# Class to provide domain-level lookup for rows in a tree. Convert paths to strings and back again
# Can't store rows on TreeModelFilters, store the underlying rows and convert them at the last minute
class TreeViewIndexer:
allIndexers = {}
@classmethod
def getIndexer(cls, treeView):
return cls.allIndexers.setdefault(treeView, cls(treeView))
def __init__(self, treeview):
self.givenModel = treeview.get_model()
self.model = self.findModelToUse()
self.logger = logging.getLogger("TreeModelIndexer")
self.name2row = {}
self.uniqueNames = {}
self.rendererInfo = self.getFirstTextRenderer(treeview)
self.extractor = None
self.tryPopulateMapping()
def tryPopulateMapping(self):
if not self.extractor:
self.extractor = treeviewextract.getTextExtractor(*self.rendererInfo)
if self.extractor:
self.model.foreach(self.rowInserted)
self.model.connect("row-changed", self.rowInserted)
def getFirstTextRenderer(self, treeview):
for column in treeview.get_columns():
for renderer in column.get_cell_renderers():
if isinstance(renderer, gtk.CellRendererText):
return column, renderer
return None, None
def getValue(self, *args):
return str(self.extractor.getValue(*args)).replace("\n", " / ")
def iter2string(self, iter):
self.tryPopulateMapping()
currentName = self.getValue(self.givenModel, iter)
if not self.uniqueNames.has_key(currentName):
return currentName
path = self.convertFrom(self.givenModel.get_path(iter))
for uniqueName in self.uniqueNames.get(currentName):
for row in self.findAllRows(uniqueName):
if row.get_path() == path:
return uniqueName
def path2string(self, path):
return self.iter2string(self.givenModel.get_iter(path))
def string2iter(self, iterString):
return self.givenModel.get_iter(self.string2path(iterString))
def string2path(self, name):
self.tryPopulateMapping()
rows = self.findAllRows(name)
if len(rows) == 1:
return self.convertTo(rows[0].get_path(), name)
elif len(rows) == 0:
raise UseCaseScriptError, "Could not find row '" + name + "' in Tree View\nKnown names are " + repr(self.name2row.keys())
else:
raise UseCaseScriptError, "'" + name + "' in Tree View is ambiguous, could refer to " \
+ str(len(rows)) + " different paths"
def usesFilter(self):
return isinstance(self.givenModel, gtk.TreeModelFilter)
def findModelToUse(self):
if self.usesFilter():
return self.givenModel.get_model()
else:
return self.givenModel
def convertFrom(self, path):
if self.usesFilter():
return self.givenModel.convert_path_to_child_path(path)
else:
return path
def convertTo(self, path, name):
if self.usesFilter():
pathToUse = self.givenModel.convert_child_path_to_path(path)
if pathToUse is not None:
return pathToUse
else:
raise UseCaseScriptError, "Row '" + name + "' is currently hidden and cannot be accessed"
else:
return path
def rowInserted(self, model, path, iter):
givenName = self.getValue(model, iter)
row = gtk.TreeRowReference(model, path)
if self.store(row, givenName):
allRows = self.findAllRows(givenName)
if len(allRows) > 1:
newNames = self.getNewNames(allRows, givenName)
self.uniqueNames[givenName] = newNames
for row, newName in zip(allRows, newNames):
self.store(row, newName)
def findAllRows(self, name):
storedRows = self.name2row.get(name, [])
if len(storedRows) > 0:
validRows = filter(lambda r: r.get_path() is not None, storedRows)
self.name2row[name] = validRows
return validRows
else:
return storedRows
def store(self, row, name):
rows = self.name2row.setdefault(name, [])
if not row.get_path() in [ r.get_path() for r in rows ]:
self.logger.debug("Storing row named " + repr(name) + " with path " + repr(row.get_path()))
rows.append(row)
return True
else:
return False
def getNewNames(self, rows, oldName):
    """Compute a unique display name for each of several same-named rows.

    Strategy: disambiguate first by parent description (" under X" /
    " at top level"); where rows share a parent description but have
    distinct parent rows, recurse so the parents' own unique names tell
    them apart; as a last resort append a numeric " (n)" suffix.
    NOTE(review): relies on Python 2 'map' returning a list (it is
    passed back into this method and membership-tested).
    """
    self.logger.debug(repr(oldName) + " can be applied to " + repr(len(rows)) +
                      " rows, setting unique names")
    parentSuffices = {}
    for index, row in enumerate(rows):
        iter = self.model.get_iter(row.get_path())
        parent = self.model.iter_parent(iter)
        parentSuffix = self.getParentSuffix(parent)
        parentSuffices.setdefault(parentSuffix, []).append(index)

    newNames = [ oldName ] * len(rows)
    for parentSuffix, indices in parentSuffices.items():
        newName = oldName
        # A parent suffix only helps when the rows actually have
        # different parent descriptions.
        if len(parentSuffices) > 1:
            newName += parentSuffix
        if len(indices) == 1:
            self.logger.debug("Name now unique, setting row " + repr(indices[0]) + " name to " + repr(newName))
            newNames[indices[0]] = newName
        else:
            matchingRows = [ rows[ix] for ix in indices ]
            parents = map(self.getParentRow, matchingRows)
            if None not in parents:
                # Distinct parent rows behind one description: recurse on
                # the parents to inherit their unique names.
                parentNames = self.getNewNames(parents, newName)
                for index, parentName in enumerate(parentNames):
                    self.logger.debug("Name from parents, setting row " + repr(indices[index]) +
                                      " name to " + repr(parentName))
                    newNames[indices[index]] = parentName
            else:
                # No other option, enumerate them and identify them by index
                for index, row in enumerate(matchingRows):
                    newNames[indices[index]] = newName + " (" + str(index + 1) + ")"
    return newNames
def getParentRow(self, row):
    """Return a TreeRowReference to row's parent, or None for top-level rows."""
    childIter = self.model.get_iter(row.get_path())
    parentIter = self.model.iter_parent(childIter)
    if parentIter:
        return gtk.TreeRowReference(self.model, self.model.get_path(parentIter))
def getParentSuffix(self, parent):
    """Describe a row's parent for use as a disambiguating name suffix."""
    if not parent:
        return " at top level"
    return " under " + self.getValue(self.model, parent)
| |
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Helper code for the iSCSI volume driver.
"""
import contextlib
import os
import re
import stat
import time
from cinder.brick import exception
from cinder.brick import executor
from cinder.openstack.common import fileutils
from cinder.openstack.common.gettextutils import _
from cinder.openstack.common import log as logging
from cinder.openstack.common import processutils as putils
LOG = logging.getLogger(__name__)
class TargetAdmin(executor.Executor):
    """iSCSI target administration.

    Base class for iSCSI target admin helpers.  Concrete subclasses
    (tgtadm, ietadm, LIO, ...) implement the abstract operations below
    by invoking their respective command-line tool.
    """

    def __init__(self, cmd, root_helper, execute):
        # cmd: the admin CLI binary (e.g. 'tgtadm', 'ietadm') that
        # _run() prepends to every invocation.
        super(TargetAdmin, self).__init__(root_helper, execute=execute)
        self._cmd = cmd

    def _run(self, *args, **kwargs):
        # Target administration always needs root privileges.
        self._execute(self._cmd, *args, run_as_root=True, **kwargs)

    def create_iscsi_target(self, name, tid, lun, path,
                            chap_auth=None, **kwargs):
        """Create an iSCSI target and logical unit."""
        raise NotImplementedError()

    def remove_iscsi_target(self, tid, lun, vol_id, vol_name, **kwargs):
        """Remove an iSCSI target and logical unit."""
        raise NotImplementedError()

    def _new_target(self, name, tid, **kwargs):
        """Create a new iSCSI target."""
        raise NotImplementedError()

    def _delete_target(self, tid, **kwargs):
        """Delete a target."""
        raise NotImplementedError()

    def show_target(self, tid, iqn=None, **kwargs):
        """Query the given target ID."""
        raise NotImplementedError()

    def _new_logicalunit(self, tid, lun, path, **kwargs):
        """Create a new LUN on a target using the supplied path."""
        raise NotImplementedError()

    def _delete_logicalunit(self, tid, lun, **kwargs):
        """Delete a logical unit from a target."""
        raise NotImplementedError()
class TgtAdm(TargetAdmin):
    """iSCSI target administration using tgtadm/tgt-admin.

    A target is made persistent by writing a per-volume config stanza
    into ``volumes_dir`` and running ``tgt-admin --update``; tgtd must
    therefore be configured with ``include <volumes_dir>/*``.
    """

    # NOTE(review): indentation inside these stanzas is assumed to be
    # cosmetic for tgt's config parser -- confirm before reformatting.
    VOLUME_CONF = """
                <target %s>
                    backing-store %s
                    lld iscsi
                    write-cache %s
                </target>
                  """
    VOLUME_CONF_WITH_CHAP_AUTH = """
                <target %s>
                    backing-store %s
                    lld iscsi
                    %s
                    write-cache %s
                </target>
                  """

    def __init__(self, root_helper, volumes_dir,
                 target_prefix='iqn.2010-10.org.openstack:',
                 execute=putils.execute):
        super(TgtAdm, self).__init__('tgtadm', root_helper, execute)

        self.iscsi_target_prefix = target_prefix
        self.volumes_dir = volumes_dir

    def _get_target(self, iqn):
        """Return the target id for ``iqn`` as a string, or None."""
        (out, err) = self._execute('tgt-admin', '--show', run_as_root=True)
        lines = out.split('\n')
        for line in lines:
            if iqn in line:
                parsed = line.split()
                tid = parsed[1]
                # Matched line looks like "Target <tid>: <iqn>"; strip
                # the trailing ':' from the tid token.
                return tid[:-1]

        return None

    def _verify_backing_lun(self, iqn, tid):
        """Check that LUN 1 (the backing store) exists for the target."""
        backing_lun = True
        capture = False
        target_info = []

        (out, err) = self._execute('tgt-admin', '--show', run_as_root=True)
        lines = out.split('\n')

        for line in lines:
            if iqn in line and "Target %s" % tid in line:
                capture = True
            if capture:
                target_info.append(line)
            if iqn not in line and 'Target ' in line:
                capture = False

        # NOTE(review): exact-line membership test; assumes tgt-admin
        # prints the LUN line as precisely ' LUN: 1' -- confirm.
        if ' LUN: 1' not in target_info:
            backing_lun = False

        return backing_lun

    def _recreate_backing_lun(self, iqn, tid, name, path):
        """Retry creation of LUN 1 after a presumed device-busy race."""
        LOG.warning(_('Attempting recreate of backing lun...'))

        # Since we think the most common case of this is a dev busy
        # (create vol from snapshot) we're going to add a sleep here
        # this will hopefully give things enough time to stabilize
        # how long should we wait?? I have no idea, let's go big
        # and error on the side of caution
        time.sleep(10)
        try:
            (out, err) = self._execute('tgtadm', '--lld', 'iscsi',
                                       '--op', 'new', '--mode',
                                       'logicalunit', '--tid',
                                       tid, '--lun', '1', '-b',
                                       path, run_as_root=True)
            LOG.debug('StdOut from recreate backing lun: %s' % out)
            LOG.debug('StdErr from recreate backing lun: %s' % err)
        except putils.ProcessExecutionError as e:
            LOG.error(_("Failed to recover attempt to create "
                        "iscsi backing lun for volume "
                        "id:%(vol_id)s: %(e)s")
                      % {'vol_id': name, 'e': e})

    def create_iscsi_target(self, name, tid, lun, path,
                            chap_auth=None, **kwargs):
        """Create (or update) a persistent tgt target for a volume.

        Returns the target id.  Raises ISCSITargetCreateFailed when the
        update fails or the backing LUN cannot be (re)created, NotFound
        when tgtd does not pick up the new config file.
        """
        # Note(jdg) tid and lun aren't used by TgtAdm but remain for
        # compatibility
        fileutils.ensure_tree(self.volumes_dir)

        vol_id = name.split(':')[1]
        write_cache = kwargs.get('write_cache', 'on')
        if chap_auth is None:
            volume_conf = self.VOLUME_CONF % (name, path, write_cache)
        else:
            volume_conf = self.VOLUME_CONF_WITH_CHAP_AUTH % (name,
                                                             path, chap_auth,
                                                             write_cache)
        LOG.info(_('Creating iscsi_target for: %s') % vol_id)
        volumes_dir = self.volumes_dir
        volume_path = os.path.join(volumes_dir, vol_id)

        # Bug fix: use a context manager so the persist file is closed
        # even when the write fails (the original leaked the handle).
        with open(volume_path, 'w+') as f:
            f.write(volume_conf)
        LOG.debug('Created volume path %(vp)s,\n'
                  'content: %(vc)s'
                  % {'vp': volume_path, 'vc': volume_conf})

        old_persist_file = None
        old_name = kwargs.get('old_name', None)
        if old_name is not None:
            old_persist_file = os.path.join(volumes_dir, old_name)

        try:
            # with the persistent tgts we create them
            # by creating the entry in the persist file
            # and then doing an update to get the target
            # created.
            (out, err) = self._execute('tgt-admin', '--update', name,
                                       run_as_root=True)
            LOG.debug("StdOut from tgt-admin --update: %s", out)
            LOG.debug("StdErr from tgt-admin --update: %s", err)

            # Grab targets list for debug
            # Consider adding a check for lun 0 and 1 for tgtadm
            # before considering this as valid
            (out, err) = self._execute('tgtadm',
                                       '--lld',
                                       'iscsi',
                                       '--op',
                                       'show',
                                       '--mode',
                                       'target',
                                       run_as_root=True)
            LOG.debug("Targets after update: %s" % out)
        except putils.ProcessExecutionError as e:
            LOG.warning(_("Failed to create iscsi target for volume "
                          "id:%(vol_id)s: %(e)s")
                        % {'vol_id': vol_id, 'e': e})

            # Don't forget to remove the persistent file we created
            os.unlink(volume_path)
            raise exception.ISCSITargetCreateFailed(volume_id=vol_id)

        iqn = '%s%s' % (self.iscsi_target_prefix, vol_id)
        tid = self._get_target(iqn)
        if tid is None:
            LOG.error(_("Failed to create iscsi target for volume "
                        "id:%(vol_id)s. Please ensure your tgtd config file "
                        "contains 'include %(volumes_dir)s/*'") % {
                      'vol_id': vol_id,
                      'volumes_dir': volumes_dir, })
            raise exception.NotFound()

        # NOTE(jdg): Sometimes we have some issues with the backing lun
        # not being created, believe this is due to a device busy
        # or something related, so we're going to add some code
        # here that verifies the backing lun (lun 1) was created
        # and we'll try and recreate it if it's not there
        if not self._verify_backing_lun(iqn, tid):
            try:
                self._recreate_backing_lun(iqn, tid, name, path)
            except putils.ProcessExecutionError:
                os.unlink(volume_path)
                raise exception.ISCSITargetCreateFailed(volume_id=vol_id)

            # Finally check once more and if no go, fail and punt
            if not self._verify_backing_lun(iqn, tid):
                os.unlink(volume_path)
                raise exception.ISCSITargetCreateFailed(volume_id=vol_id)

        if old_persist_file is not None and os.path.exists(old_persist_file):
            os.unlink(old_persist_file)

        return tid

    def remove_iscsi_target(self, tid, lun, vol_id, vol_name, **kwargs):
        """Delete the target and its persist file; tolerant of races."""
        LOG.info(_('Removing iscsi_target for: %s') % vol_id)
        vol_uuid_file = vol_name
        volume_path = os.path.join(self.volumes_dir, vol_uuid_file)
        if not os.path.exists(volume_path):
            LOG.warning(_('Volume path %s does not exist, '
                          'nothing to remove.') % volume_path)
            return

        if os.path.isfile(volume_path):
            iqn = '%s%s' % (self.iscsi_target_prefix,
                            vol_uuid_file)
        else:
            raise exception.ISCSITargetRemoveFailed(volume_id=vol_id)
        try:
            # NOTE(vish): --force is a workaround for bug:
            #             https://bugs.launchpad.net/cinder/+bug/1159948
            self._execute('tgt-admin',
                          '--force',
                          '--delete',
                          iqn,
                          run_as_root=True)
        except putils.ProcessExecutionError as e:
            LOG.error(_("Failed to remove iscsi target for volume "
                        "id:%(vol_id)s: %(e)s")
                      % {'vol_id': vol_id, 'e': e})
            raise exception.ISCSITargetRemoveFailed(volume_id=vol_id)

        # NOTE(jdg): There's a bug in some versions of tgt that
        # will sometimes fail silently when using the force flag
        # https://bugs.launchpad.net/ubuntu/+source/tgt/+bug/1305343
        # For now work-around by checking if the target was deleted,
        # if it wasn't, try again without the force.
        # This will NOT do any good for the case of multiple sessions
        # which the force was aded for but it will however address
        # the cases pointed out in bug:
        # https://bugs.launchpad.net/cinder/+bug/1304122
        if self._get_target(iqn):
            try:
                LOG.warning(_('Silent failure of target removal '
                              'detected, retry....'))
                self._execute('tgt-admin',
                              '--delete',
                              iqn,
                              run_as_root=True)
            except putils.ProcessExecutionError as e:
                LOG.error(_("Failed to remove iscsi target for volume "
                            "id:%(vol_id)s: %(e)s")
                          % {'vol_id': vol_id, 'e': e})
                raise exception.ISCSITargetRemoveFailed(volume_id=vol_id)

        # NOTE(jdg): This *should* be there still but incase
        # it's not we don't care, so just ignore it if was
        # somehow deleted between entry of this method
        # and here
        if os.path.exists(volume_path):
            os.unlink(volume_path)
        else:
            LOG.debug('Volume path %s not found at end, '
                      'of remove_iscsi_target.' % volume_path)

    def show_target(self, tid, iqn=None, **kwargs):
        """Raise NotFound unless a target with ``iqn`` currently exists."""
        if iqn is None:
            raise exception.InvalidParameterValue(
                err=_('valid iqn needed for show_target'))

        tid = self._get_target(iqn)
        if tid is None:
            raise exception.NotFound()
class IetAdm(TargetAdmin):
    """iSCSI target administration using ietadm (iSCSI Enterprise Target)."""

    def __init__(self, root_helper, iet_conf='/etc/iet/ietd.conf',
                 iscsi_iotype='fileio', execute=putils.execute):
        super(IetAdm, self).__init__('ietadm', root_helper, execute)
        self.iet_conf = iet_conf
        self.iscsi_iotype = iscsi_iotype

    def _is_block(self, path):
        """Return True when ``path`` refers to a block device."""
        mode = os.stat(path).st_mode
        return stat.S_ISBLK(mode)

    def _iotype(self, path):
        """Resolve the IET I/O type; 'auto' probes the backing path."""
        if self.iscsi_iotype == 'auto':
            return 'blockio' if self._is_block(path) else 'fileio'
        else:
            return self.iscsi_iotype

    @contextlib.contextmanager
    def temporary_chown(self, path, owner_uid=None):
        """Temporarily chown a path.

        :params path: The path to chown
        :params owner_uid: UID of temporary owner (defaults to current user)
        """
        if owner_uid is None:
            owner_uid = os.getuid()

        orig_uid = os.stat(path).st_uid

        if orig_uid != owner_uid:
            putils.execute('chown', owner_uid, path,
                           root_helper=self._root_helper, run_as_root=True)
        try:
            yield
        finally:
            # Always restore ownership, even if the caller's block raised.
            if orig_uid != owner_uid:
                putils.execute('chown', orig_uid, path,
                               root_helper=self._root_helper, run_as_root=True)

    def create_iscsi_target(self, name, tid, lun, path,
                            chap_auth=None, **kwargs):
        """Create the target/LUN via ietadm and persist it in ietd.conf."""
        # NOTE (jdg): Address bug: 1175207
        kwargs.pop('old_name', None)

        self._new_target(name, tid, **kwargs)
        self._new_logicalunit(tid, lun, path, **kwargs)
        if chap_auth is not None:
            (auth_type, auth_user, auth_pass) = chap_auth.split()
            self._new_auth(tid, auth_type, auth_user, auth_pass, **kwargs)

        conf_file = self.iet_conf
        if os.path.exists(conf_file):
            try:
                # Bug fix: the original interpolated chap_auth directly,
                # writing the literal string "None" into ietd.conf when
                # no CHAP credentials were supplied.
                chap_str = chap_auth if chap_auth is not None else ''
                volume_conf = """
                        Target %s
                            %s
                            Lun 0 Path=%s,Type=%s
                """ % (name, chap_str, path, self._iotype(path))

                with self.temporary_chown(conf_file):
                    # Bug fix: context-managed open so the handle is
                    # closed even when the write fails.
                    with open(conf_file, 'a+') as f:
                        f.write(volume_conf)
            except putils.ProcessExecutionError as e:
                vol_id = name.split(':')[1]
                LOG.error(_("Failed to create iscsi target for volume "
                            "id:%(vol_id)s: %(e)s")
                          % {'vol_id': vol_id, 'e': e})
                raise exception.ISCSITargetCreateFailed(volume_id=vol_id)
        return tid

    def remove_iscsi_target(self, tid, lun, vol_id, vol_name, **kwargs):
        """Tear down the target/LUN and strip its stanza from ietd.conf."""
        LOG.info(_('Removing iscsi_target for volume: %s') % vol_id)
        self._delete_logicalunit(tid, lun, **kwargs)
        self._delete_target(tid, **kwargs)
        vol_uuid_file = vol_name
        conf_file = self.iet_conf
        if os.path.exists(conf_file):
            with self.temporary_chown(conf_file):
                # Bug fix: the original opened the file inside try and
                # called close() in finally; if open() itself failed, the
                # finally block raised NameError on the unbound variable,
                # masking the real error.  'with' handles both cases.
                with open(conf_file, 'r+') as iet_conf_text:
                    full_txt = iet_conf_text.readlines()
                    new_iet_conf_txt = []
                    count = 0
                    for line in full_txt:
                        if count > 0:
                            # Still skipping lines of a matched stanza.
                            count -= 1
                            continue
                        elif re.search(vol_uuid_file, line):
                            # Drop the Target line plus the two lines
                            # (auth + Lun) written by create_iscsi_target.
                            count = 2
                            continue
                        else:
                            new_iet_conf_txt.append(line)

                    iet_conf_text.seek(0)
                    iet_conf_text.truncate(0)
                    iet_conf_text.writelines(new_iet_conf_txt)

    def _new_target(self, name, tid, **kwargs):
        self._run('--op', 'new',
                  '--tid=%s' % tid,
                  '--params', 'Name=%s' % name,
                  **kwargs)

    def _delete_target(self, tid, **kwargs):
        self._run('--op', 'delete',
                  '--tid=%s' % tid,
                  **kwargs)

    def show_target(self, tid, iqn=None, **kwargs):
        self._run('--op', 'show',
                  '--tid=%s' % tid,
                  **kwargs)

    def _new_logicalunit(self, tid, lun, path, **kwargs):
        self._run('--op', 'new',
                  '--tid=%s' % tid,
                  '--lun=%d' % lun,
                  '--params', 'Path=%s,Type=%s' % (path, self._iotype(path)),
                  **kwargs)

    def _delete_logicalunit(self, tid, lun, **kwargs):
        self._run('--op', 'delete',
                  '--tid=%s' % tid,
                  '--lun=%d' % lun,
                  **kwargs)

    def _new_auth(self, tid, type, username, password, **kwargs):
        self._run('--op', 'new',
                  '--tid=%s' % tid,
                  '--user',
                  '--params=%s=%s,Password=%s' % (type, username, password),
                  **kwargs)
class FakeIscsiHelper(object):
    """No-op iSCSI helper for tests: hands out monotonically rising tids."""

    def __init__(self):
        # First create_iscsi_target() call will return 2, then 3, ...
        self.tid = 1
        self._execute = None

    def set_execute(self, execute):
        """Record the execute callable (never invoked by this fake)."""
        self._execute = execute

    def create_iscsi_target(self, *args, **kwargs):
        """Pretend to create a target; just bump and return the tid."""
        self.tid = self.tid + 1
        return self.tid
class LioAdm(TargetAdmin):
    """iSCSI target administration for LIO using python-rtslib.

    All operations shell out to the 'cinder-rtstool' helper binary;
    its presence is verified at construction time.
    """

    def __init__(self, root_helper, lio_initiator_iqns='',
                 iscsi_target_prefix='iqn.2010-10.org.openstack:',
                 execute=putils.execute):
        super(LioAdm, self).__init__('cinder-rtstool', root_helper, execute)

        self.iscsi_target_prefix = iscsi_target_prefix
        self.lio_initiator_iqns = lio_initiator_iqns

        self._verify_rtstool()

    def _verify_rtstool(self):
        # Fail fast at construction time if the helper is missing/broken.
        try:
            self._execute('cinder-rtstool', 'verify')
        except (OSError, putils.ProcessExecutionError):
            LOG.error(_('cinder-rtstool is not installed correctly'))
            raise

    def _get_target(self, iqn):
        """Return the get-targets output line mentioning iqn, or None."""
        (out, err) = self._execute('cinder-rtstool',
                                   'get-targets',
                                   run_as_root=True)
        lines = out.split('\n')
        for line in lines:
            if iqn in line:
                return line

        return None

    def create_iscsi_target(self, name, tid, lun, path,
                            chap_auth=None, **kwargs):
        """Create a LIO target; returns its id or raises on failure."""
        # tid and lun are not used
        vol_id = name.split(':')[1]

        LOG.info(_('Creating iscsi_target for volume: %s') % vol_id)

        # rtstool requires chap_auth, but unit tests don't provide it
        chap_auth_userid = 'test_id'
        chap_auth_password = 'test_pass'

        if chap_auth is not None:
            # chap_auth is "<method> <userid> <password>"; drop the method.
            (chap_auth_userid, chap_auth_password) = chap_auth.split(' ')[1:]

        extra_args = []
        if self.lio_initiator_iqns:
            extra_args.append(self.lio_initiator_iqns)

        try:
            command_args = ['cinder-rtstool',
                            'create',
                            path,
                            name,
                            chap_auth_userid,
                            chap_auth_password]
            if extra_args:
                command_args.extend(extra_args)
            self._execute(*command_args, run_as_root=True)
        except putils.ProcessExecutionError as e:
            LOG.error(_("Failed to create iscsi target for volume "
                        "id:%s.") % vol_id)
            LOG.error("%s" % e)

            raise exception.ISCSITargetCreateFailed(volume_id=vol_id)

        iqn = '%s%s' % (self.iscsi_target_prefix, vol_id)
        tid = self._get_target(iqn)
        if tid is None:
            LOG.error(_("Failed to create iscsi target for volume "
                        "id:%s.") % vol_id)
            raise exception.NotFound()

        return tid

    def remove_iscsi_target(self, tid, lun, vol_id, vol_name, **kwargs):
        """Delete the LIO target identified by the volume name."""
        LOG.info(_('Removing iscsi_target: %s') % vol_id)
        vol_uuid_name = vol_name
        iqn = '%s%s' % (self.iscsi_target_prefix, vol_uuid_name)

        try:
            self._execute('cinder-rtstool',
                          'delete',
                          iqn,
                          run_as_root=True)
        except putils.ProcessExecutionError as e:
            LOG.error(_("Failed to remove iscsi target for volume "
                        "id:%s.") % vol_id)
            LOG.error("%s" % e)
            raise exception.ISCSITargetRemoveFailed(volume_id=vol_id)

    def show_target(self, tid, iqn=None, **kwargs):
        """Raise NotFound unless a target with ``iqn`` currently exists."""
        if iqn is None:
            raise exception.InvalidParameterValue(
                err=_('valid iqn needed for show_target'))

        tid = self._get_target(iqn)
        if tid is None:
            raise exception.NotFound()

    def initialize_connection(self, volume, connector):
        """Authorize the connector's initiator IQN against the target ACL."""
        volume_iqn = volume['provider_location'].split(' ')[1]

        (auth_method, auth_user, auth_pass) = \
            volume['provider_auth'].split(' ', 3)

        # Add initiator iqns to target ACL
        try:
            self._execute('cinder-rtstool', 'add-initiator',
                          volume_iqn,
                          auth_user,
                          auth_pass,
                          connector['initiator'],
                          run_as_root=True)
        except putils.ProcessExecutionError:
            LOG.error(_("Failed to add initiator iqn %s to target") %
                      connector['initiator'])
            raise exception.ISCSITargetAttachFailed(volume_id=volume['id'])
class ISERTgtAdm(TgtAdm):
    """iSCSI target administration over the iSER transport, via tgtadm.

    Same workflow as TgtAdm; only the config stanzas and the default
    IQN prefix differ.
    """

    # NOTE(review): these stanzas use 'driver iser' and the key
    # 'write_cache' (underscore), unlike TgtAdm's 'write-cache' --
    # presumably what the iser driver expects; verify against tgtd docs.
    VOLUME_CONF = """
                <target %s>
                    driver iser
                    backing-store %s
                    write_cache %s
                </target>
                  """
    VOLUME_CONF_WITH_CHAP_AUTH = """
                <target %s>
                    driver iser
                    backing-store %s
                    %s
                    write_cache %s
                </target>
                  """

    def __init__(self, root_helper, volumes_dir,
                 target_prefix='iqn.2010-10.org.iser.openstack:',
                 execute=putils.execute):
        super(ISERTgtAdm, self).__init__(root_helper, volumes_dir,
                                         target_prefix, execute)
| |
"""Grover's algorithm and helper functions.
Todo:
* W gate construction (or perhaps -W gate based on Mermin's book)
* Generalize the algorithm for an unknown function that returns 1 on multiple
qubit states, not just one.
* Implement _represent_ZGate in OracleGate
"""
from __future__ import print_function, division
from sympy import floor, pi, sqrt, sympify, eye
from sympy.core.compatibility import range
from sympy.core.numbers import NegativeOne
from sympy.physics.quantum.qapply import qapply
from sympy.physics.quantum.qexpr import QuantumError
from sympy.physics.quantum.hilbert import ComplexSpace
from sympy.physics.quantum.operator import UnitaryOperator
from sympy.physics.quantum.gate import Gate
from sympy.physics.quantum.qubit import IntQubit
__all__ = [
'OracleGate',
'WGate',
'superposition_basis',
'grover_iteration',
'apply_grover'
]
def superposition_basis(nqubits):
    """Creates an equal superposition of the computational basis.

    Parameters
    ==========

    nqubits : int
        The number of qubits.

    Returns
    =======

    state : Qubit
        An equal superposition of the computational basis with nqubits.

    Examples
    ========

    Create an equal superposition of 2 qubits::

        >>> from sympy.physics.quantum.grover import superposition_basis
        >>> superposition_basis(2)
        |0>/2 + |1>/2 + |2>/2 + |3>/2
    """
    size = 2**nqubits
    amplitude = 1/sqrt(size)
    state = 0
    for basis in range(size):
        state = state + amplitude*IntQubit(basis, nqubits=nqubits)
    return state
class OracleGate(Gate):
    """A black box gate.

    The gate marks the desired qubits of an unknown function by flipping
    the sign of the qubits. The unknown function returns true when it
    finds its desired qubits and false otherwise.

    Parameters
    ==========

    qubits : int
        Number of qubits.

    oracle : callable
        A callable function that returns a boolean on a computational basis.

    Examples
    ========

    Apply an Oracle gate that flips the sign of ``|2>`` on different qubits::

        >>> from sympy.physics.quantum.qubit import IntQubit
        >>> from sympy.physics.quantum.qapply import qapply
        >>> from sympy.physics.quantum.grover import OracleGate
        >>> f = lambda qubits: qubits == IntQubit(2)
        >>> v = OracleGate(2, f)
        >>> qapply(v*IntQubit(2))
        -|2>
        >>> qapply(v*IntQubit(3))
        |3>
    """
    gate_name = u'V'
    gate_name_latex = u'V'

    #-------------------------------------------------------------------------
    # Initialization/creation
    #-------------------------------------------------------------------------

    @classmethod
    def _eval_args(cls, args):
        # args[0]: number of qubits (validated below), args[1]: the oracle.
        # TODO: args[1] is not a subclass of Basic
        if len(args) != 2:
            raise QuantumError(
                'Insufficient/excessive arguments to Oracle.  Please ' +
                'supply the number of qubits and an unknown function.'
            )
        sub_args = (args[0],)
        sub_args = UnitaryOperator._eval_args(sub_args)
        if not sub_args[0].is_Integer:
            raise TypeError('Integer expected, got: %r' % sub_args[0])

        if not callable(args[1]):
            raise TypeError('Callable expected, got: %r' % args[1])
        return (sub_args[0], args[1])

    @classmethod
    def _eval_hilbert_space(cls, args):
        """This returns the smallest possible Hilbert space."""
        return ComplexSpace(2)**args[0]

    #-------------------------------------------------------------------------
    # Properties
    #-------------------------------------------------------------------------

    @property
    def search_function(self):
        """The unknown function that helps find the sought after qubits."""
        return self.label[1]

    @property
    def targets(self):
        """A tuple of target qubits."""
        # The oracle acts on every qubit: targets are (0, 1, ..., n-1).
        return sympify(tuple(range(self.args[0])))

    #-------------------------------------------------------------------------
    # Apply
    #-------------------------------------------------------------------------

    def _apply_operator_Qubit(self, qubits, **options):
        """Apply this operator to a Qubit subclass.

        Parameters
        ==========

        qubits : Qubit
            The qubit subclass to apply this operator to.

        Returns
        =======

        state : Expr
            The resulting quantum state.
        """
        if qubits.nqubits != self.nqubits:
            raise QuantumError(
                'OracleGate operates on %r qubits, got: %r'
                % (self.nqubits, qubits.nqubits)
            )
        # If function returns 1 on qubits
        # return the negative of the qubits (flip the sign)
        if self.search_function(qubits):
            return -qubits
        else:
            return qubits

    #-------------------------------------------------------------------------
    # Represent
    #-------------------------------------------------------------------------

    def _represent_ZGate(self, basis, **options):
        """
        Represent the OracleGate in the computational basis.

        The matrix is the identity with -1 on the diagonal entries whose
        basis state satisfies the oracle.
        """
        nbasis = 2**self.nqubits  # compute it only once
        matrixOracle = eye(nbasis)
        # Flip the sign given the output of the oracle function
        for i in range(nbasis):
            if self.search_function(IntQubit(i, nqubits=self.nqubits)):
                matrixOracle[i, i] = NegativeOne()
        return matrixOracle
class WGate(Gate):
    """General n qubit W Gate in Grover's algorithm.

    The gate performs the operation ``2|phi><phi| - 1`` on some qubits.
    ``|phi> = (tensor product of n Hadamards)*(|0> with n qubits)``

    Parameters
    ==========

    nqubits : int
        The number of qubits to operate on

    """
    gate_name = u'W'
    gate_name_latex = u'W'

    @classmethod
    def _eval_args(cls, args):
        if len(args) != 1:
            raise QuantumError(
                'Insufficient/excessive arguments to W gate.  Please ' +
                'supply the number of qubits to operate on.'
            )
        args = UnitaryOperator._eval_args(args)
        if not args[0].is_Integer:
            raise TypeError('Integer expected, got: %r' % args[0])
        return args

    #-------------------------------------------------------------------------
    # Properties
    #-------------------------------------------------------------------------

    @property
    def targets(self):
        # Acts on all qubits, listed highest-to-lowest: (n-1, ..., 0).
        return sympify(tuple(reversed(range(self.args[0]))))

    #-------------------------------------------------------------------------
    # Apply
    #-------------------------------------------------------------------------

    def _apply_operator_Qubit(self, qubits, **options):
        """
        qubits: a set of qubits (Qubit)
        Returns: quantum object (quantum expression - QExpr)
        """
        if qubits.nqubits != self.nqubits:
            raise QuantumError(
                'WGate operates on %r qubits, got: %r'
                % (self.nqubits, qubits.nqubits)
            )

        # See 'Quantum Computer Science' by David Mermin p.92 -> W|a> result
        # Return (2/(sqrt(2^n)))|phi> - |a> where |a> is the current basis
        # state and phi is the superposition of basis states (see function
        # create_computational_basis above)
        basis_states = superposition_basis(self.nqubits)
        change_to_basis = (2/sqrt(2**self.nqubits))*basis_states
        return change_to_basis - qubits
def grover_iteration(qstate, oracle):
    """Applies one application of the Oracle and W Gate, WV.

    Parameters
    ==========

    qstate : Qubit
        A superposition of qubits.
    oracle : OracleGate
        The black box operator that flips the sign of the desired basis qubits.

    Returns
    =======

    Qubit : The qubits after applying the Oracle and W gate.

    Examples
    ========

    Perform one iteration of grover's algorithm to see a phase change::

        >>> from sympy.physics.quantum.qapply import qapply
        >>> from sympy.physics.quantum.qubit import IntQubit
        >>> from sympy.physics.quantum.grover import OracleGate
        >>> from sympy.physics.quantum.grover import superposition_basis
        >>> from sympy.physics.quantum.grover import grover_iteration
        >>> numqubits = 2
        >>> basis_states = superposition_basis(numqubits)
        >>> f = lambda qubits: qubits == IntQubit(2)
        >>> v = OracleGate(numqubits, f)
        >>> qapply(grover_iteration(basis_states, v))
        |2>

    """
    return WGate(oracle.nqubits)*oracle*qstate
def apply_grover(oracle, nqubits, iterations=None):
    """Applies grover's algorithm.

    Parameters
    ==========

    oracle : callable
        The unknown callable function that returns true when applied to the
        desired qubits and false otherwise.
    nqubits : int
        The number of qubits the algorithm operates on; must be > 0.
    iterations : int, optional
        How many Grover iterations to perform.  Defaults to the optimal
        count floor(pi/4 * sqrt(2**nqubits)).

    Returns
    =======

    state : Expr
        The resulting state after Grover's algorithm has been iterated.

    Examples
    ========

    Apply grover's algorithm to an even superposition of 2 qubits::

        >>> from sympy.physics.quantum.qapply import qapply
        >>> from sympy.physics.quantum.qubit import IntQubit
        >>> from sympy.physics.quantum.grover import apply_grover
        >>> f = lambda qubits: qubits == IntQubit(2)
        >>> qapply(apply_grover(f, 2))
        |2>

    """
    if nqubits <= 0:
        raise QuantumError(
            'Grover\'s algorithm needs nqubits > 0, received %r qubits'
            % nqubits
        )
    if iterations is None:
        iterations = floor(sqrt(2**nqubits)*(pi/4))

    v = OracleGate(nqubits, oracle)
    iterated = superposition_basis(nqubits)
    # int() normalizes the sympy Integer produced by floor() for range();
    # the loop variable no longer shadows the builtin 'iter'.
    for _ in range(int(iterations)):
        iterated = grover_iteration(iterated, v)
        iterated = qapply(iterated)

    return iterated
| |
#
# This file is part of pySMT.
#
# Copyright 2014 Andrea Micheli and Marco Gario
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Provides the most used functions in a nicely wrapped API.
This module defines a global environment, so that most methods can be
called without the need to specify an environment or a FormulaManager.
Functions trying to access the global environment should use the
method get_global_env(). Keep in mind that the global state of the
environment might lead to inconsistency and unexpected bugs. This is
particularly true for tests. For tests it is recommended to perform an
environment reset in the setUp phase, to be guaranteed that a fresh
environment is used.
"""
# Enable default deprecation warnings!
import warnings
warnings.simplefilter('default')
import pysmt.typing as types
import pysmt.configuration as config
import pysmt.environment
# NOTE: every shortcut below resolves its manager through this global
# environment; reset_env() gives tests a fresh, isolated state.
def get_env():
    """Returns the global environment."""
    return pysmt.environment.get_env()

def reset_env():
    """Resets the global environment, and returns the new one."""
    return pysmt.environment.reset_env()
##### Shortcuts for FormulaManager #####
def get_type(formula):
    """Returns the type of the formula."""
    return get_env().stc.get_type(formula)

def simplify(formula):
    """Returns the simplified version of the formula."""
    return get_env().simplifier.simplify(formula)

def substitute(formula, subs):
    """Applies the substitutions defined in the dictionary to the formula."""
    return get_env().substituter.substitute(formula, subs)

def serialize(formula, threshold=None):
    """Provides a string representing the formula."""
    return get_env().serializer.serialize(formula,
                                          threshold=threshold)

def get_free_variables(formula):
    """Returns the free variables of the formula."""
    return get_env().fvo.get_free_variables(formula)

def get_atoms(formula):
    """Returns the set of atoms of the formula."""
    return get_env().ao.get_atoms(formula)

def get_formula_size(formula, measure=None):
    """Returns the size of the formula as measured by the given counting type.

    See pysmt.oracles.SizeOracle for details.
    """
    return get_env().sizeo.get_size(formula, measure)
##### Nodes Creation #####
# Each of these shortcuts delegates to the FormulaManager of the global
# environment; see the manager for the full node semantics.

def ForAll(variables, formula):
    r""".. math:: \forall v_1, \cdots, v_n . \varphi(v_1, \cdots, v_n)"""
    return get_env().formula_manager.ForAll(variables, formula)

def Exists(variables, formula):
    r""".. math:: \exists v_1, \cdots, v_n . \varphi(v_1, \cdots, v_n)"""
    return get_env().formula_manager.Exists(variables, formula)

def Function(vname, params):
    r""".. math:: vname(p_1, \cdots, p_n)"""
    return get_env().formula_manager.Function(vname, params)

def Not(formula):
    r""".. math:: \lnot \varphi"""
    return get_env().formula_manager.Not(formula)

def Implies(left, right):
    r""".. math:: l \rightarrow r"""
    return get_env().formula_manager.Implies(left, right)

def Iff(left, right):
    r""".. math:: l \leftrightarrow r """
    return get_env().formula_manager.Iff(left, right)

def GE(left, right):
    r""".. math:: l \ge r"""
    return get_env().formula_manager.GE(left, right)

def Minus(left, right):
    r""".. math:: l - r """
    return get_env().formula_manager.Minus(left, right)

def Times(left, right):
    r""".. math:: l * r"""
    return get_env().formula_manager.Times(left, right)

def Div(left, right):
    r""".. math:: \frac{l}{r}"""
    return get_env().formula_manager.Div(left, right)

def Equals(left, right):
    r""".. math:: l = r"""
    return get_env().formula_manager.Equals(left, right)

def GT(left, right):
    r""".. math:: l > r"""
    return get_env().formula_manager.GT(left, right)

def LE(left, right):
    r""".. math:: l \le r"""
    return get_env().formula_manager.LE(left, right)

def LT(left, right):
    r""".. math:: l < r"""
    return get_env().formula_manager.LT(left, right)

def Ite(iff, left, right):
    r""".. math:: \text{ If } i \text{ Then } l \text{ Else } r"""
    return get_env().formula_manager.Ite(iff, left, right)

def Symbol(name, typename=types.BOOL):
    """Returns a symbol with the given name and type (BOOL by default)."""
    return get_env().formula_manager.Symbol(name, typename)

def FreshSymbol(typename=types.BOOL, template=None):
    """Returns a symbol with a fresh name and given type."""
    return get_env().formula_manager.FreshSymbol(typename, template)

def Int(value):
    """Returns an Integer constant with the given value."""
    return get_env().formula_manager.Int(value)

def Bool(value):
    """Returns a Boolean constant with the given value."""
    return get_env().formula_manager.Bool(value)

def Real(value):
    """Returns a Real constant with the given value."""
    return get_env().formula_manager.Real(value)

def TRUE():
    """Returns the Boolean constant TRUE."""
    return get_env().formula_manager.TRUE()

def FALSE():
    """Returns the Boolean constant FALSE."""
    return get_env().formula_manager.FALSE()

def And(*args):
    r""".. math:: \varphi_0 \land \cdots \land \varphi_n """
    return get_env().formula_manager.And(*args)

def Or(*args):
    r""".. math:: \varphi_0 \lor \cdots \lor \varphi_n """
    return get_env().formula_manager.Or(*args)

def Plus(*args):
    r""".. math:: \varphi_0 + \cdots + \varphi_n """
    return get_env().formula_manager.Plus(*args)

def ToReal(formula):
    """Explicit cast of a term into a Real term."""
    return get_env().formula_manager.ToReal(formula)

def AtMostOne(*args):
    """
    Cardinality constraint over a set of boolean expressions.
    At most one can be true at anytime.
    """
    return get_env().formula_manager.AtMostOne(*args)

def ExactlyOne(*args):
    """Given a set of boolean expressions requires that exactly one holds."""
    return get_env().formula_manager.ExactlyOne(*args)

def AllDifferent(*args):
    """Given a set of non-boolean expressions, requires that each of them
    has value different from all the others
    """
    return get_env().formula_manager.AllDifferent(*args)

def Xor(left, right):
    """Returns the XOR of left and right"""
    return get_env().formula_manager.Xor(left, right)

def Min(*args):
    """
    Minimum over a set of real or integer terms
    """
    return get_env().formula_manager.Min(*args)

def Max(*args):
    """
    Maximum over a set of real or integer terms
    """
    return get_env().formula_manager.Max(*args)
# Bit Vectors -- constant constructors.
def BV(value, width=None):
    """Returns a constant of type BitVector.

    value can be either:
    - a string of 0s and 1s
    - a string starting with "#b" followed by a sequence of 0s and 1s
    - an integer number s.t. 0 <= value < 2**width

    In order to create the BV representation of a signed integer,
    the SBV() method shall be used.
    """
    return get_env().formula_manager.BV(value, width)

def SBV(value, width=None):
    """Returns a constant of type BitVector interpreting the sign.

    If the specified value is an integer, it is converted in the
    2-complement representation of the given number, otherwise the
    behavior is the same as BV().
    """
    return get_env().formula_manager.SBV(value, width)
# Bit Vector operations: logical, arithmetic, comparison (unsigned and
# signed), shifts and rotations.  All delegate to the FormulaManager of
# the global environment (get_env()).
def BVOne(width=None):
    """Returns the unsigned one constant BitVector."""
    return get_env().formula_manager.BVOne(width)

def BVZero(width=None):
    """Returns the zero constant BitVector."""
    return get_env().formula_manager.BVZero(width)

def BVNot(formula):
    """Returns the bitvector Not(bv)"""
    return get_env().formula_manager.BVNot(formula)

def BVAnd(left, right):
    """Returns the Bit-wise AND of two bitvectors of the same size."""
    return get_env().formula_manager.BVAnd(left, right)

def BVOr(left, right):
    """Returns the Bit-wise OR of two bitvectors of the same size."""
    return get_env().formula_manager.BVOr(left, right)

def BVXor(left, right):
    """Returns the Bit-wise XOR of two bitvectors of the same size."""
    return get_env().formula_manager.BVXor(left, right)

def BVConcat(left, right):
    """Returns the Concatenation of the two BVs"""
    return get_env().formula_manager.BVConcat(left, right)

def BVExtract(formula, start=0, end=None):
    """Returns the slice of formula from start to end (inclusive)."""
    return get_env().formula_manager.BVExtract(formula, start=start, end=end)

def BVULT(left, right):
    """Returns the formula left < right (unsigned comparison)."""
    return get_env().formula_manager.BVULT(left, right)

def BVUGT(left, right):
    """Returns the formula left > right (unsigned comparison)."""
    return get_env().formula_manager.BVUGT(left, right)

def BVULE(left, right):
    """Returns the formula left <= right (unsigned comparison)."""
    return get_env().formula_manager.BVULE(left, right)

def BVUGE(left, right):
    """Returns the formula left >= right (unsigned comparison)."""
    return get_env().formula_manager.BVUGE(left, right)

def BVNeg(formula):
    """Returns the arithmetic negation of the BV."""
    return get_env().formula_manager.BVNeg(formula)

def BVAdd(left, right):
    """Returns the sum of two BV."""
    return get_env().formula_manager.BVAdd(left, right)

def BVSub(left, right):
    """Returns the difference of two BV."""
    return get_env().formula_manager.BVSub(left, right)

def BVMul(left, right):
    """Returns the product of two BV."""
    return get_env().formula_manager.BVMul(left, right)

def BVUDiv(left, right):
    """Returns the (unsigned) division of the two BV."""
    return get_env().formula_manager.BVUDiv(left, right)

def BVURem(left, right):
    """Returns the (unsigned) remainder of the two BV."""
    return get_env().formula_manager.BVURem(left, right)

def BVLShl(left, right):
    """Returns the logical left shift of the BV."""
    return get_env().formula_manager.BVLShl(left, right)

def BVLShr(left, right):
    """Returns the logical right shift of the BV."""
    return get_env().formula_manager.BVLShr(left, right)

def BVRol(formula, steps):
    """Returns the LEFT rotation of the BV by the number of steps."""
    return get_env().formula_manager.BVRol(formula, steps)

def BVRor(formula, steps):
    """Returns the RIGHT rotation of the BV by the number of steps."""
    return get_env().formula_manager.BVRor(formula, steps)

def BVZExt(formula, increase):
    """Returns the zero extension of the BV.

    New bits are set to zero.
    """
    return get_env().formula_manager.BVZExt(formula, increase)

def BVSExt(formula, increase):
    """Returns the signed extension of the BV.

    New bits are set according to the most-significant-bit.
    """
    return get_env().formula_manager.BVSExt(formula, increase)

def BVSLT(left, right):
    """Returns the SIGNED LOWER-THAN comparison for BV."""
    return get_env().formula_manager.BVSLT(left, right)

def BVSLE(left, right):
    """Returns the SIGNED LOWER-THAN-OR-EQUAL-TO comparison for BV."""
    return get_env().formula_manager.BVSLE(left, right)

def BVSGT(left, right):
    """Returns the SIGNED GREATER-THAN comparison for BV."""
    return get_env().formula_manager.BVSGT(left, right)

def BVSGE(left, right):
    """Returns the SIGNED GREATER-THAN-OR-EQUAL-TO comparison for BV."""
    return get_env().formula_manager.BVSGE(left, right)

def BVSDiv(left, right):
    """Returns the SIGNED DIVISION of left by right"""
    return get_env().formula_manager.BVSDiv(left, right)

def BVSRem(left, right):
    """Returns the SIGNED REMAINDER of left divided by right"""
    return get_env().formula_manager.BVSRem(left, right)

def BVComp(left, right):
    """Returns a BV of size 1 equal to 0 if left is equal to right,
    otherwise 1 is returned."""
    return get_env().formula_manager.BVComp(left, right)

def BVAShr(left, right):
    """Returns the arithmetic RIGHT shift of the left BV by the number
    of steps specified by the right BV."""
    return get_env().formula_manager.BVAShr(left, right)
#### Shortcuts for Solvers Factory #####
# These delegate to the factory of the global environment (get_env()),
# which picks a concrete backend by name and/or logic.
def Solver(quantified=False, name=None, logic=None):
    """Returns a solver.

    :param quantified: whether quantifier support is required
    :param name: optional backend solver name
    :param logic: optional logic the solver must support
    """
    return get_env().factory.Solver(quantified=quantified,
                                    name=name,
                                    logic=logic)

def UnsatCoreSolver(quantified=False, name=None, logic=None,
                    unsat_cores_mode="all"):
    """Returns a solver supporting unsat core extraction.

    :param unsat_cores_mode: strategy for core computation (e.g. "all")
    """
    return get_env().factory.UnsatCoreSolver(quantified=quantified,
                                             name=name,
                                             logic=logic,
                                             unsat_cores_mode=unsat_cores_mode)

def QuantifierEliminator(name=None, logic=None):
    """Returns a quantifier eliminator"""
    return get_env().factory.QuantifierEliminator(name=name, logic=logic)

def Interpolator(name=None, logic=None):
    """Returns an interpolator"""
    return get_env().factory.Interpolator(name=name, logic=logic)
def is_sat(formula, solver_name=None, logic=None):
    """ Returns whether a formula is satisfiable.

    :param formula: The formula to check satisfiability
    :type formula: FNode
    :param solver_name: Specify the name of the solver to be used.
    :param logic: Specify the logic that is going to be used.
    :returns: Whether the formula is SAT or UNSAT.
    :rtype: bool
    """
    env = get_env()
    # A formula built in a different environment must be translated
    # ("normalized") into this environment's manager before solving.
    if formula not in env.formula_manager:
        warnings.warn("Warning: Contextualizing formula during is_sat")
        formula = env.formula_manager.normalize(formula)
    return env.factory.is_sat(formula,
                              solver_name=solver_name,
                              logic=logic)
def get_model(formula, solver_name=None, logic=None):
    """ Similar to :py:func:`is_sat` but returns a model if the formula is
    satisfiable, otherwise None."""
    env = get_env()
    # Translate formulae built in other environments (see is_sat).
    if formula not in env.formula_manager:
        warnings.warn("Warning: Contextualizing formula during get_model")
        formula = env.formula_manager.normalize(formula)
    return env.factory.get_model(formula,
                                 solver_name=solver_name,
                                 logic=logic)
def get_implicant(formula, solver_name=None, logic=None):
    """Returns a formula f_i such that Implies(f_i, formula) is valid or None
    if formula is unsatisfiable.

    :param formula: The formula to compute an implicant of
    :param solver_name: Specify the name of the solver to be used.
    :param logic: Specify the logic that is going to be used.
    """
    env = get_env()
    # Translate formulae built in other environments (see is_sat).
    if formula not in env.formula_manager:
        # Fixed copy-paste bug: the warning used to say "get_model".
        warnings.warn("Warning: Contextualizing formula during get_implicant")
        formula = env.formula_manager.normalize(formula)
    return env.factory.get_implicant(formula,
                                     solver_name=solver_name,
                                     logic=logic)
def get_unsat_core(clauses, solver_name=None, logic=None):
    """Similar to :py:func:`get_model` but returns the unsat core of the
    conjunction of the input clauses.

    :param clauses: iterable of formulae whose conjunction is checked
    """
    env = get_env()
    # Translate clauses built in other environments (see is_sat).
    if any(c not in env.formula_manager for c in clauses):
        # Fixed copy-paste bug: the warning used to say "get_model".
        warnings.warn("Warning: Contextualizing formula during get_unsat_core")
        clauses = [env.formula_manager.normalize(c) for c in clauses]
    return env.factory.get_unsat_core(clauses,
                                      solver_name=solver_name,
                                      logic=logic)
def is_valid(formula, solver_name=None, logic=None):
    """Similar to :py:func:`is_sat` but checks validity."""
    env = get_env()
    # Translate formulae built in other environments (see is_sat).
    if formula not in env.formula_manager:
        warnings.warn("Warning: Contextualizing formula during is_valid")
        formula = env.formula_manager.normalize(formula)
    return env.factory.is_valid(formula,
                                solver_name=solver_name,
                                logic=logic)

def is_unsat(formula, solver_name=None, logic=None):
    """Similar to :py:func:`is_sat` but checks unsatisfiability."""
    env = get_env()
    # Translate formulae built in other environments (see is_sat).
    if formula not in env.formula_manager:
        warnings.warn("Warning: Contextualizing formula during is_unsat")
        formula = env.formula_manager.normalize(formula)
    return env.factory.is_unsat(formula,
                                solver_name=solver_name,
                                logic=logic)
def qelim(formula, solver_name=None, logic=None):
    """Performs quantifier elimination of the given formula.

    :param formula: The quantified formula to eliminate quantifiers from
    :param solver_name: Specify the name of the eliminator to be used.
    :param logic: Specify the logic that is going to be used.
    """
    env = get_env()
    # Translate formulae built in other environments (see is_sat).
    if formula not in env.formula_manager:
        # Fixed copy-paste bug: the warning used to say "is_unsat".
        warnings.warn("Warning: Contextualizing formula during qelim")
        formula = env.formula_manager.normalize(formula)
    return env.factory.qelim(formula,
                             solver_name=solver_name,
                             logic=logic)
def binary_interpolant(formula_a, formula_b, solver_name=None, logic=None):
    """Computes an interpolant of (formula_a, formula_b). Returns None
    if the conjunction is satisfiable"""
    env = get_env()
    formulas = [formula_a, formula_b]
    # Translate any formula built in a different environment (see is_sat).
    for i, f in enumerate(formulas):
        if f not in env.formula_manager:
            warnings.warn("Warning: Contextualizing formula during "
                          "binary_interpolant")
            formulas[i] = env.formula_manager.normalize(f)
    return env.factory.binary_interpolant(formulas[0], formulas[1],
                                          solver_name=solver_name,
                                          logic=logic)

def sequence_interpolant(formulas, solver_name=None, logic=None):
    """Computes a sequence interpolant of the formulas. Returns None
    if the conjunction is satisfiable"""
    env = get_env()
    # Copy so the caller's sequence is never mutated in place.
    formulas = list(formulas)
    for i, f in enumerate(formulas):
        if f not in env.formula_manager:
            warnings.warn("Warning: Contextualizing formula during "
                          "sequence_interpolant")
            formulas[i] = env.formula_manager.normalize(f)
    return env.factory.sequence_interpolant(formulas,
                                            solver_name=solver_name,
                                            logic=logic)
def read_configuration(config_filename, environment=None):
    """
    Reads the pysmt configuration of the given file path and applies
    it on the specified environment. If no environment is specified,
    the top-level environment will be used.
    """
    if environment is None:
        environment = get_env()
    config.configure_environment(config_filename, environment)

def write_configuration(config_filename, environment=None):
    """
    Dumps the current pysmt configuration to the specified file path.

    If no environment is specified, the top-level environment is dumped.
    """
    if environment is None:
        environment = get_env()
    config.write_environment_configuration(config_filename, environment)
| |
# -*- coding: utf-8 -*-
import numpy as np
from cea.demand import control_ventilation_systems, constants, control_heating_cooling_systems
from cea.utilities import physics
from cea.constants import HOURS_IN_YEAR
__author__ = "Gabriel Happle"
__copyright__ = "Copyright 2016, Architecture and Building Systems - ETH Zurich"
__credits__ = ["Gabriel Happle"]
__license__ = "MIT"
__version__ = "0.1"
__maintainer__ = "Daren Thomas"
__email__ = "thomas@arch.ethz.ch"
__status__ = "Production"
# THIS SCRIPT IS USED TO CALCULATE ALL VENTILATION PROPERTIES (AIR FLOWS AND THEIR TEMPERATURES)
# FOR CALCULATION OF THE VENTILATION HEAT TRANSFER H_VE USED IN THE ISO 13790 CALCULATION PROCEDURE
# get values of global variables
ETA_REC = constants.ETA_REC # constant efficiency of Heat recovery
DELTA_P_DIM = constants.DELTA_P_DIM
H_F = constants.H_F
def calc_air_mass_flow_mechanical_ventilation(bpr, tsd, t):
    """
    Calculates minimum mass flow rate of mechanical ventilation at time step t according to ventilation control
    options and building systems properties.

    Author: Gabriel Happle
    Date: 01/2017

    :param bpr: Building properties row object
    :type bpr: cea.demand.thermal_loads.BuildingPropertiesRow
    :param tsd: Timestep data
    :type tsd: Dict[str, numpy.ndarray]
    :param t: time step [0..HOURS_IN_YEAR]
    :type t: int
    :return: None -- updates tsd['m_ve_mech'][t] in place (kg/s)
    :raises ValueError: if no ventilation-control case matches (indicates an
        inconsistent control configuration)
    """
    # if has mechanical ventilation and not night flushing : m_ve_mech = m_ve_schedule
    if control_ventilation_systems.is_mechanical_ventilation_active(bpr, tsd, t) \
            and not control_ventilation_systems.is_night_flushing_active(bpr, tsd, t) \
            and not control_ventilation_systems.is_economizer_active(bpr, tsd, t):

        # mechanical ventilation fulfills requirement - minimum ventilation provided by infiltration
        # (similar to CO2 sensor); clamp at 0 so infiltration exceeding the requirement never goes negative
        m_ve_mech = max(tsd['m_ve_required'][t] - tsd['m_ve_inf'][t], 0.0)

    elif control_ventilation_systems.has_mechanical_ventilation(bpr) \
            and control_ventilation_systems.is_night_flushing_active(bpr, tsd, t):

        # night flushing according to strategy
        # ventilation with maximum capacity = maximum required ventilation rate
        m_ve_mech = tsd['m_ve_required'].max()  # TODO: some night flushing rule

    elif control_ventilation_systems.has_mechanical_ventilation(bpr) \
            and control_ventilation_systems.is_economizer_active(bpr, tsd, t):

        # economizer according to strategy
        # ventilation with maximum capacity = maximum required ventilation rate
        m_ve_mech = tsd['m_ve_required'].max()

    elif not control_ventilation_systems.is_mechanical_ventilation_active(bpr, tsd, t):

        # mechanical ventilation is turned off
        m_ve_mech = 0.0

    else:
        # previously a bare ``raise ValueError`` -- give the caller a diagnostic message
        raise ValueError('Unhandled mechanical ventilation control state at time step %s' % t)

    tsd['m_ve_mech'][t] = m_ve_mech

    return
def calc_air_mass_flow_window_ventilation(bpr, tsd, t):
    """
    Calculates mass flow rate of window ventilation at time step t according to ventilation control options and
    building systems properties.

    Author: Gabriel Happle
    Date: 01/2017

    :param bpr: Building properties row object
    :type bpr: cea.demand.thermal_loads.BuildingPropertiesRow
    :param tsd: Timestep data
    :type tsd: Dict[str, numpy.ndarray]
    :param t: time step [0..HOURS_IN_YEAR]
    :type t: int
    :return: None -- updates tsd['m_ve_window'][t] in place (kg/s)
    :raises ValueError: if no ventilation-control case matches (indicates an
        inconsistent control configuration)
    """
    # if has window ventilation and not special control : m_ve_window = m_ve_schedule
    if control_ventilation_systems.is_window_ventilation_active(bpr, tsd, t) \
            and not control_ventilation_systems.is_night_flushing_active(bpr, tsd, t):

        # window ventilation fulfills requirement (control by occupants similar to CO2 sensor);
        # clamp at 0 so infiltration exceeding the requirement never goes negative
        m_ve_window = max(tsd['m_ve_required'][t] - tsd['m_ve_inf'][t], 0)
        # TODO: check window ventilation calculation, there are some methods in SIA2044

    elif control_ventilation_systems.is_window_ventilation_active(bpr, tsd, t) \
            and control_ventilation_systems.is_night_flushing_active(bpr, tsd, t):

        # ventilation with maximum capacity = maximum required ventilation rate
        m_ve_window = tsd['m_ve_required'].max()  # TODO: implement some night flushing rule

    elif not control_ventilation_systems.is_window_ventilation_active(bpr, tsd, t):

        m_ve_window = 0

    else:
        # previously a bare ``raise ValueError`` -- give the caller a diagnostic message
        raise ValueError('Unhandled window ventilation control state at time step %s' % t)

    tsd['m_ve_window'][t] = m_ve_window

    return
def calc_m_ve_leakage_simple(bpr, tsd):
    """
    Calculates mass flow rate of leakage according to ventilation control options and
    building systems properties.

    Estimation of infiltration air volume flow rate according to Eq. (3) in DIN 1946-6

    Author: Gabriel Happle
    Date: 01/2017

    :param bpr: Building properties row object
    :type bpr: cea.demand.thermal_loads.BuildingPropertiesRow
    :param tsd: Timestep data
    :type tsd: Dict[str, numpy.ndarray]
    :return: None -- updates tsd['m_ve_inf'] (whole-year array, kg/s)
    """
    # 'flat rate' infiltration considered for all buildings

    # get properties
    n50 = bpr.architecture.n50  # air-tightness [1/h] at 50 Pa
    area_f = bpr.rc_model['Af']  # conditioned floor area [m2]

    # estimation of infiltration air volume flow rate according to Eq. (3) in DIN 1946-6
    # NOTE: exponent written as 2.0 / 3.0 -- the previous integer expression (2 / 3)
    # evaluates to 0 under Python 2 division, silently dropping the pressure correction
    n_inf = 0.5 * n50 * (DELTA_P_DIM / 50) ** (2.0 / 3.0)  # [air changes per hour] m3/h.m2

    infiltration = H_F * area_f * n_inf * 0.000277778  # m3/s (1/3600 h->s conversion)

    # convert volume flow to mass flow using air density at exterior temperature
    tsd['m_ve_inf'] = infiltration * physics.calc_rho_air(tsd['T_ext'][:])  # (kg/s)

    return
def calc_theta_ve_mech(bpr, tsd, t):
    """
    Calculates supply temperature of mechanical ventilation system according to ventilation control options and
    building systems properties

    Author: Gabriel Happle
    Date: 01/2017

    :param bpr: Building properties row object
    :type bpr: cea.demand.thermal_loads.BuildingPropertiesRow
    :param tsd: Timestep data
    :type tsd: Dict[str, numpy.ndarray]
    :param t: time step [0..HOURS_IN_YEAR]
    :type t: int
    :return: updates tsd
    """
    if control_ventilation_systems.is_mechanical_ventilation_heat_recovery_active(bpr, tsd, t):
        # heat recovery warms (or cools) outdoor air towards last hour's indoor temperature
        # NOTE(review): at t == 0 this reads tsd['T_int'][-1], i.e. the last hour of the
        # year -- presumably intentional (annual wrap-around), but worth confirming.
        theta_eta_rec = tsd['T_int'][t-1]

        theta_ve_mech = tsd['T_ext'][t] + ETA_REC * (theta_eta_rec - tsd['T_ext'][t])  # TODO: some HEX formula

    # if no heat recovery: theta_ve_mech = theta_ext
    elif not control_ventilation_systems.is_mechanical_ventilation_heat_recovery_active(bpr, tsd, t):
        theta_ve_mech = tsd['T_ext'][t]

    else:
        # defensive fallback: unreachable if the predicate above returns a boolean
        theta_ve_mech = np.nan
        print('Warning! Unknown HEX status')

    tsd['theta_ve_mech'][t] = theta_ve_mech

    return
def calc_m_ve_required(tsd):
    """
    Calculate required outdoor air ventilation rate according to occupancy.

    Converts the scheduled fresh-air requirement in l/s to a mass flow in kg/s
    using the air density at the exterior temperature of each hour.

    Author: Legacy
    Date: old

    :param tsd: Timestep data
    :type tsd: Dict[str, numpy.ndarray]
    :return: None -- updates tsd['m_ve_required'] in place
    """
    # density of exterior air for every hour of the year [kg/m3]
    rho_air = physics.calc_rho_air(tsd['T_ext'][:])

    # l/s -> m3/s (factor 0.001), then volume flow -> mass flow via density
    m_required = np.array(tsd['ve_lps']) * rho_air * 0.001  # kg/s
    tsd['m_ve_required'] = m_required

    return
| |
# coding: utf-8
"""
mana
~~~~
the missing startproject command for flask
Usage:
mana init <project_name>
mana startproject <project_name>
mana blueprint <blueprint_name>
mana version show version
Options:
mana --help: help information
Install:
$ pip install mana (--upgrade)
"""
import os
# operators
from operators import _mkdir_p
from operators import init_code
# templates
from templates.manage import _manage_basic_code, _manage_admin_code
from templates.requirement import _requirement_code, _requirement_admin_code
from templates.views import _views_basic_code, _views_blueprint_code
from templates.forms import _forms_basic_code
from templates.init import _init_basic_code, _init_blueprint_code, \
_init_admin_code
from templates.config import _config_sql_code
from templates.models import _models_admin_code
from templates.admin import _admin_views_code, _admin_index_html_code, \
_admin_logout_html_code
from templates.auth import _auth_forms_code, _auth_views_code, \
_auth_login_html_code, _auth_login_css_code
# logging
import logging
from logging import StreamHandler, DEBUG
# logger
logger = logging.getLogger(__name__)
logger.setLevel(DEBUG)
logger.addHandler(StreamHandler())
# logging info
def warning_path_exist(path):
    """
    send warning msg if path exist

    :param path: absolute path of the project directory that already exists
    """
    # ANSI escapes: \033[31m red, \033[32m green, \033[0m reset
    logger.warning('''\033[31m{Warning}\033[0m
    ==> \033[32m%s\033[0m\n exist
    ==> please change the project name,
    ==> and try again !''' % path)
def start_init_info(path):
    """
    start init msg

    Aborts the whole process (exit code 1) if ``path`` already exists,
    otherwise logs that project initialization is starting.

    :param path: absolute destination path of the new project
    """
    if os.path.isdir(path):
        warning_path_exist(path)
        exit(1)  # abort: never overwrite an existing project
    else:
        logger.info('''\033[33m{Info}\033[0m
    ==> start init your flask project [on]
    ==> \033[32m%s\033[0m\n''' % path)
def init_done_info():
    """
    init done

    Logs the final success message after project scaffolding completes.
    """
    logger.info('''\033[33m{Info}\033[0m
    ==> init your flask project done !''')
# create
def create_templates_static_files(app_path):
    """
    Create the ``templates/`` and ``static/`` skeleton of a Flask app.

    Builds ``static/img``, ``static/css`` and ``static/js`` and leaves the
    current working directory inside ``static/`` (callers rely on the chdir).

    :param app_path: path of the app package directory
    :return: tuple ``(css_path, templates_path)``
    """
    templates_path = os.path.join(app_path, 'templates')
    static_path = os.path.join(app_path, 'static')
    for top_level in (templates_path, static_path):
        _mkdir_p(top_level)

    # create {img, css, js} under static/
    os.chdir(static_path)
    css_path = None
    for sub_name in ('img', 'css', 'js'):
        sub_path = os.path.join(static_path, sub_name)
        _mkdir_p(sub_path)
        if sub_name == 'css':
            css_path = sub_path

    return css_path, templates_path
def create_blueprint(app_path, blueprint, views_code, forms_code, templates_path):
    """
    create blueprint

    Creates ``app/<blueprint>/{__init__.py, views.py, forms.py}`` and the
    matching ``templates/<blueprint>`` directory.  Leaves the current working
    directory inside ``templates_path``.

    :param app_path: path of the app package directory
    :param blueprint: name of the blueprint (also used as url prefix)
    :param views_code: source text written to views.py
    :param forms_code: source text written to forms.py
    :param templates_path: path of the app's templates directory
    :return: path of the blueprint's templates directory
    """
    blueprint_path = os.path.join(app_path, blueprint)
    _mkdir_p(blueprint_path)

    # create blueprint files
    os.chdir(blueprint_path)
    # the template interpolates the blueprint name twice (module + object)
    init_code('__init__.py', _init_blueprint_code % (blueprint, blueprint))
    init_code('views.py', views_code)
    init_code('forms.py', forms_code)

    # main blueprint templates
    os.chdir(templates_path)
    blueprint_templates_path = os.path.join(templates_path, blueprint)
    _mkdir_p(blueprint_templates_path)

    return blueprint_templates_path
"""use click:)"""
import click
@click.group()
def cli():
"""
the missing startproject command for Flask
\b
[processes]
virtualenv venv
&& source venv/bin/activate -> create a virtual environment (optional)
pip install -r requirement.txt -> install flask extensions
\b
python manage.py db init
python manage.py db migrate
python manage.py db upgrade -> setup sql database(default database is sqlite)
\b
python manage.py shell -> create roles
>> Role.insert_roles()
>> quit()
\b
python manage.py admin -> create admin user
python manage.py runserver(-d) -> run project(in debug mode)'''
"""
pass
@click.command()
@click.argument('project_name')
def init(project_name):
    """
    build a minimal flask project
    """
    # the destination path (aborts via start_init_info if it already exists)
    dst_path = os.path.join(os.getcwd(), project_name)
    start_init_info(dst_path)

    # create dst path
    _mkdir_p(dst_path)
    os.chdir(dst_path)

    # create top-level files
    init_code('manage.py', _manage_basic_code)
    init_code('requirement.txt', _requirement_code)

    # create app/ package
    app_path = os.path.join(dst_path, 'app')
    _mkdir_p(app_path)
    os.chdir(app_path)

    # create app files
    init_code('views.py', _views_basic_code)
    init_code('forms.py', _forms_basic_code)
    init_code('__init__.py', _init_basic_code)

    create_templates_static_files(app_path)

    init_done_info()
@click.command()
@click.argument('blueprint_name')
def blueprint(blueprint_name):
    """
    create and register a blueprint
    """
    # must be run from inside the app/ package directory
    # (os.path.basename is portable; the old split('/') broke on Windows)
    app = os.path.basename(os.getcwd())
    if app != 'app':
        logger.warning('''\033[31m{Warning}\033[0m
    ==> your current path is \033[32m%s\033[0m\n
    ==> please create your blueprint under app folder!''' % os.getcwd())
        exit(1)

    # destination path
    dst_path = os.path.join(os.getcwd(), blueprint_name)
    if os.path.isdir(dst_path):
        # fixed typo in user-facing message ("bluprint")
        logger.warning('''\033[31m{Warning}\033[0m
    ==> blueprint \033[32m%s\033[0m\n exist
    ==> please try again !''' % dst_path)
        exit(1)

    # create dst_path
    _mkdir_p(dst_path)

    # change dir
    os.chdir(dst_path)

    # create blueprint files
    init_code('__init__.py', _init_blueprint_code %
              (blueprint_name, blueprint_name))
    init_code('views.py', _views_blueprint_code %
              (blueprint_name, blueprint_name))
    init_code('forms.py', _forms_basic_code)

    # register the blueprint in app/__init__.py: seek to the start of the
    # last line and append the registration code after it
    os.chdir(os.path.join(dst_path, '..'))
    with open('__init__.py', 'r+') as f:
        prev = pos = 0
        while f.readline():
            prev, pos = pos, f.tell()
        f.seek(prev)
        f.write(
            '\nfrom %s import %s\napp.register_blueprint(%s, url_prefix="/%s")\n\n'
            % (
                blueprint_name, blueprint_name,
                blueprint_name, blueprint_name
            )
        )

    # create blueprint templates
    templates_path = os.path.join(os.getcwd(), 'templates')
    os.chdir(templates_path)
    blueprint_templates_path = os.path.join(templates_path, blueprint_name)
    _mkdir_p(blueprint_templates_path)

    logger.info('''\033[33m{Info}\033[0m: create blueprint done!''')
@click.command()
@click.argument('project_name')
def startproject(project_name):
    """
    build a full status project
    """
    # the destination path (aborts via start_init_info if it already exists)
    dst_path = os.path.join(os.getcwd(), project_name)
    start_init_info(dst_path)

    # create dst path
    _mkdir_p(dst_path)

    # create project tree
    os.chdir(dst_path)

    # create top-level files (admin-enabled variants)
    init_code('manage.py', _manage_admin_code)
    init_code('requirement.txt', _requirement_admin_code)
    init_code('config.py', _config_sql_code)

    # create app/ package
    app_path = os.path.join(dst_path, 'app')
    _mkdir_p(app_path)

    # create app files
    os.chdir(app_path)
    init_code('models.py', _models_admin_code)
    init_code('__init__.py', _init_admin_code)

    # create templates and static (also chdirs into static/)
    css_path, templates_path = create_templates_static_files(app_path)

    # create css files
    os.chdir(css_path)
    init_code('sign.css', _auth_login_css_code)

    # create main blueprint
    create_blueprint(
        app_path,
        'main',
        _views_blueprint_code % ('main', 'main'),
        _forms_basic_code,
        templates_path
    )

    # create auth blueprint
    auth_templates_path = create_blueprint(
        app_path,
        'auth',
        _auth_views_code,
        _auth_forms_code,
        templates_path
    )

    # create auth templates files
    os.chdir(auth_templates_path)
    init_code('login.html', _auth_login_html_code)

    # create admin site package
    admin_path = os.path.join(app_path, 'admin')
    _mkdir_p(admin_path)

    # create admin files
    os.chdir(admin_path)
    init_code('__init__.py', '')
    init_code('views.py', _admin_views_code)

    # create admin templates
    os.chdir(templates_path)
    admin_templates_path = os.path.join(templates_path, 'admin')
    _mkdir_p(admin_templates_path)

    # create admin templates files
    os.chdir(admin_templates_path)
    init_code('index.html', _admin_index_html_code)
    init_code('logout.html', _admin_logout_html_code)

    init_done_info()
@click.command()
@click.argument('module')
def admin(module):
    """add sql modules into admin site"""
    # must be run from inside the app/ package directory
    # (os.path.basename is portable; the old split('/') broke on Windows)
    app = os.path.basename(os.getcwd())
    if app != 'app':
        logger.warning('''\033[31m{Warning}\033[0m
    ==> your current path is \033[32m%s\033[0m\n
    ==> please add your sql module under app folder!''' % os.getcwd())
        exit(1)

    # append the ModelView registration after the last line of admin/views.py
    admin_path = os.path.join(os.getcwd(), 'admin')
    os.chdir(admin_path)
    with open('views.py', 'r+') as f:
        prev = pos = 0
        while f.readline():
            prev, pos = pos, f.tell()
        f.seek(prev)
        f.write(
            '\nfrom app.models import %s\nadmin.add_view(ModelView(%s, db.session))'
            % (module, module)
        )

    logger.info('''\033[33m{Info}\033[0m: add module done!''')
@click.command()
def version():
    """mana version"""
    # "\\/" fixes the invalid escape sequence "\/" (a warning on modern
    # Python); the rendered output is byte-identical.
    click.echo("mana version: 4.9 \\/ ")
# mana command set: register every subcommand on the top-level click group
# so `mana <command>` dispatches to the functions defined above
cli.add_command(init)
cli.add_command(blueprint)
cli.add_command(startproject)
cli.add_command(admin)
cli.add_command(version)
| |
# Copyright (c) 2014-2020 Cloudify Platform Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import unittest
from cloudify import exceptions as cfy_exc
from cloudify import mocks as cfy_mocks
from vcloud_server_plugin import server
from tests.unittests import test_mock_base
from cloudify.state import current_ctx
class ServerPluginServerSubRoutesMockTestCase(test_mock_base.TestBase):
    # Validation tests for server._check_hardware(cpu, memory):
    # None/None is accepted, out-of-range or non-int values must raise.
    def test_check_hardware_empty(self):
        ''' nothing is set '''
        server._check_hardware(None, None)

    def test_check_hardware_without_cpu(self):
        ''' without cpu? '''
        with self.assertRaises(cfy_exc.NonRecoverableError):
            server._check_hardware(0, 10)

    def test_check_hardware_much_cpu(self):
        ''' too many cpus: 128 cpu? '''
        with self.assertRaises(cfy_exc.NonRecoverableError):
            server._check_hardware(128, 10)

    def test_check_hardware_cpu_is_string(self):
        ''' cpu is string '''
        with self.assertRaises(cfy_exc.NonRecoverableError):
            server._check_hardware('not int', 10)

    def test_check_hardware_low_memory(self):
        ''' low memory == 10M '''
        with self.assertRaises(cfy_exc.NonRecoverableError):
            server._check_hardware(1, 10)

    def test_check_hardware_much_memory(self):
        ''' too much memory 1000G '''
        with self.assertRaises(cfy_exc.NonRecoverableError):
            server._check_hardware(1, 1024 * 1024)

    def test_check_hardware_memory_is_string(self):
        ''' memory is string '''
        with self.assertRaises(cfy_exc.NonRecoverableError):
            server._check_hardware(1, 'memory')

    def test_check_hardware(self):
        # valid combination: 1 cpu, 512M memory -- must not raise
        server._check_hardware(1, 512)
    def test_build_script(self):
        """_build_script returns None without keys/scripts, non-None otherwise."""
        # no custom scripts and no connected keypairs -> no script generated
        with mock.patch('vcloud_server_plugin.server._get_connected_keypairs',
                        mock.MagicMock(
                            return_value=[])):
            self.assertEqual(None, server._build_script({}, []))
        custom = {
            'pre_script': 'pre_script',
            'post_script': 'post_script',
            'public_keys': [{
                'key': True
            }]
        }
        # with scripts and a connected keypair -> a script is generated
        with mock.patch('vcloud_server_plugin.server._get_connected_keypairs',
                        mock.MagicMock(
                            return_value=[{'key': 'key'}])):
            self.assertNotEqual(None, server._build_script(custom, []))
    def test_build_public_keys_script(self):
        """Only entries with a truthy 'key' contribute to the generated script."""
        def script_fun(a, b, c, d, e):
            # fake script builder: appends the concatenated arguments
            return a.append("{}{}{}{}".format(b, c, d, e))

        # empty list or falsy key -> empty script
        self.assertEqual('', server._build_public_keys_script([], script_fun))
        self.assertEqual('', server._build_public_keys_script([
            {'key': False}
        ], script_fun))
        # truthy key -> non-empty script, with or without user/home overrides
        self.assertNotEqual('', server._build_public_keys_script([
            {'key': True}
        ], script_fun))
        self.assertNotEqual('', server._build_public_keys_script([
            {
                'key': True,
                'user': 'test',
                'home': 'home'
            }
        ], script_fun))
    def test_creation_validation_empty_settings(self):
        """creation_validation rejects an empty 'server' property block."""
        fake_ctx = cfy_mocks.MockCloudifyContext(
            node_id='test',
            node_name='test',
            properties={
                'server': {}
            },
            provider_context={}
        )
        current_ctx.set(fake_ctx)
        with mock.patch(
            'vcloud_plugin_common.VcloudAirClient',
            self.generate_vca()
        ):
            with self.assertRaises(cfy_exc.NonRecoverableError):
                server.creation_validation(ctx=fake_ctx, vca_client=None)
    def test_creation_validation_external_resource(self):
        """
        must run without any errors and check with empty
        server description
        """
        # unknown resource_id -> validation must fail
        fake_ctx = cfy_mocks.MockCloudifyContext(
            node_id='test',
            node_name='test',
            properties={
                'use_external_resource': True
            },
            provider_context={}
        )
        current_ctx.set(fake_ctx)
        with mock.patch(
            'vcloud_plugin_common.VcloudAirClient',
            self.generate_vca()
        ):
            with self.assertRaises(cfy_exc.NonRecoverableError):
                server.creation_validation(ctx=fake_ctx, vca_client=None)
        # with resource_id -> validation passes
        fake_ctx = cfy_mocks.MockCloudifyContext(
            node_id='test',
            node_name='test',
            properties={
                'use_external_resource': True,
                'resource_id': 'ServerName'
            },
            provider_context={}
        )
        current_ctx.set(fake_ctx)
        with mock.patch(
            'vcloud_plugin_common.VcloudAirClient',
            self.generate_vca()
        ):
            server.creation_validation(ctx=fake_ctx, vca_client=None)
    def test_creation_validation_settings_wrong_catalog(self):
        """An unknown catalog name must be rejected by creation_validation."""
        fake_ctx = cfy_mocks.MockCloudifyContext(
            node_id='test',
            node_name='test',
            properties={
                'server': {
                    'catalog': 'unknow',
                    'template': 'secret'
                }
            },
            provider_context={}
        )
        current_ctx.set(fake_ctx)
        with mock.patch(
            'vcloud_plugin_common.VcloudAirClient.get',
            self.generate_vca()
        ):
            with self.assertRaises(cfy_exc.NonRecoverableError):
                server.creation_validation(ctx=fake_ctx, vca_client=None)

    def test_creation_validation_settings_wrong_template(self):
        """An unknown template name must be rejected by creation_validation."""
        fake_ctx = cfy_mocks.MockCloudifyContext(
            node_id='test',
            node_name='test',
            properties={
                'server': {
                    'catalog': 'public',
                    'template': 'unknow'
                }
            },
            provider_context={}
        )
        current_ctx.set(fake_ctx)
        with mock.patch(
            'vcloud_plugin_common.VcloudAirClient.get',
            self.generate_vca()
        ):
            with self.assertRaises(cfy_exc.NonRecoverableError):
                server.creation_validation(ctx=fake_ctx, vca_client=None)
    def test_creation_validation_settings(self):
        """A known catalog/template pair passes creation_validation."""
        fake_ctx = cfy_mocks.MockCloudifyContext(
            node_id='test',
            node_name='test',
            properties={
                'server': {
                    'catalog': 'public',
                    'template': 'secret'
                }
            },
            provider_context={}
        )
        current_ctx.set(fake_ctx)
        with mock.patch(
            'vcloud_plugin_common.VcloudAirClient.get',
            self.generate_vca()
        ):
            server.creation_validation(ctx=fake_ctx, vca_client=None)
    def test_isDhcpAvailable(self):
        """_isDhcpAvailable depends on the network the gateway serves."""
        client = self.generate_client()
        fake_ctx = cfy_mocks.MockCloudifyContext(
            node_id='test',
            node_name='test',
            properties={
                'server': {
                    'catalog': 'unknow',
                    'template': 'secret'
                },
                'vcloud_config': {
                    'vdc': 'vdc_name'
                }
            },
            provider_context={}
        )
        current_ctx.set(fake_ctx)
        with mock.patch('vcloud_plugin_common.ctx', fake_ctx):
            # bridged networks report DHCP as available
            self.assertEqual(
                True, server._isDhcpAvailable(client, 'bridged')
            )
            # 'local' is not served by the mocked gateway
            self.assertEqual(
                False, server._isDhcpAvailable(client, 'local')
            )
            # the configured vdc network is served
            self.assertEqual(
                True, server._isDhcpAvailable(client, 'vdc_name')
            )
    def test_get_connected(self):
        """_get_connected returns relationship targets whose properties
        contain the given key, and [] when nothing matches."""
        fake_ctx = self.generate_node_context_with_current_ctx(
            relation_node_properties={
                "not_test": "not_test"
            }
        )
        # key absent from the related node's properties -> no targets
        self.assertEqual(
            server._get_connected(fake_ctx.instance, "test"), []
        )
        # key present -> the relationship target is returned
        self.assertEqual(
            server._get_connected(fake_ctx.instance, "not_test"),
            [fake_ctx.instance._relationships[0].target]
        )
        fake_ctx.instance._relationships = []
        # test []
        self.assertEqual(
            server._get_connected(fake_ctx.instance, "test"), []
        )
    def test_create_connections_list(self):
        """Exercise _create_connections_list over several scenarios:
        port + network + management network, promotion of a non-primary
        port to primary, no networks at all (error), a port that reuses
        the management network, DHCP without a gateway service, and a
        management-network-only node."""
        # one connection from port, one from network and
        # one managment_network
        fake_ctx = self.generate_node_context_with_current_ctx(
            relation_node_properties={
                "not_test": "not_test",
                'port': {
                    'network': 'private_network',
                    'ip_address': "1.1.1.1",
                    'mac_address': "hex",
                    'ip_allocation_mode': 'pool',
                    'primary_interface': True
                },
                'network': {
                    'name': 'some_network'
                }
            }
        )
        fake_client = self.generate_client()
        with mock.patch('vcloud_plugin_common.ctx', fake_ctx):
            connection = server._create_connections_list(
                ctx=fake_ctx, vca_client=fake_client)
        self.assertEqual(
            [
                {
                    'network': 'private_network',
                    'mac_address': 'hex',
                    'ip_allocation_mode': 'POOL',
                    'primary_interface': True,
                    'ip_address': '1.1.1.1',
                    'nic_order': 0
                }, {
                    'network': 'some_network',
                    'mac_address': None,
                    'ip_allocation_mode': 'POOL',
                    'primary_interface': False,
                    'ip_address': None,
                    'nic_order': 0
                }, {
                    'network': '_management_network',
                    'mac_address': None,
                    'ip_allocation_mode': 'POOL',
                    'primary_interface': False,
                    'ip_address': None,
                    'nic_order': 0
                }
            ], connection
        )
        # get network name from first avaible but not primary
        fake_ctx = self.generate_node_context_with_current_ctx(
            relation_node_properties={
                "not_test": "not_test",
                'port': {
                    'network': 'private_network',
                    'ip_address': "1.1.1.1",
                    'mac_address': "hex",
                    'ip_allocation_mode': 'pool',
                    'primary_interface': False,
                    'nic_order': 0
                }
            }
        )
        fake_client = self.generate_client()
        # no management network configured -> the single port becomes primary
        fake_ctx.node.properties['management_network'] = None
        with mock.patch('vcloud_plugin_common.ctx', fake_ctx):
            connection = server._create_connections_list(
                ctx=fake_ctx, vca_client=fake_client)
        self.assertEqual(
            [
                {
                    'ip_address': '1.1.1.1',
                    'ip_allocation_mode': 'POOL',
                    'mac_address': 'hex',
                    'network': 'private_network',
                    'primary_interface': True,
                    'nic_order': 0
                }
            ], connection
        )
        # no connections
        fake_ctx = self.generate_node_context_with_current_ctx(
            relation_node_properties={
                "not_test": "not_test"
            }
        )
        fake_client = self.generate_client()
        fake_ctx.node.properties['management_network'] = None
        # neither ports, networks nor a management network -> fatal error
        with mock.patch('vcloud_plugin_common.ctx', fake_ctx):
            with self.assertRaises(cfy_exc.NonRecoverableError):
                server._create_connections_list(ctx=fake_ctx,
                                                vca_client=fake_client)
        # one network same as managment + port
        fake_ctx = self.generate_node_context_with_current_ctx(
            relation_node_properties={
                "not_test": "not_test",
                'port': {
                    'network': '_management_network',
                    'ip_address': "1.1.1.1",
                    'mac_address': "hex",
                    'ip_allocation_mode': 'pool',
                    'primary_interface': True,
                    'nic_order': 0
                },
                'network': {
                    'name': 'some_network'
                }
            }
        )
        with mock.patch('vcloud_plugin_common.ctx', fake_ctx):
            connection = server._create_connections_list(
                ctx=fake_ctx, vca_client=fake_client)
        self.assertEqual(
            [
                {
                    'ip_address': '1.1.1.1',
                    'ip_allocation_mode': 'POOL',
                    'mac_address': 'hex',
                    'network': '_management_network',
                    'primary_interface': True,
                    'nic_order': 0
                },
                {
                    'ip_address': None,
                    'ip_allocation_mode': 'POOL',
                    'mac_address': None,
                    'network': 'some_network',
                    'primary_interface': False,
                    'nic_order': 0
                }
            ], connection
        )
        # check dhcp, with no dhcp server
        fake_ctx = self.generate_node_context(relation_node_properties={
            "not_test": "not_test",
            'port': {
                'network': '_management_network',
                'ip_address': "1.1.1.1",
                'mac_address': "hex",
                'ip_allocation_mode': 'dhcp',
                'primary_interface': True,
                'nic_order': 0
            },
            'network': {
                'name': 'some_network'
            }
        })
        # we support case when with dhcpd on vm inside network
        # instead use gateway service,
        # look to cc676430a1e06e9ac2fd8d0a56b9a414d3232939
        with mock.patch('vcloud_plugin_common.ctx', fake_ctx):
            server._create_connections_list(ctx=fake_ctx,
                                            vca_client=fake_client)
        # only managment node
        fake_ctx.instance._relationships = []
        with mock.patch('vcloud_plugin_common.ctx', fake_ctx):
            connection = server._create_connections_list(
                ctx=fake_ctx, vca_client=fake_client)
        self.assertEqual(
            [{
                'ip_address': None,
                'ip_allocation_mode': 'POOL',
                'mac_address': None,
                'network': '_management_network',
                'primary_interface': True,
                'nic_order': 0
            }],
            connection
        )
        # no networks
        fake_ctx.instance._relationships = []

        # helper: simulate a client that cannot resolve any network
        def _generate_fake_client_network(vdc_name, network_name):
            return None

        fake_client.get_network = _generate_fake_client_network
        with mock.patch('vcloud_plugin_common.ctx', fake_ctx):
            with self.assertRaises(cfy_exc.NonRecoverableError):
                server._create_connections_list(ctx=fake_ctx,
                                                vca_client=fake_client)
    def test_get_vm_network_connections(self):
        """_get_vm_network_connections keeps only connections whose
        is_connected flag is True."""
        # empty connection
        fake_vapp = self.generate_vapp([])
        connections = server._get_vm_network_connections(
            fake_vapp
        )
        self.assertEqual([], connections)
        # not connected
        fake_vapp = self.generate_vapp([{
            'is_connected': False,
            'network_name': 'network_name'
        }])
        connections = server._get_vm_network_connections(
            fake_vapp
        )
        self.assertEqual([], connections)
        # connection
        fake_vapp = self.generate_vapp([{
            'is_connected': True,
            'network_name': 'network_name'
        }])
        connections = server._get_vm_network_connections(
            fake_vapp
        )
        self.assertEqual([
            {
                'is_connected': True,
                'network_name': 'network_name'
            }],
            connections
        )
    def test_get_vm_network_connection(self):
        """_get_vm_network_connection returns the connection dict for a
        named network, or None when the network is not attached."""
        fake_vapp = self.generate_vapp([{
            'is_connected': True,
            'network_name': 'network_name'
        }])
        # exist network
        connection = server._get_vm_network_connection(
            fake_vapp, 'network_name'
        )
        self.assertEqual(
            {
                'is_connected': True,
                'network_name': 'network_name'
            }, connection
        )
        # not exist network
        connection = server._get_vm_network_connection(
            fake_vapp, 'other'
        )
        self.assertEqual(None, connection)
    def test_get_state(self):
        """_get_state is True only when the primary/management connection
        has an IP (or nothing is connected yet); False while a connected
        non-management NIC exists or an IP is still pending."""
        fake_ctx = self.generate_node_context_with_current_ctx()
        with mock.patch('vcloud_plugin_common.ctx', fake_ctx):
            # connected network_name
            fake_client = self.generate_client([{
                'is_connected': True,
                'is_primary': False,
                'network_name': 'network_name',
                'ip': '1.1.1.1'
            }])
            self.assertFalse(server._get_state(ctx=fake_ctx,
                                               vca_client=fake_client))
            # not connected network_name
            fake_client = self.generate_client([{
                'is_connected': False,
                'network_name': 'network_name',
                'ip': '1.1.1.1'
            }])
            self.assertTrue(server._get_state(ctx=fake_ctx,
                                              vca_client=fake_client))
            # not ip in connected network_name
            fake_client = self.generate_client([{
                'is_connected': True,
                'is_primary': False,
                'network_name': 'network_name',
                'ip': None
            }])
            self.assertFalse(server._get_state(ctx=fake_ctx,
                                               vca_client=fake_client))
            # with managment_network
            fake_client = self.generate_client([{
                'is_connected': True,
                'is_primary': True,
                'network_name': '_management_network',
                'ip': '1.1.1.1'
            }])
            self.assertTrue(server._get_state(ctx=fake_ctx,
                                              vca_client=fake_client))
def test_add_key_script(self):
commands = []
server._add_key_script(commands, "~A~", "~B~", "~C~", "~D~")
self.assertTrue(commands)
# check create directory .ssh
self.assertTrue("~A~" in commands[0])
self.assertTrue("~B~" in commands[0])
self.assertTrue("~C~" in commands[0])
# inject value to key file
self.assertTrue("~C~" in commands[0])
self.assertTrue("~D~" in commands[1])
    def test_get_connected_keypairs(self):
        """_get_connected_keypairs collects 'public_key' runtime properties
        from relationship targets; no relationships -> empty list."""
        # empty list of relationships
        fake_ctx = self.generate_node_context_with_current_ctx()
        fake_ctx.instance._relationships = None
        self.assertEqual([], server._get_connected_keypairs(ctx=fake_ctx))
        # exist some content
        relationship = self.generate_relation_context()
        runtime_properties = {'public_key': "a"}
        relationship.target.instance.runtime_properties = runtime_properties
        fake_ctx.instance._relationships = [relationship]
        self.assertEqual(
            server._get_connected_keypairs(ctx=fake_ctx), ["a"]
        )
    def test_is_primary_connection_has_ip(self):
        """_is_primary_connection_has_ip: True when there is no network
        info (nothing to wait for) or the primary NIC has an IP; False
        while any known connection lacks one."""
        # no network info at all
        vapp = mock.MagicMock()
        vapp.get_vms_network_info = mock.MagicMock(return_value=False)
        self.assertTrue(server._is_primary_connection_has_ip(vapp))
        # empty list of connections
        vapp.get_vms_network_info = mock.MagicMock(return_value=[None])
        self.assertTrue(server._is_primary_connection_has_ip(vapp))
        # exist connection, but without ip
        vapp.get_vms_network_info = mock.MagicMock(return_value=[[
            {'is_connected': False}
        ]])
        self.assertFalse(server._is_primary_connection_has_ip(vapp))
        # everything connected
        vapp.get_vms_network_info = mock.MagicMock(return_value=[[{
            'is_connected': True,
            'is_primary': True,
            'ip': '127.0.0.1'
        }]])
        self.assertTrue(server._is_primary_connection_has_ip(vapp))
        # connected but to different port
        vapp.get_vms_network_info = mock.MagicMock(return_value=[[{
            'is_connected': True,
            'is_primary': False,
            'ip': '127.0.0.1'
        }, {
            'is_connected': True,
            'is_primary': True,
            'ip': None
        }]])
        self.assertFalse(server._is_primary_connection_has_ip(vapp))
def test_remove_key_script(self):
commands = []
server._remove_key_script(
commands, "super!", ".ssh!", "somekey", "!**! !@^"
)
self.assertEqual(
commands, [' sed -i /!@^/d somekey']
)
# Allow running this test module directly (python <module>.py).
if __name__ == '__main__':
    unittest.main()
| |
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Copyright 2016 Twitter. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
''' heron.py '''
import logging

import tornado.gen
import tornado.httpclient
import tornado.httputil
from tornado.options import options

from fetch import fetch_url_as_json
from query import QueryHandler
# pylint: disable=bad-whitespace
CLUSTER_URL_FMT = "%s/clusters"
TOPOLOGIES_URL_FMT = "%s/topologies"
EXECUTION_STATE_URL_FMT = "%s/executionstate" % TOPOLOGIES_URL_FMT
LOGICALPLAN_URL_FMT = "%s/logicalplan" % TOPOLOGIES_URL_FMT
PHYSICALPLAN_URL_FMT = "%s/physicalplan" % TOPOLOGIES_URL_FMT
SCHEDULER_LOCATION_URL_FMT = "%s/schedulerlocation" % TOPOLOGIES_URL_FMT
METRICS_URL_FMT = "%s/metrics" % TOPOLOGIES_URL_FMT
METRICS_QUERY_URL_FMT = "%s/metricsquery" % TOPOLOGIES_URL_FMT
METRICS_TIMELINE_URL_FMT = "%s/metricstimeline" % TOPOLOGIES_URL_FMT
EXCEPTIONS_URL_FMT = "%s/exceptions" % TOPOLOGIES_URL_FMT
EXCEPTION_SUMMARY_URL_FMT = "%s/exceptionsummary" % TOPOLOGIES_URL_FMT
INFO_URL_FMT = "%s/info" % TOPOLOGIES_URL_FMT
PID_URL_FMT = "%s/pid" % TOPOLOGIES_URL_FMT
JSTACK_URL_FMT = "%s/jstack" % TOPOLOGIES_URL_FMT
JMAP_URL_FMT = "%s/jmap" % TOPOLOGIES_URL_FMT
HISTOGRAM_URL_FMT = "%s/histo" % TOPOLOGIES_URL_FMT
FILE_DATA_URL_FMT = "%s/containerfiledata" % TOPOLOGIES_URL_FMT
FILE_DOWNLOAD_URL_FMT = "%s/containerfiledownload" % TOPOLOGIES_URL_FMT
FILESTATS_URL_FMT = "%s/containerfilestats" % TOPOLOGIES_URL_FMT
capacity = "DIVIDE(" \
" DEFAULT(0," \
" MULTIPLY(" \
" TS({0},{1},__execute-count/default)," \
" TS({0},{1},__execute-latency/default)" \
" )" \
" )," \
" 60000000000" \
")"
failures = "DEFAULT(0," \
" DIVIDE(" \
" TS({0},{1},__fail-count/default)," \
" SUM(" \
" DEFAULT(1, TS({0},{1},__execute-count/default))," \
" DEFAULT(0, TS({0},{1},__fail-count/default))" \
" )" \
" )" \
")"
cpu = "DEFAULT(0, TS({0},{1},__jvm-process-cpu-load))"
memory = "DIVIDE(" \
" DEFAULT(0, TS({0},{1},__jvm-memory-used-mb))," \
" DEFAULT(1, TS({0},{1},__jvm-memory-mb-total))" \
")"
gc = "RATE(TS({0},{1},__jvm-gc-collection-time-ms))"
backpressure = "DEFAULT(0, TS(__stmgr__,*,__time_spent_back_pressure_by_compid/{0}))"
queries = dict(
cpu=cpu,
capacity=capacity,
failures=failures,
memory=memory,
gc=gc,
backpressure=backpressure
)
def get_tracker_endpoint():
  '''
  Get the endpoint for heron tracker
  :return: tracker base URL taken from the tornado command-line options
  '''
  return options.tracker_url
def create_url(fmt):
  '''
  Substitute the tracker service endpoint into a URL format string.
  :param fmt: format string containing one "%s" placeholder
  :return: fully-formed tracker URL
  '''
  endpoint = get_tracker_endpoint()
  return fmt % endpoint
@tornado.gen.coroutine
def get_clusters():
  '''
  Fetch the list of clusters known to the heron tracker.
  :return: parsed JSON response from the tracker
  '''
  url = create_url(CLUSTER_URL_FMT)
  response = yield fetch_url_as_json(url)
  raise tornado.gen.Return(response)
################################################################################
@tornado.gen.coroutine
def get_topologies():
  '''
  Fetch the list of topologies from the heron tracker.
  :return: parsed JSON response from the tracker
  '''
  url = create_url(TOPOLOGIES_URL_FMT)
  response = yield fetch_url_as_json(url)
  raise tornado.gen.Return(response)
################################################################################
@tornado.gen.coroutine
def get_topologies_states():
  '''
  Fetch the list of topologies together with their states.
  :return: parsed JSON response from the tracker
  '''
  url = create_url(TOPOLOGIES_URL_FMT) + "/states"
  response = yield fetch_url_as_json(url)
  raise tornado.gen.Return(response)
@tornado.gen.coroutine
def _get_topologies(cluster, role=None, env=None):
  '''
  Shared helper: fetch topologies for a cluster, optionally filtered
  by submitting role and environment.
  :param cluster: cluster name (required)
  :param role: optional role filter
  :param env: optional environment filter (sent as "environ")
  :return: parsed JSON response from the tracker
  '''
  query_args = {'cluster': cluster}
  if role is not None:
    query_args['role'] = role
  if env is not None:
    query_args['environ'] = env
  url = tornado.httputil.url_concat(create_url(TOPOLOGIES_URL_FMT), query_args)
  response = yield fetch_url_as_json(url)
  raise tornado.gen.Return(response)
################################################################################
def get_cluster_topologies(cluster):
  '''
  Get the list of topologies given a cluster
  :param cluster:
  :return: future resolving to the tracker response
  '''
  return _get_topologies(cluster)
################################################################################
def get_cluster_role_topologies(cluster, role):
  '''
  Get the list of topologies given a cluster submitted by a given role
  :param cluster:
  :param role:
  :return: future resolving to the tracker response
  '''
  return _get_topologies(cluster, role=role)
################################################################################
def get_cluster_role_env_topologies(cluster, role, env):
  '''
  Get the list of topologies given a cluster submitted by a given role under a given environment
  :param cluster:
  :param role:
  :param env:
  :return: future resolving to the tracker response
  '''
  return _get_topologies(cluster, role=role, env=env)
################################################################################
@tornado.gen.coroutine
def get_execution_state(cluster, environ, topology, role=None):
  '''
  Fetch the execution state of a topology in a cluster.
  :param cluster:
  :param environ:
  :param topology:
  :param role: optional role filter
  :return: parsed JSON response from the tracker
  '''
  query_args = {'cluster': cluster, 'environ': environ, 'topology': topology}
  if role is not None:
    query_args['role'] = role
  url = tornado.httputil.url_concat(create_url(EXECUTION_STATE_URL_FMT), query_args)
  response = yield fetch_url_as_json(url)
  raise tornado.gen.Return(response)
################################################################################
@tornado.gen.coroutine
def get_logical_plan(cluster, environ, topology, role=None):
  '''
  Fetch the logical plan of a topology in a cluster.
  :param cluster:
  :param environ:
  :param topology:
  :param role: optional role filter
  :return: parsed JSON response from the tracker
  '''
  query_args = {'cluster': cluster, 'environ': environ, 'topology': topology}
  if role is not None:
    query_args['role'] = role
  url = tornado.httputil.url_concat(create_url(LOGICALPLAN_URL_FMT), query_args)
  response = yield fetch_url_as_json(url)
  raise tornado.gen.Return(response)
################################################################################
@tornado.gen.coroutine
def get_comps(cluster, environ, topology, role=None):
  '''
  Get the list of component names (spouts and bolts) for the topology
  from the tracker's logical plan.
  :param cluster:
  :param environ:
  :param topology:
  :param role: optional role filter
  :return: list of component name strings
  '''
  params = dict(cluster=cluster, environ=environ, topology=topology)
  if role is not None:
    params['role'] = role
  request_url = tornado.httputil.url_concat(
      create_url(LOGICALPLAN_URL_FMT), params)
  lplan = yield fetch_url_as_json(request_url)
  # Wrap in list() so this works on Python 3 as well, where dict.keys()
  # returns a view object that does not support concatenation with "+".
  comps = list(lplan['spouts'].keys()) + list(lplan['bolts'].keys())
  raise tornado.gen.Return(comps)
################################################################################
@tornado.gen.coroutine
def get_instances(cluster, environ, topology, role=None):
  '''
  Get the list of instance ids for the topology from the tracker's
  physical plan.
  :param cluster:
  :param environ:
  :param topology:
  :param role: optional role filter
  :return: list of instance id strings
  '''
  params = dict(cluster=cluster, environ=environ, topology=topology)
  if role is not None:
    params['role'] = role
  request_url = tornado.httputil.url_concat(
      create_url(PHYSICALPLAN_URL_FMT), params)
  pplan = yield fetch_url_as_json(request_url)
  # Materialize as a list so callers get an indexable sequence on
  # Python 3 too (dict.keys() is a non-indexable view there).
  instances = list(pplan['instances'].keys())
  raise tornado.gen.Return(instances)
################################################################################
@tornado.gen.coroutine
def get_physical_plan(cluster, environ, topology, role=None):
  '''
  Fetch the physical plan of a topology in a cluster from the tracker.
  :param cluster:
  :param environ:
  :param topology:
  :param role: optional role filter
  :return: parsed JSON response from the tracker
  '''
  query_args = {'cluster': cluster, 'environ': environ, 'topology': topology}
  if role is not None:
    query_args['role'] = role
  url = tornado.httputil.url_concat(create_url(PHYSICALPLAN_URL_FMT), query_args)
  response = yield fetch_url_as_json(url)
  raise tornado.gen.Return(response)
################################################################################
@tornado.gen.coroutine
def get_scheduler_location(cluster, environ, topology, role=None):
  '''
  Fetch the scheduler location of a topology in a cluster from the tracker.
  :param cluster:
  :param environ:
  :param topology:
  :param role: optional role filter
  :return: parsed JSON response from the tracker
  '''
  query_args = {'cluster': cluster, 'environ': environ, 'topology': topology}
  if role is not None:
    query_args['role'] = role
  url = tornado.httputil.url_concat(
      create_url(SCHEDULER_LOCATION_URL_FMT), query_args)
  response = yield fetch_url_as_json(url)
  raise tornado.gen.Return(response)
################################################################################
@tornado.gen.coroutine
def get_component_exceptionsummary(cluster, environ, topology, component, role=None):
  '''
  Fetch the exception summary for one component of a topology.
  :param cluster:
  :param environ:
  :param topology:
  :param component:
  :param role: optional role filter
  :return: parsed JSON response from the tracker
  '''
  query_args = {
      'cluster': cluster,
      'environ': environ,
      'topology': topology,
      'component': component,
  }
  if role is not None:
    query_args['role'] = role
  url = tornado.httputil.url_concat(
      create_url(EXCEPTION_SUMMARY_URL_FMT), query_args)
  response = yield fetch_url_as_json(url)
  raise tornado.gen.Return(response)
################################################################################
@tornado.gen.coroutine
def get_component_exceptions(cluster, environ, topology, component, role=None):
  '''
  Fetch the full exception list for one component of a topology.
  :param cluster:
  :param environ:
  :param topology:
  :param component:
  :param role: optional role filter
  :return: parsed JSON response from the tracker
  '''
  query_args = {
      'cluster': cluster,
      'environ': environ,
      'topology': topology,
      'component': component,
  }
  if role is not None:
    query_args['role'] = role
  url = tornado.httputil.url_concat(
      create_url(EXCEPTIONS_URL_FMT), query_args)
  response = yield fetch_url_as_json(url)
  raise tornado.gen.Return(response)
################################################################################
@tornado.gen.coroutine
def get_comp_instance_metrics(cluster, environ, topology, component,
                              metrics, instances, time_range, role=None):
  '''
  Get the metrics for some instances of a topology from tracker
  :param cluster:
  :param environ:
  :param topology:
  :param component:
  :param metrics: dict of display name to cuckoo name
  :param instances: single instance id or list of instance ids
  :param time_range: 2-tuple consisting of start and end of range
  :param role:
  :return:
  '''
  params = dict(
      cluster=cluster,
      environ=environ,
      topology=topology,
      component=component)
  if role is not None:
    params['role'] = role
  # form the fetch url
  request_url = tornado.httputil.url_concat(
      create_url(METRICS_URL_FMT), params)
  # convert a single instance to a list, if needed
  all_instances = instances if isinstance(instances, list) else [instances]
  # append each metric to the url
  # NOTE(review): metric_name[0] takes the first element of each metrics
  # value -- presumably values are sequences whose first entry is the
  # cuckoo metric name; confirm against callers.
  for _, metric_name in metrics.items():
    request_url = tornado.httputil.url_concat(request_url, dict(metricname=metric_name[0]))
  # append each instance to the url
  for i in all_instances:
    request_url = tornado.httputil.url_concat(request_url, dict(instance=i))
  # append the time interval to the url (only the end of the range is sent)
  request_url = tornado.httputil.url_concat(request_url, dict(interval=time_range[1]))
  raise tornado.gen.Return((yield fetch_url_as_json(request_url)))
################################################################################
@tornado.gen.coroutine
def get_comp_metrics(cluster, environ, topology, component,
                     instances, metricnames, time_range, role=None):
  '''
  Fetch the metrics for the given instances of a topology component.
  :param cluster:
  :param environ:
  :param topology:
  :param component:
  :param instances: iterable of instance ids
  :param metricnames: iterable of metric names to request
  :param time_range: 2-tuple consisting of start and end of range
  :param role: optional role filter
  :return: parsed JSON response from the tracker
  '''
  query_args = {
      'cluster': cluster,
      'environ': environ,
      'topology': topology,
      'component': component,
  }
  if role is not None:
    query_args['role'] = role
  # base url with the common query parameters
  url = tornado.httputil.url_concat(create_url(METRICS_URL_FMT), query_args)
  # one repeated "metricname" parameter per requested metric
  for name in metricnames:
    url = tornado.httputil.url_concat(url, dict(metricname=name))
  # one repeated "instance" parameter per instance
  for inst in instances:
    url = tornado.httputil.url_concat(url, dict(instance=inst))
  # only the end of the range is sent, as the "interval" parameter
  url = tornado.httputil.url_concat(url, dict(interval=time_range[1]))
  response = yield fetch_url_as_json(url)
  raise tornado.gen.Return(response)
################################################################################
@tornado.gen.coroutine
def get_metrics(cluster, environment, topology, timerange, query, role=None):
  '''
  Run a metrics query against the tracker for one topology.
  :param cluster:
  :param environment:
  :param topology:
  :param timerange: 2-tuple (starttime, endtime)
  :param query: tracker metricsquery expression
  :param role: optional role filter
  :return: parsed JSON response from the tracker
  '''
  query_args = {
      'cluster': cluster,
      'environ': environment,
      'topology': topology,
      'starttime': timerange[0],
      'endtime': timerange[1],
      'query': query,
  }
  if role is not None:
    query_args['role'] = role
  url = tornado.httputil.url_concat(
      create_url(METRICS_QUERY_URL_FMT), query_args
  )
  logging.info("get_metrics %s", url)
  response = yield fetch_url_as_json(url)
  raise tornado.gen.Return(response)
################################################################################
@tornado.gen.coroutine
def get_comp_metrics_timeline(cluster, environ, topology, component,
                              instances, metricnames, time_range, role=None):
  '''
  Get the minute-by-minute metrics for all instances of a topology from tracker
  :param cluster:
  :param environ:
  :param topology:
  :param component:
  :param instances: iterable of instance ids
  :param metricnames: iterable of metric names to request
  :param time_range: 2-tuple consisting of start and end of range
  :param role:
  :return:
  '''
  params = dict(
      cluster=cluster,
      environ=environ,
      topology=topology,
      component=component)
  if role is not None:
    params['role'] = role
  # form the url; role (when given) is already carried in params, so it
  # must not be appended a second time (the old code duplicated it).
  request_url = tornado.httputil.url_concat(create_url(METRICS_TIMELINE_URL_FMT), params)
  # append each metric to the url
  for metric_name in metricnames:
    request_url = tornado.httputil.url_concat(request_url, dict(metricname=metric_name))
  # append each instance to the url
  for instance in instances:
    request_url = tornado.httputil.url_concat(request_url, dict(instance=instance))
  # append the time interval to the url
  request_url = tornado.httputil.url_concat(
      request_url, dict(starttime=time_range[0], endtime=time_range[1]))
  raise tornado.gen.Return((yield fetch_url_as_json(request_url)))
@tornado.gen.coroutine
def get_topology_info(cluster, environ, topology, role=None):
  '''
  Fetch the combined info blob for a topology from the tracker.
  :param cluster:
  :param environ:
  :param topology:
  :param role: optional role filter
  :return: parsed JSON response from the tracker
  '''
  query_args = {'cluster': cluster, 'environ': environ, 'topology': topology}
  if role is not None:
    query_args['role'] = role
  url = tornado.httputil.url_concat(create_url(INFO_URL_FMT), query_args)
  response = yield fetch_url_as_json(url)
  raise tornado.gen.Return(response)
# Get pid of the instance
# Get pid of the instance
@tornado.gen.coroutine
def get_instance_pid(cluster, environ, topology, instance, role=None):
  '''
  Fetch the OS process id of a running instance via the tracker.
  :param cluster:
  :param environ:
  :param topology:
  :param instance:
  :param role: optional role filter
  :return: parsed JSON response from the tracker
  '''
  query_args = {
      'cluster': cluster,
      'environ': environ,
      'topology': topology,
      'instance': instance,
  }
  if role is not None:
    query_args['role'] = role
  url = tornado.httputil.url_concat(create_url(PID_URL_FMT), query_args)
  response = yield fetch_url_as_json(url)
  raise tornado.gen.Return(response)
# Get jstack of instance
@tornado.gen.coroutine
def get_instance_jstack(cluster, environ, topology, instance, role=None):
'''
:param cluster:
:param environ:
:param topology:
:param instance:
:param role:
:return:
'''
params = dict(
cluster=cluster,
environ=environ,
topology=topology,
instance=instance)
if role is not None:
params['role'] = role
request_url = tornado.httputil.url_concat(
create_url(JSTACK_URL_FMT), params)
raise tornado.gen.Return((yield fetch_url_as_json(request_url)))
# Get histogram of active memory objects.
@tornado.gen.coroutine
def get_instance_mem_histogram(cluster, environ, topology, instance, role=None):
'''
:param cluster:
:param environ:
:param topology:
:param instance:
:param role:
:return:
'''
params = dict(
cluster=cluster,
environ=environ,
topology=topology,
instance=instance)
if role is not None:
params['role'] = role
request_url = tornado.httputil.url_concat(
create_url(HISTOGRAM_URL_FMT), params)
raise tornado.gen.Return((yield fetch_url_as_json(request_url)))
# Call heap dump for an instance and save it at /tmp/heap.bin
@tornado.gen.coroutine
def run_instance_jmap(cluster, environ, topology, instance, role=None):
'''
:param cluster:
:param environ:
:param topology:
:param instance:
:param role:
:return:
'''
params = dict(
cluster=cluster,
environ=environ,
topology=topology,
instance=instance)
if role is not None:
params['role'] = role
request_url = tornado.httputil.url_concat(
create_url(JMAP_URL_FMT), params)
if role is not None:
request_url = tornado.httputil.url_concat(request_url, dict(role=role))
raise tornado.gen.Return((yield fetch_url_as_json(request_url)))
# Get a url to download a file from the container
def get_container_file_download_url(cluster, environ, topology, container,
path, role=None):
'''
:param cluster:
:param environ:
:param topology:
:param container:
:param path:
:param role:
:return:
'''
params = dict(
cluster=cluster,
environ=environ,
topology=topology,
container=container,
path=path)
if role is not None:
params['role'] = role
request_url = tornado.httputil.url_concat(
create_url(FILE_DOWNLOAD_URL_FMT), params)
if role is not None:
request_url = tornado.httputil.url_concat(request_url, dict(role=role))
return request_url
# Get file data from the container
@tornado.gen.coroutine
def get_container_file_data(cluster, environ, topology, container,
path, offset, length, role=None):
'''
:param cluster:
:param environ:
:param topology:
:param container:
:param path:
:param offset:
:param length:
:param role:
:return:
'''
params = dict(
cluster=cluster,
environ=environ,
topology=topology,
container=container,
path=path,
offset=offset,
length=length)
if role is not None:
params['role'] = role
request_url = tornado.httputil.url_concat(
create_url(FILE_DATA_URL_FMT), params)
if role is not None:
request_url = tornado.httputil.url_concat(request_url, dict(role=role))
raise tornado.gen.Return((yield fetch_url_as_json(request_url)))
# Get filestats
@tornado.gen.coroutine
def get_filestats(cluster, environ, topology, container, path, role=None):
'''
:param cluster:
:param environ:
:param topology:
:param container:
:param path:
:param role:
:return:
'''
params = dict(
cluster=cluster,
environ=environ,
topology=topology,
container=container,
path=path)
if role is not None:
params['role'] = role
request_url = tornado.httputil.url_concat(create_url(FILESTATS_URL_FMT), params)
raise tornado.gen.Return((yield fetch_url_as_json(request_url)))
class HeronQueryHandler(QueryHandler):
  ''' QueryHandler implementation backed by the heron tracker REST API. '''

  @tornado.gen.coroutine
  def fetch(self, cluster, metric, topology, component, instance, timerange, environ=None):
    '''
    Fetch the timeline for one metric, fanning out one tracker query per
    component when component is "*", and concatenating the timelines.
    :param cluster:
    :param metric: key into the module-level ``queries`` dict
    :param topology:
    :param component: component name, or "*" for all components
    :param instance:
    :param timerange: 2-tuple (starttime, endtime)
    :param environ:
    :return: metric response dict (see get_metric_response)
    '''
    components = [component] if component != "*" else (yield get_comps(cluster, environ, topology))
    futures = []
    for comp in components:
      query = self.get_query(metric, comp, instance)
      future = get_metrics(cluster, environ, topology, timerange, query)
      futures.append(future)
    # yielding the list of futures runs the tracker queries concurrently
    results = yield futures
    timelines = []
    for result in results:
      timelines.extend(result["timeline"])
    result = self.get_metric_response(timerange, timelines, False)
    raise tornado.gen.Return(result)

  @tornado.gen.coroutine
  def fetch_max(self, cluster, metric, topology, component, instance, timerange, environ=None):
    '''
    Like fetch(), but wraps each query in MAX() and reduces the
    per-component results to a single elementwise-max timeline.
    :param cluster:
    :param metric: key into the module-level ``queries`` dict
    :param topology:
    :param component: component name, or "*" for all components
    :param instance:
    :param timerange: 2-tuple (starttime, endtime)
    :param environ:
    :return: metric response dict (see get_metric_response)
    '''
    components = [component] if component != "*" else (yield get_comps(cluster, environ, topology))
    result = {}
    futures = []
    for comp in components:
      query = self.get_query(metric, comp, instance)
      max_query = "MAX(%s)" % query
      future = get_metrics(cluster, environ, topology, timerange, max_query)
      futures.append(future)
    results = yield futures
    data = self.compute_max(results)
    result = self.get_metric_response(timerange, data, True)
    raise tornado.gen.Return(result)

  # pylint: disable=unused-argument
  @tornado.gen.coroutine
  def fetch_backpressure(self, cluster, metric, topology, component, instance, \
                         timerange, is_max, environ=None):
    '''
    Fetch back-pressure metrics per instance, optionally filtered by
    component and optionally reduced to an elementwise maximum.
    :param cluster:
    :param metric: key into the module-level ``queries`` dict
    :param topology:
    :param component: component name, or "*" for all instances
    :param instance:
    :param timerange: 2-tuple (starttime, endtime)
    :param is_max: if True, reduce all timelines with compute_max
    :param environ:
    :return: metric response dict (see get_metric_response)
    '''
    instances = yield get_instances(cluster, environ, topology)
    if component != "*":
      # NOTE(review): element 2 of the underscore-split instance id is
      # treated as the component name -- confirm against the id format.
      filtered_inst = [instance for instance in instances if instance.split("_")[2] == component]
    else:
      filtered_inst = instances
    futures_dict = {}
    for inst in filtered_inst:
      query = queries.get(metric).format(inst)
      futures_dict[inst] = get_metrics(cluster, environ, topology, timerange, query)
    # yielding the dict of futures runs all queries concurrently
    res = yield futures_dict
    if not is_max:
      timelines = []
      for key in res:
        result = res[key]
        # Replacing stream manager instance name with component instance name
        if len(result["timeline"]) > 0:
          result["timeline"][0]["instance"] = key
        timelines.extend(result["timeline"])
      result = self.get_metric_response(timerange, timelines, is_max)
    else:
      data = self.compute_max(res.values())
      result = self.get_metric_response(timerange, data, is_max)
    raise tornado.gen.Return(result)

  # pylint: disable=no-self-use
  def compute_max(self, multi_ts):
    '''
    Reduce several metricsquery responses to one timeline holding the
    elementwise maximum per timestamp.
    :param multi_ts: sequence of tracker metricsquery responses
    :return: dict mapping timestamp -> max value ({} when no data)
    '''
    if len(multi_ts) > 0 and len(multi_ts[0]["timeline"]) > 0:
      # timestamps are taken from the first response; all responses are
      # assumed to share the same timestamps -- TODO confirm
      keys = multi_ts[0]["timeline"][0]["data"].keys()
      timelines = ([res["timeline"][0]["data"][key] for key in keys] for res in multi_ts)
      values = (max(v) for v in zip(*timelines))
      return dict(zip(keys, values))
    return {}

  # pylint: disable=no-self-use
  def get_metric_response(self, timerange, data, isMax):
    '''
    Wrap timeline data in the response envelope the UI expects.
    :param timerange: 2-tuple (starttime, endtime)
    :param data: single data dict (isMax) or list of timeline entries
    :param isMax: whether data is a reduced single timeline
    :return: response dict with status, time bounds and result timeline
    '''
    if isMax:
      return dict(
          status="success",
          starttime=timerange[0],
          endtime=timerange[1],
          result=dict(timeline=[dict(data=data)])
      )

    return dict(
        status="success",
        starttime=timerange[0],
        endtime=timerange[1],
        result=dict(timeline=data)
    )

  # pylint: disable=no-self-use
  def get_query(self, metric, component, instance):
    '''
    Build the tracker metricsquery expression for a metric.
    :param metric: key into the module-level ``queries`` dict
    :param component: component name substituted for {0}
    :param instance: instance pattern substituted for {1}
    :return: formatted query string
    '''
    q = queries.get(metric)
    return q.format(component, instance)
| |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for variable store."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gc
import threading
import numpy
from tensorflow.python.eager import context
from tensorflow.python.eager import function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.layers import core as core_layers
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables as variables_lib
from tensorflow.python.platform import test
from tensorflow.python.util import compat
from tensorflow.python.util import tf_inspect
class VariableScopeTest(test.TestCase):
def tearDown(self):
  """After each test, assert no uncollectable reference cycles were left."""
  gc.collect()
  # This will only contain uncollectable garbage, i.e. reference cycles
  # involving objects with __del__ defined.
  self.assertEqual(0, len(gc.garbage))
def testGetVar(self):
  """Requesting the same name twice from one store yields the same variable."""
  vs = variable_scope._get_default_variable_store()
  v = vs.get_variable("v", [1])
  v1 = vs.get_variable("v", [1])
  self.assertEqual(v, v1)
@test_util.run_in_graph_and_eager_modes
def testResource(self):
  """use_resource=True makes the store return a ResourceVariable."""
  vs = variable_scope._get_default_variable_store()
  v1 = vs.get_variable("v", [1], use_resource=True)
  self.assertTrue(isinstance(v1, resource_variable_ops.ResourceVariable))
def testNameExists(self):
  """reuse=False forbids existing names; reuse=True forbids new names."""
  vs = variable_scope._get_default_variable_store()
  # No check by default, so we can both create and get existing names.
  v = vs.get_variable("v", [1])
  v1 = vs.get_variable("v", [1])
  self.assertEqual(v, v1)
  # When reuse is False, we fail when variables are already there.
  vs.get_variable("w", [1], reuse=False)  # That's ok.
  with self.assertRaises(ValueError):
    vs.get_variable("v", [1], reuse=False)  # That fails.
  # When reuse is True, we fail when variables are new.
  vs.get_variable("v", [1], reuse=True)  # That's ok.
  with self.assertRaises(ValueError):
    vs.get_variable("u", [1], reuse=True)  # That fails.
def testNamelessStore(self):
  """Variables created through the store get their plain names (name:0)."""
  vs = variable_scope._get_default_variable_store()
  vs.get_variable("v1", [2])
  vs.get_variable("v2", [2])
  expected_names = ["%s:0" % name for name in ["v1", "v2"]]
  self.assertEqual(
      set(expected_names), set([v.name for v in vs._vars.values()]))
@test_util.run_in_graph_and_eager_modes
def testVarScopeInitializer(self):
  """A scope-level initializer applies to variables created inside it."""
  init = init_ops.constant_initializer(0.3)
  with variable_scope.variable_scope("tower0") as tower:
    with variable_scope.variable_scope("foo", initializer=init):
      v = variable_scope.get_variable("v", [])
      self.evaluate(variables_lib.variables_initializer([v]))
      self.assertAllClose(self.evaluate(v.value()), 0.3)
    # Re-entering by scope object also honors the initializer argument.
    with variable_scope.variable_scope(tower, initializer=init):
      w = variable_scope.get_variable("w", [])
      self.evaluate(variables_lib.variables_initializer([w]))
      self.assertAllClose(self.evaluate(w.value()), 0.3)
@test_util.run_in_graph_and_eager_modes
def testVarScopeConstraint(self):
  """A scope-level constraint is attached to variables created inside it."""
  constraint = lambda x: 0. * x
  with variable_scope.variable_scope("tower1") as tower:
    with variable_scope.variable_scope("foo", constraint=constraint):
      v = variable_scope.get_variable("v", [])
      self.assertEqual(v.constraint, constraint)
    with variable_scope.variable_scope(tower, constraint=constraint):
      w = variable_scope.get_variable("w", [])
      self.assertEqual(w.constraint, constraint)
def testStringDefaultInitializer(self):
  """A string variable with no initializer defaults to the empty string."""
  with self.cached_session():
    v = variable_scope.get_variable("string", shape=[], dtype=dtypes.string)
    variables_lib.global_variables_initializer().run()
    self.assertAllEqual(compat.as_bytes(v.eval()), b"")
@test_util.run_in_graph_and_eager_modes
def testVarScopeDType(self):
  """A scope-level dtype applies to variables created inside it."""
  with variable_scope.variable_scope("tower2") as tower:
    with variable_scope.variable_scope("foo", dtype=dtypes.float16):
      v = variable_scope.get_variable("v", [])
      self.assertEqual(v.dtype.base_dtype, dtypes.float16)
    with variable_scope.variable_scope(tower, dtype=dtypes.float16):
      w = variable_scope.get_variable("w", [])
      self.assertEqual(w.dtype.base_dtype, dtypes.float16)
def testGetVariableInGraphNestedUnderEagerContext(self):
  """get_variable inside a defun under eager mode yields a ResourceVariable."""
  with context.eager_mode():

    @function.defun
    def f():
      v = variable_scope.get_variable("should_be_resource", [])
      self.assertEqual(type(v), resource_variable_ops.ResourceVariable)

    f()
def testEagerVariableStore(self):
  """EagerVariableStore tracks (non-)trainable variables, and copy() makes
  independent instances that reuse the stored values by name."""
  with context.eager_mode():
    store = variable_scope.EagerVariableStore()
    with store.as_default():
      v = variable_scope.get_variable("v", shape=(), trainable=True)
      w = variable_scope.get_variable("w", shape=(), trainable=False)
    self.assertTrue(v in store.variables())
    self.assertTrue(w in store.variables())
    self.assertTrue(v in store.trainable_variables())
    self.assertFalse(w in store.trainable_variables())
    self.assertFalse(v in store.non_trainable_variables())
    self.assertTrue(w in store.non_trainable_variables())

    # Test copying.
    new_store = store.copy()
    with new_store.as_default():
      new_v = variable_scope.get_variable("v")
      new_w = variable_scope.get_variable("w")
      self.assertEqual(new_v.numpy(), v.numpy())
      self.assertEqual(new_w.numpy(), w.numpy())
    self.assertTrue(new_v in new_store.variables())
    self.assertTrue(new_w in new_store.variables())
    self.assertTrue(new_v in new_store.trainable_variables())
    self.assertFalse(new_w in new_store.trainable_variables())
    self.assertFalse(new_v in new_store.non_trainable_variables())
    self.assertTrue(new_w in new_store.non_trainable_variables())

    # Check that variables are separate instances.
    for v in store.variables():
      v.assign(-1)
    for v in new_store.variables():
      v.assign(1)
    for v in store.variables():
      self.assertEqual(v.numpy(), -1)
    for v in new_store.variables():
      self.assertEqual(v.numpy(), 1)
def testEagerVariableStoreWithEagerDefun(self):
  """Inside a defun, a Dense layer built with _reuse=True shares the
  variables of an earlier same-named layer."""
  with context.eager_mode():

    @function.defun
    def f():
      x = constant_op.constant([[2.0]])
      d1 = core_layers.Dense(
          1, name="my_dense", kernel_initializer=init_ops.ones_initializer())
      _ = d1(x)  # create variables
      self.assertEqual(len(d1.variables), 2)
      v1, v2 = d1.variables
      d2 = core_layers.Dense(
          1,
          name="my_dense",
          kernel_initializer=init_ops.ones_initializer(),
          _reuse=True)
      _ = d2(x)
      self.assertEqual(len(d2.variables), 2)
      v3, v4 = d2.variables
      self.assertAllEqual([v1, v2], [v3, v4])

    f()
@test_util.run_in_graph_and_eager_modes
def testEagerVariablesStoreAddsToCollections(self):
  """Variables made under a store land in the expected graph collections;
  an explicit `collections` argument replaces the GLOBAL_VARIABLES default."""
  store = variable_scope.EagerVariableStore()
  with store.as_default():
    trainable = variable_scope.get_variable("v1", [], trainable=True)
    not_trainable = variable_scope.get_variable("v2", [], trainable=False)
    concat = variable_scope.get_variable(
        "v3", [], collections=[ops.GraphKeys.CONCATENATED_VARIABLES])
    self.assertEqual(
        ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES),
        [trainable, not_trainable])
    self.assertEqual(
        ops.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES),
        [trainable, concat])
    self.assertEqual(
        ops.get_collection(ops.GraphKeys.CONCATENATED_VARIABLES), [concat])
@test_util.run_in_graph_and_eager_modes
def testEagerVariablesOutsideStoreNotAddedToCollections(self):
  """In eager mode, variables made outside a store stay out of collections."""
  if not context.executing_eagerly():
    return
  variable_scope.get_variable("v1", [], trainable=True)
  variable_scope.get_variable("v2", [], trainable=False)
  self.assertFalse(ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES))
  self.assertFalse(ops.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES))
@test_util.run_in_graph_and_eager_modes
def testInitFromNonTensorValue(self):
  """Plain Python / numpy values work as initializers; a dict does not."""
  v = variable_scope.get_variable("v4", initializer=4, dtype=dtypes.int32)
  self.evaluate(variables_lib.variables_initializer([v]))
  self.assertAllClose(self.evaluate(v.value()), 4)

  w = variable_scope.get_variable(
      "w4", initializer=numpy.array([1, 2, 3]), dtype=dtypes.int64)
  self.evaluate(variables_lib.variables_initializer([w]))
  self.assertAllClose(self.evaluate(w.value()), [1, 2, 3])

  # A quirk to be revisited?
  error = ValueError if context.executing_eagerly() else TypeError
  with self.assertRaises(error):
    variable_scope.get_variable("x4", initializer={})
@test_util.run_in_graph_and_eager_modes
def testInitFromNonInitializer(self):
  """For each dtype, the default initializer matches an explicit zeros_initializer."""
  # Test various dtypes with zeros initializer as following:
  types = [
      dtypes.int8, dtypes.uint8, dtypes.int16, dtypes.uint16, dtypes.int32,
      dtypes.int64, dtypes.bool
  ]

  # Use different variable_name to distinguish various dtypes
  for (i, dtype) in enumerate(types):
    x = variable_scope.get_variable(
        name="xx%d" % i, shape=(3, 4), dtype=dtype)
    y = variable_scope.get_variable(
        name="yy%d" % i,
        shape=(3, 4),
        dtype=dtype,
        initializer=init_ops.zeros_initializer(dtype=dtype))
    self.evaluate(variables_lib.global_variables_initializer())
    self.assertAllEqual(self.evaluate(x.value()), self.evaluate(y.value()))
# TODO(alive): support variable partitioning/caching in eager mode.
def testVarScopeCachingDevice(self):
  """caching_device is inherited by child scopes, can be cleared with "",
  overridden by a callable, or set later via set_caching_device."""
  with self.cached_session():
    caching_device = "/job:moo"
    with variable_scope.variable_scope("tower"):
      with variable_scope.variable_scope(
          "caching", caching_device=caching_device):
        v = variable_scope.get_variable("v", [])
        self.assertTrue(v.value().device.startswith(caching_device))

        with variable_scope.variable_scope("child"):
          v2 = variable_scope.get_variable("v", [])
          self.assertTrue(v2.value().device.startswith(caching_device))

        with variable_scope.variable_scope("not_cached", caching_device=""):
          v2_not_cached = variable_scope.get_variable("v", [])
          self.assertFalse(
              v2_not_cached.value().device.startswith(caching_device))

        with variable_scope.variable_scope(
            "not_cached_identity_device",
            caching_device=lambda op: op.device):
          v2_identity_device = variable_scope.get_variable("v", [])
          self.assertFalse(
              v2_identity_device.value().device.startswith(caching_device))

        with variable_scope.variable_scope("we_will_do_it_live") as vs_live:
          vs_live.set_caching_device("/job:live")
          v_live = variable_scope.get_variable("v", [])
          self.assertTrue(v_live.value().device.startswith("/job:live"))

      # Back at "tower" level: no caching device applies here.
      v_tower = variable_scope.get_variable("v", [])
      self.assertFalse(v_tower.value().device.startswith(caching_device))
@test_util.run_in_graph_and_eager_modes
def testVarScopeRegularizer(self):
  """Scope regularizers add REGULARIZATION_LOSSES per variable; losses can
  be disabled via no_regularizer, and reuse adds no new losses."""
  init = init_ops.constant_initializer(0.3)

  def regularizer1(v):
    return math_ops.reduce_mean(v) + 0.1

  def regularizer2(v):
    return math_ops.reduce_mean(v) + 0.2

  with variable_scope.variable_scope(
      "tower3", regularizer=regularizer1) as tower:
    with variable_scope.variable_scope("foo", initializer=init):
      v = variable_scope.get_variable("v", [])
      self.evaluate(variables_lib.variables_initializer([v]))
      losses = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)
      self.assertEqual(1, len(losses))
      self.assertAllClose(self.evaluate(losses[0]), 0.4)
    with variable_scope.variable_scope(tower, initializer=init) as vs:
      u = variable_scope.get_variable("u", [])
      vs.set_regularizer(regularizer2)
      w = variable_scope.get_variable("w", [])
      # Next 3 variable not regularized to test disabling regularization.
      x = variable_scope.get_variable(
          "x", [], regularizer=variable_scope.no_regularizer)
      with variable_scope.variable_scope(
          "baz", regularizer=variable_scope.no_regularizer):
        y = variable_scope.get_variable("y", [])
      vs.set_regularizer(variable_scope.no_regularizer)
      z = variable_scope.get_variable("z", [])
      # Check results.
      losses = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)
      self.assertEqual(3, len(losses))
      self.evaluate(variables_lib.variables_initializer([u, w, x, y, z]))
      self.assertAllClose(self.evaluate(losses[0]), 0.4)
      self.assertAllClose(self.evaluate(losses[1]), 0.4)
      self.assertAllClose(self.evaluate(losses[2]), 0.5)
    with variable_scope.variable_scope("foo", reuse=True):
      # reuse=True is for now only supported when eager execution is disabled.
      if not context.executing_eagerly():
        v = variable_scope.get_variable("v",
                                        [])  # "v" is already there, reused
        losses = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)
        self.assertEqual(3, len(losses))  # No new loss added.
@test_util.run_in_graph_and_eager_modes
def testInitializeFromValue(self):
  """Tensor initializers fix the value, forbid an explicit shape, carry
  their dtype, and conflict with a mismatched explicit dtype."""
  init = constant_op.constant(0.1)
  w = variable_scope.get_variable("v", initializer=init)
  self.evaluate(variables_lib.variables_initializer([w]))
  self.assertAllClose(self.evaluate(w.value()), 0.1)

  with self.assertRaisesRegexp(ValueError, "shape"):
    # We disallow explicit shape specification when initializer is constant.
    variable_scope.get_variable("u", [1], initializer=init)

  with variable_scope.variable_scope("foo", initializer=init):
    # Constant initializer can be passed through scopes if needed.
    v = variable_scope.get_variable("v")
    self.evaluate(variables_lib.variables_initializer([v]))
    self.assertAllClose(self.evaluate(v.value()), 0.1)

  # Check that non-float32 initializer creates a non-float32 variable.
  init = constant_op.constant(1, dtype=dtypes.int32)
  t = variable_scope.get_variable("t", initializer=init)
  self.assertEqual(t.dtype.base_dtype, dtypes.int32)

  # Raise error if `initializer` dtype and `dtype` are not identical.
  with self.assertRaisesRegexp(ValueError, "don't match"):
    variable_scope.get_variable("s", initializer=init, dtype=dtypes.float64)
def testControlDeps(self):
  """A control dependency on v0 does not force v0 initialization for v1."""
  with self.cached_session() as sess:
    v0 = variable_scope.get_variable(
        "v0", [1], initializer=init_ops.constant_initializer(0))
    with ops.control_dependencies([v0.value()]):
      v1 = variable_scope.get_variable(
          "v1", [1], initializer=init_ops.constant_initializer(1))
      add = v1 + v0
    # v0 should be uninitialized.
    with self.assertRaisesRegexp(errors.OpError, "uninitialized"):
      sess.run(v0)
    # We should be able to initialize and run v1 without initializing
    # v0, even if the variable was created with a control dep on v0.
    sess.run(v1.initializer)
    self.assertEqual(1, sess.run(v1))
    # v0 should still be uninitialized.
    with self.assertRaisesRegexp(errors.OpError, "uninitialized"):
      sess.run(v0)
    with self.assertRaisesRegexp(errors.OpError, "uninitialized"):
      sess.run(add)
    # If we initialize v0 we should be able to run 'add'.
    sess.run(v0.initializer)
    sess.run(add)
def testEnableResourceVariables(self):
  """enable/disable_resource_variables toggles the VariableV1 default;
  the global flag is restored afterwards."""
  old = variable_scope._DEFAULT_USE_RESOURCE
  try:
    variable_scope.enable_resource_variables()
    self.assertTrue(isinstance(variables_lib.VariableV1(1.0),
                               resource_variable_ops.ResourceVariable))
    variable_scope.disable_resource_variables()
    self.assertFalse(isinstance(variables_lib.VariableV1(1.0),
                                resource_variable_ops.ResourceVariable))
  finally:
    variable_scope._DEFAULT_USE_RESOURCE = old
def testControlFlow(self):
  """Variables created inside cond branches are initializable and runnable
  independently of the cond predicate variable v0."""
  with self.cached_session() as sess:
    v0 = variable_scope.get_variable(
        "v0", [], initializer=init_ops.constant_initializer(0))
    var_dict = {}

    # Call get_variable in each of the cond clauses.
    def var_in_then_clause():
      v1 = variable_scope.get_variable(
          "v1", [1], initializer=init_ops.constant_initializer(1))
      var_dict["v1"] = v1
      return v1 + v0

    def var_in_else_clause():
      v2 = variable_scope.get_variable(
          "v2", [1], initializer=init_ops.constant_initializer(2))
      var_dict["v2"] = v2
      return v2 + v0

    add = control_flow_ops.cond(
        math_ops.less(v0, 10), var_in_then_clause, var_in_else_clause)
    v1 = var_dict["v1"]
    v2 = var_dict["v2"]
    # We should be able to initialize and run v1 and v2 without initializing
    # v0, even if the variable was created with a control dep on v0.
    sess.run(v1.initializer)
    self.assertEqual([1], sess.run(v1))
    sess.run(v2.initializer)
    self.assertEqual([2], sess.run(v2))
    # v0 should still be uninitialized.
    with self.assertRaisesRegexp(errors.OpError, "uninitialized"):
      sess.run(v0)
    # We should not be able to run 'add' yet.
    with self.assertRaisesRegexp(errors.OpError, "uninitialized"):
      sess.run(add)
    # If we initialize v0 we should be able to run 'add'.
    sess.run(v0.initializer)
    sess.run(add)
@test_util.run_in_graph_and_eager_modes
def testGetVariableScope(self):
  """get_variable_scope() exposes the current scope; properties set on it
  (initializer, reuse) apply inside and reset when the scope exits."""
  # Test the get_variable_scope() function and setting properties of result.
  init = init_ops.constant_initializer(0.3)
  with variable_scope.variable_scope("bar"):
    new_init1 = variable_scope.get_variable_scope().initializer
    self.assertEqual(new_init1, None)
    # Check that we can set initializer like this.
    variable_scope.get_variable_scope().set_initializer(init)
    v = variable_scope.get_variable("v", [])
    self.evaluate(variables_lib.variables_initializer([v]))
    self.assertAllClose(self.evaluate(v.value()), 0.3)
    if not context.executing_eagerly():
      # Check that we can set reuse.
      variable_scope.get_variable_scope().reuse_variables()
      with self.assertRaises(ValueError):  # Fail, w does not exist yet.
        variable_scope.get_variable("w", [1])
  # Check that the set initializer goes away.
  new_init = variable_scope.get_variable_scope().initializer
  self.assertEqual(new_init, None)
@test_util.run_in_graph_and_eager_modes
def testVarScope(self):
  """Variable scopes nest in names; re-entering a scope object keeps its
  original name while the surrounding name scope still nests."""
  with variable_scope.variable_scope("tower4") as tower:
    self.assertEqual(tower.name, "tower4")
    with ops.name_scope("scope") as sc:
      self.assertEqual(sc, "tower4/scope/")

  with variable_scope.variable_scope("tower5"):
    with variable_scope.variable_scope("bar") as bar:
      self.assertEqual(bar.name, "tower5/bar")
      with ops.name_scope("scope") as sc:
        self.assertEqual(sc, "tower5/bar/scope/")

  with variable_scope.variable_scope("tower6"):
    with variable_scope.variable_scope(tower, reuse=True) as tower_shared:
      self.assertEqual(tower_shared.name, "tower4")
      with ops.name_scope("scope") as sc:
        self.assertEqual(sc, "tower6/tower4/scope/")
@test_util.run_in_graph_and_eager_modes
def testVarScopeNameScope(self):
  """Re-entering a variable scope opens a fresh, uniquified name scope
  (tower_1, tower_2, ...); the root scope adds no name component."""
  with ops.name_scope("testVarScopeNameScope1"):
    with variable_scope.variable_scope("tower") as tower:
      with ops.name_scope("scope2") as sc2:
        self.assertEqual(sc2, "testVarScopeNameScope1/tower/scope2/")
    if not context.executing_eagerly():
      with variable_scope.variable_scope(
          tower):  # Re-entering acts like another "tower".
        with ops.name_scope("scope2") as sc2:
          self.assertEqual(sc2, "testVarScopeNameScope1/tower_1/scope2/")
      with variable_scope.variable_scope(
          "tower"):  # Re-entering by string acts the same.
        with ops.name_scope("scope2") as sc2:
          self.assertEqual(sc2, "testVarScopeNameScope1/tower_2/scope2/")

  with ops.name_scope("testVarScopeNameScope2"):
    with variable_scope.variable_scope("tower"):
      with ops.name_scope("scope2") as sc2:
        self.assertEqual(sc2, "testVarScopeNameScope2/tower/scope2/")
    if not context.executing_eagerly():
      with variable_scope.variable_scope(tower):
        with ops.name_scope("scope2") as sc2:
          self.assertEqual(sc2, "testVarScopeNameScope2/tower_1/scope2/")

  root_var_scope = variable_scope.get_variable_scope()
  with ops.name_scope("testVarScopeNameScope3"):
    with variable_scope.variable_scope(root_var_scope):
      with ops.name_scope("scope2") as sc2:
        self.assertEqual(sc2, "testVarScopeNameScope3/scope2/")
def testVarScopeOriginalNameScope(self):
  """original_name_scope records where the scope was first created and can
  itself be re-entered as a name scope."""
  with self.cached_session():
    with ops.name_scope("scope1"):
      with variable_scope.variable_scope("tower") as tower:
        self.assertEqual(tower.original_name_scope, "scope1/tower/")
        with ops.name_scope("scope2") as sc2:
          self.assertEqual(sc2, "scope1/tower/scope2/")
    with ops.name_scope("scope2"):
      with variable_scope.variable_scope(tower) as tower1:
        # Re-entering preserves original name scope.
        self.assertEqual(tower1.original_name_scope, "scope1/tower/")
        with ops.name_scope("foo") as sc2:
          self.assertEqual(sc2, "scope2/tower/foo/")
      # Test re-entering original name scope.
      with ops.name_scope(tower.original_name_scope):
        with ops.name_scope("bar") as sc3:
          self.assertEqual(sc3, "scope1/tower/bar/")
    with ops.name_scope("scope2"):
      with variable_scope.variable_scope(tower):
        with ops.name_scope(tower.original_name_scope):
          with ops.name_scope("bar") as sc3:
            self.assertEqual(sc3, "scope1/tower/bar_1/")
def testVarScopeObjectReuse(self):
  """reuse=True is inherited when re-entering a scope object and cannot be
  undone by reuse=False; a non-reusing scope can opt in per entry."""
  with self.cached_session():
    vs = None
    with variable_scope.variable_scope("jump", reuse=True) as scope:
      vs = scope

    with variable_scope.variable_scope(vs) as jump:
      self.assertTrue(jump.reuse)

    with variable_scope.variable_scope(vs, reuse=True) as jump_reuse:
      self.assertTrue(jump_reuse.reuse)

    with variable_scope.variable_scope(vs, reuse=False) as jump_no_reuse:
      self.assertTrue(jump_no_reuse.reuse)  # Inherited, cannot be undone.

    with variable_scope.variable_scope("jump", reuse=False) as scope:
      vs = scope

    with variable_scope.variable_scope(vs) as jump:
      self.assertFalse(jump.reuse)

    with variable_scope.variable_scope(vs, reuse=True) as jump_reuse:
      self.assertTrue(jump_reuse.reuse)

    with variable_scope.variable_scope(vs, reuse=False) as jump_no_reuse:
      self.assertFalse(jump_no_reuse.reuse)
def testVarScopeGetOrCreateReuse(self):
  """AUTO_REUSE creates the variable on first use and reuses it after."""
  with self.cached_session():

    def test_value(value):
      x = constant_op.constant(value)
      with variable_scope.variable_scope(
          "testVarScopeGetOrCreateReuse_bar",
          reuse=variable_scope.AUTO_REUSE):
        _ = state_ops.assign(variable_scope.get_variable("var", []), x)
      with variable_scope.variable_scope(
          "testVarScopeGetOrCreateReuse_bar",
          reuse=variable_scope.AUTO_REUSE):
        _ = variable_scope.get_variable("var", [])
      self.assertEqual(value, x.eval())

    test_value(42.)  # Variable is created.
    test_value(13.)  # Variable is reused hereafter.
    test_value(17.)
def testVarOpScope(self):
  """With a default_name, a named scope cannot be re-entered for creation,
  while scope=None uniquifies the default name (default, default_1, ...)."""
  with self.cached_session():
    with ops.name_scope("testVarOpScope1"):
      with variable_scope.variable_scope("tower", "default", []):
        self.assertEqual(
            variable_scope.get_variable("w", []).name, "tower/w:0")
        with ops.name_scope("testVarOpScope2") as sc2:
          self.assertEqual(sc2, "testVarOpScope1/tower/testVarOpScope2/")
      with variable_scope.variable_scope("tower", "default", []):
        with self.assertRaises(ValueError):
          variable_scope.get_variable("w", [])
        with ops.name_scope("testVarOpScope2") as sc2:
          self.assertEqual(sc2, "testVarOpScope1/tower_1/testVarOpScope2/")

    with ops.name_scope("testVarOpScope2"):
      with variable_scope.variable_scope(None, "default", []):
        self.assertEqual(
            variable_scope.get_variable("w", []).name, "default/w:0")
        with ops.name_scope("testVarOpScope2") as sc2:
          self.assertEqual(sc2, "testVarOpScope2/default/testVarOpScope2/")
      with variable_scope.variable_scope(None, "default", []):
        self.assertEqual(
            variable_scope.get_variable("w", []).name, "default_1/w:0")
        with ops.name_scope("testVarOpScope2") as sc2:
          self.assertEqual(sc2, "testVarOpScope2/default_1/testVarOpScope2/")
def testVarOpScopeUniqueNamesInterleavedSubstringScopes(self):
  """Default-name uniquification counts per exact name, even when another
  default name is a substring of it (defaultScope vs defaultScope1)."""
  with self.cached_session():
    with variable_scope.variable_scope(None, "defaultScope1"):
      with variable_scope.variable_scope(None, "layer"):
        self.assertEqual(
            variable_scope.get_variable("w", []).name,
            "defaultScope1/layer/w:0")
    with variable_scope.variable_scope(None, "defaultScope1"):
      with variable_scope.variable_scope(None, "layer"):
        self.assertEqual(
            variable_scope.get_variable("w", []).name,
            "defaultScope1_1/layer/w:0")
    with variable_scope.variable_scope(None, "defaultScope"):
      with variable_scope.variable_scope(None, "layer"):
        self.assertEqual(
            variable_scope.get_variable("w", []).name,
            "defaultScope/layer/w:0")
    with variable_scope.variable_scope(None, "defaultScope1"):
      with variable_scope.variable_scope(None, "layer"):
        self.assertEqual(
            variable_scope.get_variable("w", []).name,
            "defaultScope1_2/layer/w:0")
def testVarOpScopeUniqueNamesWithJump(self):
  """Default-name numbering continues even after re-entering the parent."""
  with self.cached_session():
    with variable_scope.variable_scope("default") as default:
      with variable_scope.variable_scope(None, "layer"):
        self.assertEqual(
            variable_scope.get_variable("w", []).name, "default/layer/w:0")
      with variable_scope.variable_scope(None, "layer"):
        self.assertEqual(
            variable_scope.get_variable("w", []).name, "default/layer_1/w:0")
    with variable_scope.variable_scope(default):
      pass
    # No matter the jump in the middle, unique numbering continues.
    with variable_scope.variable_scope(None, "layer"):
      self.assertEqual(
          variable_scope.get_variable("w", []).name, "default/layer_2/w:0")
def testVarOpScopeReuse(self):
  """Re-entering an outer scope with reuse=True reuses variable names while
  the surrounding name scope is uniquified (outer_1/...)."""
  with self.cached_session():
    with variable_scope.variable_scope("outer") as outer:
      with variable_scope.variable_scope("tower", "default", []):
        self.assertEqual(
            variable_scope.get_variable("w", []).name, "outer/tower/w:0")
        with ops.name_scope("scope2") as sc2:
          self.assertEqual(sc2, "outer/tower/scope2/")
      with variable_scope.variable_scope(None, "default", []):
        self.assertEqual(
            variable_scope.get_variable("w", []).name, "outer/default/w:0")
        with ops.name_scope("scope2") as sc2:
          self.assertEqual(sc2, "outer/default/scope2/")

    with variable_scope.variable_scope(outer, reuse=True) as outer:
      with variable_scope.variable_scope("tower", "default", []):
        self.assertEqual(
            variable_scope.get_variable("w", []).name, "outer/tower/w:0")
        with ops.name_scope("scope2") as sc2:
          self.assertEqual(sc2, "outer_1/tower/scope2/")
      with variable_scope.variable_scope(None, "default", []):
        self.assertEqual(
            variable_scope.get_variable("w", []).name, "outer/default/w:0")
        with ops.name_scope("scope2") as sc2:
          self.assertEqual(sc2, "outer_1/default/scope2/")
def testVarScopeGetVar(self):
  """get_variable honors reuse across scope re-entry and raises ValueError
  on missing reuse, unknown sub-scope, shape mismatch, or dtype mismatch."""
  with self.cached_session():
    with variable_scope.variable_scope("root"):
      with variable_scope.variable_scope("towerA") as tower_a:
        va = variable_scope.get_variable("v", [1])
        self.assertEqual(va.name, "root/towerA/v:0")

      with variable_scope.variable_scope(tower_a, reuse=True):
        va2 = variable_scope.get_variable("v", [1])
        self.assertEqual(va2, va)

      with variable_scope.variable_scope("towerB"):
        vb = variable_scope.get_variable("v", [1])
        self.assertEqual(vb.name, "root/towerB/v:0")

      # Without reuse, re-creating "v" in towerA must fail.
      with self.assertRaises(ValueError):
        with variable_scope.variable_scope("towerA"):
          va2 = variable_scope.get_variable("v", [1])

      with variable_scope.variable_scope("towerA", reuse=True):
        va2 = variable_scope.get_variable("v", [1])
        self.assertEqual(va2, va)

      with variable_scope.variable_scope("foo"):
        with variable_scope.variable_scope("bar"):
          v = variable_scope.get_variable("v", [1])
          self.assertEqual(v.name, "root/foo/bar/v:0")
          with variable_scope.variable_scope(tower_a, reuse=True):
            va3 = variable_scope.get_variable("v", [1])
            self.assertEqual(va, va3)

      # Reusing a name that was never created under tower_a fails.
      with self.assertRaises(ValueError):
        with variable_scope.variable_scope(tower_a, reuse=True):
          with variable_scope.variable_scope("baz"):
            variable_scope.get_variable("v", [1])

      with self.assertRaises(ValueError) as exc:
        with variable_scope.variable_scope(tower_a, reuse=True):
          variable_scope.get_variable("v", [2])  # Different shape.
      self.assertEqual("shape" in str(exc.exception), True)

      with self.assertRaises(ValueError) as exc:
        with variable_scope.variable_scope(tower_a, reuse=True):
          variable_scope.get_variable("v", [1], dtype=dtypes.int32)
      self.assertEqual("dtype" in str(exc.exception), True)
def testVarScopeOuterScope(self):
  """Each re-entry of an outer scope object keeps variable names stable but
  gets a fresh uniquified name scope (outer_1, outer_2)."""
  with self.cached_session():
    with variable_scope.variable_scope("outer") as outer:
      pass
    with variable_scope.variable_scope(outer):
      self.assertEqual(variable_scope.get_variable("w", []).name, "outer/w:0")
      with ops.name_scope("scope2") as sc2:
        self.assertEqual(sc2, "outer_1/scope2/")
      with variable_scope.variable_scope("default"):
        self.assertEqual(
            variable_scope.get_variable("w", []).name, "outer/default/w:0")
        with ops.name_scope("scope2") as sc2:
          self.assertEqual(sc2, "outer_1/default/scope2/")

    with variable_scope.variable_scope(outer, reuse=True):
      self.assertEqual(variable_scope.get_variable("w", []).name, "outer/w:0")
      with ops.name_scope("scope2") as sc2:
        self.assertEqual(sc2, "outer_2/scope2/")
      with variable_scope.variable_scope("default", reuse=True):
        self.assertEqual(
            variable_scope.get_variable("w", []).name, "outer/default/w:0")
        with ops.name_scope("scope2") as sc2:
          self.assertEqual(sc2, "outer_2/default/scope2/")
def testVarScopeNestedOuterScope(self):
  """Re-entering a scope from inside itself nests the name scope
  (outer/outer, outer/outer_1) while variable names stay rooted at outer/."""
  with self.cached_session():
    with variable_scope.variable_scope("outer") as outer:
      with variable_scope.variable_scope(outer):
        self.assertEqual(
            variable_scope.get_variable("w", []).name, "outer/w:0")
        with ops.name_scope("scope2") as sc2:
          self.assertEqual(sc2, "outer/outer/scope2/")
      with variable_scope.variable_scope("default"):
        self.assertEqual(
            variable_scope.get_variable("w", []).name, "outer/default/w:0")
        with ops.name_scope("scope2") as sc2:
          self.assertEqual(sc2, "outer/default/scope2/")

      with variable_scope.variable_scope(outer, reuse=True):
        self.assertEqual(
            variable_scope.get_variable("w", []).name, "outer/w:0")
        with ops.name_scope("scope2") as sc2:
          self.assertEqual(sc2, "outer/outer_1/scope2/")
      with variable_scope.variable_scope("default", reuse=True):
        self.assertEqual(
            variable_scope.get_variable("w", []).name, "outer/default/w:0")
        with ops.name_scope("scope2") as sc2:
          self.assertEqual(sc2, "outer/default_1/scope2/")
def testVarOpScopeReuseParam(self):
  """reuse can come from the inner scope's own reuse arg or be switched on
  mid-scope via outer.reuse_variables()."""
  with self.cached_session():
    with variable_scope.variable_scope("outer") as outer:
      with variable_scope.variable_scope("tower", "default", []):
        self.assertEqual(
            variable_scope.get_variable("w", []).name, "outer/tower/w:0")
        with ops.name_scope("scope2") as sc2:
          self.assertEqual(sc2, "outer/tower/scope2/")
      with variable_scope.variable_scope(None, "default", []):
        self.assertEqual(
            variable_scope.get_variable("w", []).name, "outer/default/w:0")
        with ops.name_scope("scope2") as sc2:
          self.assertEqual(sc2, "outer/default/scope2/")

    with variable_scope.variable_scope(outer) as outer:
      with variable_scope.variable_scope("tower", "default", reuse=True):
        self.assertEqual(
            variable_scope.get_variable("w", []).name, "outer/tower/w:0")
        with ops.name_scope("scope2") as sc2:
          self.assertEqual(sc2, "outer_1/tower/scope2/")
      outer.reuse_variables()
      with variable_scope.variable_scope(None, "default", []):
        self.assertEqual(
            variable_scope.get_variable("w", []).name, "outer/default/w:0")
        with ops.name_scope("scope2") as sc2:
          self.assertEqual(sc2, "outer_1/default/scope2/")
def testVarOpScopeReuseError(self):
  """reuse=True with only a default_name (scope=None) is a ValueError."""
  with self.cached_session():
    with self.assertRaises(ValueError):
      with variable_scope.variable_scope(None, "default", reuse=True):
        self.assertEqual(
            variable_scope.get_variable("w", []).name, "outer/tower/w:0")
def testVarOpScopeOuterScope(self):
  """Passing a scope object with a default_name behaves like re-entering
  that scope; reuse_variables() then applies to nested default scopes."""
  with self.cached_session():
    with variable_scope.variable_scope("outer") as outer:
      pass
    with variable_scope.variable_scope(outer, "default", []):
      self.assertEqual(variable_scope.get_variable("w", []).name, "outer/w:0")
      with ops.name_scope("scope2") as sc2:
        self.assertEqual(sc2, "outer_1/scope2/")
      with variable_scope.variable_scope(None, "default", []):
        self.assertEqual(
            variable_scope.get_variable("w", []).name, "outer/default/w:0")
        with ops.name_scope("scope2") as sc2:
          self.assertEqual(sc2, "outer_1/default/scope2/")

    with variable_scope.variable_scope(outer, "default", reuse=True):
      self.assertEqual(variable_scope.get_variable("w", []).name, "outer/w:0")
      with ops.name_scope("scope2") as sc2:
        self.assertEqual(sc2, "outer_2/scope2/")
      outer.reuse_variables()
      with variable_scope.variable_scope(None, "default", []):
        self.assertEqual(
            variable_scope.get_variable("w", []).name, "outer/default/w:0")
        with ops.name_scope("scope2") as sc2:
          self.assertEqual(sc2, "outer_2/default/scope2/")
def testVarOpScopeNestedOuterScope(self):
  """Scope-object re-entry with a default_name nests name scopes inside the
  live scope and uniquifies them outside it."""
  with self.cached_session():
    with variable_scope.variable_scope("outer") as outer:
      with variable_scope.variable_scope(outer, "default", []):
        self.assertEqual(
            variable_scope.get_variable("w", []).name, "outer/w:0")
        with ops.name_scope("scope2") as sc2:
          self.assertEqual(sc2, "outer/outer/scope2/")
      with variable_scope.variable_scope(None, "default", []):
        self.assertEqual(
            variable_scope.get_variable("w", []).name, "outer/default/w:0")
        with ops.name_scope("scope2") as sc2:
          self.assertEqual(sc2, "outer/default/scope2/")

    with variable_scope.variable_scope(outer, "default", reuse=True):
      self.assertEqual(variable_scope.get_variable("w", []).name, "outer/w:0")
      with ops.name_scope("scope2") as sc2:
        self.assertEqual(sc2, "outer_1/scope2/")
      with variable_scope.variable_scope(None, "default", []):
        self.assertEqual(
            variable_scope.get_variable("w", []).name, "outer/default/w:0")
        with ops.name_scope("scope2") as sc2:
          self.assertEqual(sc2, "outer_1/default/scope2/")
def testBasicWhenAuxiliaryNameScopeIsFalse(self):
  """auxiliary_name_scope=False opens the variable scope without opening a
  matching op name scope: variables get the scope prefix, plain ops do not."""
  with self.cached_session():
    with variable_scope.variable_scope(
        "scope", auxiliary_name_scope=False) as scope:
      self.assertEqual(scope.original_name_scope, "")
      # Variables are still prefixed with the scope name ...
      self.assertEqual(variable_scope.get_variable("w", []).name, "scope/w:0")
      # ... but ordinary ops are created at the root name scope.
      self.assertEqual(constant_op.constant([], name="c").name, "c:0")
    with variable_scope.variable_scope(scope, auxiliary_name_scope=False):
      self.assertEqual(scope.original_name_scope, "")
      self.assertEqual(
          variable_scope.get_variable("w1", []).name, "scope/w1:0")
      self.assertEqual(constant_op.constant([], name="c1").name, "c1:0")
    # Recheck: new name scope is NOT created before
    with ops.name_scope("scope"):
      self.assertEqual(constant_op.constant([], name="c").name, "scope/c:0")

    with variable_scope.variable_scope("outer"):
      with variable_scope.variable_scope(
          "inner", auxiliary_name_scope=False) as inner:
        self.assertEqual(inner.original_name_scope, "outer/")
        self.assertEqual(
            variable_scope.get_variable("w", []).name, "outer/inner/w:0")
        # Ops fall back to the enclosing "outer/" name scope.
        self.assertEqual(constant_op.constant([], name="c").name, "outer/c:0")
      with variable_scope.variable_scope(
          inner, auxiliary_name_scope=False) as inner1:
        self.assertEqual(inner1.original_name_scope, "outer/")
        self.assertEqual(
            variable_scope.get_variable("w1", []).name, "outer/inner/w1:0")
        self.assertEqual(
            constant_op.constant([], name="c1").name, "outer/c1:0")
      # Recheck: new name scope is NOT created before
      with ops.name_scope("inner"):
        self.assertEqual(
            constant_op.constant([], name="c").name, "outer/inner/c:0")
def testCreatedByDefaultNameWhenAuxiliaryNameScopeIsFalse(self):
  """Same as the basic case, but the scope comes from `default_name`."""
  with self.cached_session():
    with variable_scope.variable_scope(
        None, default_name="default", auxiliary_name_scope=False) as scope:
      self.assertEqual(scope.original_name_scope, "")
      self.assertEqual(
          variable_scope.get_variable("w", []).name, "default/w:0")
      # Plain ops stay at the root name scope.
      self.assertEqual(constant_op.constant([], name="c").name, "c:0")
    # Recheck: new name scope is NOT created before
    with ops.name_scope("default"):
      self.assertEqual(constant_op.constant([], name="c").name, "default/c:0")

    with variable_scope.variable_scope("outer"):
      with variable_scope.variable_scope(
          None, default_name="default", auxiliary_name_scope=False) as inner:
        self.assertEqual(inner.original_name_scope, "outer/")
        self.assertEqual(
            variable_scope.get_variable("w", []).name, "outer/default/w:0")
        self.assertEqual(constant_op.constant([], name="c").name, "outer/c:0")
      # Recheck: new name scope is NOT created before
      with ops.name_scope("default"):
        self.assertEqual(
            constant_op.constant([], name="c").name, "outer/default/c:0")
def testReenterRootScopeWhenAuxiliaryNameScopeIsFalse(self):
  """Re-entering the root variable scope without an auxiliary name scope
  leaves both variables and ops unprefixed by the scope itself."""
  with self.cached_session():
    root_scope = variable_scope.get_variable_scope()
    with variable_scope.variable_scope(
        root_scope, auxiliary_name_scope=False) as scope:
      self.assertEqual(scope.original_name_scope, "")
      self.assertEqual(variable_scope.get_variable("w", []).name, "w:0")
      self.assertEqual(constant_op.constant([], name="c").name, "c:0")

    with variable_scope.variable_scope("outer"):
      with variable_scope.variable_scope(
          root_scope, auxiliary_name_scope=False) as inner:
        self.assertEqual(inner.original_name_scope, "")
        # Variables go to the root; ops keep the surrounding "outer/" scope.
        self.assertEqual(variable_scope.get_variable("w1", []).name, "w1:0")
        self.assertEqual(
            constant_op.constant([], name="c1").name, "outer/c1:0")
def testAuxiliaryNameScopeIsInvalid(self):
  """A non-bool `auxiliary_name_scope` raises TypeError for every way of
  naming the scope (default_name, string name, or an existing scope)."""
  with self.cached_session():
    with self.assertRaisesRegexp(TypeError, "auxiliary_name_scope"):
      with variable_scope.variable_scope(
          None, default_name="scope", auxiliary_name_scope="invalid"):
        pass

    with self.assertRaisesRegexp(TypeError, "auxiliary_name_scope"):
      with variable_scope.variable_scope(
          "scope", auxiliary_name_scope="invalid"):
        pass

    with variable_scope.variable_scope("scope") as scope:
      pass
    with self.assertRaisesRegexp(TypeError, "auxiliary_name_scope"):
      with variable_scope.variable_scope(
          scope, auxiliary_name_scope="invalid"):
        pass
def testReuseScopeWithoutNameScopeCollision(self):
  """Reopening a scope with auxiliary_name_scope=False plus an explicit
  ops.name_scope(scope.original_name_scope) does not burn a uniquified
  name scope, so a later name_scope("inner") is not renamed."""
  # Github issue: #13429
  with self.cached_session():
    with variable_scope.variable_scope("outer"):
      with variable_scope.variable_scope("inner") as inner:
        pass

    with variable_scope.variable_scope(
        inner, auxiliary_name_scope=False) as scope:
      with ops.name_scope(scope.original_name_scope):
        self.assertEqual(
            variable_scope.get_variable("w", []).name, "outer/inner/w:0")
        self.assertEqual(
            constant_op.constant([], name="c").name, "outer/inner/c:0")
    # No collision: "inner" was not consumed as an op name scope above.
    with ops.name_scope("inner"):
      self.assertEqual(constant_op.constant([], name="c").name, "inner/c:0")

    with variable_scope.variable_scope("another"):
      with variable_scope.variable_scope(
          inner, auxiliary_name_scope=False) as scope1:
        with ops.name_scope(scope1.original_name_scope):
          self.assertEqual(
              variable_scope.get_variable("w1", []).name, "outer/inner/w1:0")
          self.assertEqual(
              constant_op.constant([], name="c1").name, "outer/inner/c1:0")
      with ops.name_scope("inner"):
        self.assertEqual(
            constant_op.constant([], name="c").name, "another/inner/c:0")
@test_util.run_in_graph_and_eager_modes
def testGetLocalVar(self):
  """get_local_variable respects scope naming, collections, and reuse."""
  # Check that local variable respects naming.
  with variable_scope.variable_scope("outer") as outer:
    with variable_scope.variable_scope(outer, "default", []):
      local_var = variable_scope.get_local_variable(
          "w", [], collections=["foo"])
      self.assertEqual(local_var.name, "outer/w:0")

  if not context.executing_eagerly():
    # Since variable is local, it should be in the local variable collection
    # but not the trainable collection.
    self.assertIn(local_var,
                  ops.get_collection(ops.GraphKeys.LOCAL_VARIABLES))
    self.assertIn(local_var, ops.get_collection("foo"))
    self.assertNotIn(local_var,
                     ops.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES))

    # Check that local variable respects `reuse`.
    with variable_scope.variable_scope(outer, "default", reuse=True):
      self.assertEqual(
          variable_scope.get_local_variable("w", []).name, "outer/w:0")
def testSignatureGetVarVsGetLocalVar(self):
  """get_variable() and get_local_variable() must accept identical args."""
  global_arg_names = tf_inspect.getargspec(variable_scope.get_variable)[0]
  local_arg_names = tf_inspect.getargspec(
      variable_scope.get_local_variable)[0]
  self.assertEqual(global_arg_names, local_arg_names)
def testGetVarWithDevice(self):
  """A device function sees each variable-creating op's name and dtype."""
  g = ops.Graph()
  recorded = []

  def record_and_place(op):
    # Record every variable-creation op, then pin it to GPU 0.
    if op.type in ["Variable", "VariableV2", "VarHandleOp"]:
      recorded.append((op.name, op.get_attr("dtype")))
    return "/device:GPU:0"

  with g.as_default():
    with ops.device(record_and_place):
      _ = variable_scope.get_variable("x", (100, 200))
      _ = variable_scope.get_variable(
          "y", dtype=dtypes.int64, initializer=numpy.arange(73))

  self.assertEqual(("x", dtypes.float32), recorded[0])
  self.assertEqual(("y", dtypes.int64), recorded[1])
def testGetCollection(self):
  """scope.get_collection() filters graph collections by the scope prefix;
  the root scope sees everything in creation order."""
  with self.cached_session():
    _ = variable_scope.get_variable("testGetCollection_a", [])
    _ = variable_scope.get_variable(
        "testGetCollection_b", [], trainable=False)
    with variable_scope.variable_scope("testGetCollection_foo_") as scope1:
      _ = variable_scope.get_variable("testGetCollection_a", [])
      _ = variable_scope.get_variable(
          "testGetCollection_b", [], trainable=False)
      # Only the trainable variable appears in TRAINABLE_VARIABLES.
      self.assertEqual([
          v.name
          for v in scope1.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES)
      ], ["testGetCollection_foo_/testGetCollection_a:0"])
      self.assertEqual([
          v.name
          for v in scope1.get_collection(ops.GraphKeys.GLOBAL_VARIABLES)
      ], [
          "testGetCollection_foo_/testGetCollection_a:0",
          "testGetCollection_foo_/testGetCollection_b:0"
      ])
    # "testGetCollection_foo" is a prefix-distinct sibling of "..._foo_";
    # neither scope's filter should pick up the other's variables.
    with variable_scope.variable_scope("testGetCollection_foo") as scope2:
      _ = variable_scope.get_variable("testGetCollection_a", [])
      _ = variable_scope.get_variable(
          "testGetCollection_b", [], trainable=False)
      self.assertEqual([
          v.name
          for v in scope2.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES)
      ], ["testGetCollection_foo/testGetCollection_a:0"])
      self.assertEqual([
          v.name
          for v in scope2.get_collection(ops.GraphKeys.GLOBAL_VARIABLES)
      ], [
          "testGetCollection_foo/testGetCollection_a:0",
          "testGetCollection_foo/testGetCollection_b:0"
      ])
    # The root scope sees every variable created above.
    scope = variable_scope.get_variable_scope()
    self.assertEqual([
        v.name for v in scope.get_collection(ops.GraphKeys.GLOBAL_VARIABLES)
    ], [
        "testGetCollection_a:0", "testGetCollection_b:0",
        "testGetCollection_foo_/testGetCollection_a:0",
        "testGetCollection_foo_/testGetCollection_b:0",
        "testGetCollection_foo/testGetCollection_a:0",
        "testGetCollection_foo/testGetCollection_b:0"
    ])
    self.assertEqual([
        v.name
        for v in scope.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES)
    ], [
        "testGetCollection_a:0",
        "testGetCollection_foo_/testGetCollection_a:0",
        "testGetCollection_foo/testGetCollection_a:0"
    ])
def testGetTrainableVariablesWithGetVariable(self):
  """trainable_variables() tracks `trainable` and the synchronization mode
  for variables made via get_variable()."""
  with self.cached_session():
    _ = variable_scope.get_variable("testGetTrainableVariables_a", [])
    with variable_scope.variable_scope(
        "testGetTrainableVariables_foo") as scope:
      _ = variable_scope.get_variable("testGetTrainableVariables_b", [])
      _ = variable_scope.get_variable(
          "testGetTrainableVariables_c", [], trainable=False)

      # sync `ON_READ` sets trainable=False
      _ = variable_scope.get_variable(
          "testGetTrainableVariables_d", [],
          synchronization=variable_scope.VariableSynchronization.ON_READ)
      self.assertEqual(
          [v.name for v in scope.trainable_variables()],
          ["testGetTrainableVariables_foo/testGetTrainableVariables_b:0"])

      # All other sync values sets trainable=True
      _ = variable_scope.get_variable(
          "testGetTrainableVariables_e", [],
          synchronization=variable_scope.VariableSynchronization.ON_WRITE)
      self.assertEqual([v.name for v in scope.trainable_variables()], [
          "testGetTrainableVariables_foo/testGetTrainableVariables_b:0",
          "testGetTrainableVariables_foo/testGetTrainableVariables_e:0"
      ])

    # Explicit trainable=True combined with ON_READ is rejected outright.
    with self.assertRaisesRegexp(
        ValueError, "Synchronization value can be set to "
        "VariableSynchronization.ON_READ only for non-trainable variables. "
        "You have specified trainable=True and "
        "synchronization=VariableSynchronization.ON_READ."):
      _ = variable_scope.get_variable(
          "testGetTrainableVariables_e", [],
          synchronization=variable_scope.VariableSynchronization.ON_READ,
          trainable=True)
def testGetTrainableVariablesWithVariable(self):
  """Same trainability rules as above, but via variable_scope.variable()."""
  with self.cached_session():
    _ = variable_scope.variable(1.0, name="testGetTrainableVariables_a")
    with variable_scope.variable_scope(
        "testGetTrainableVariables_foo") as scope:
      _ = variable_scope.variable(1.0, name="testGetTrainableVariables_b")
      _ = variable_scope.variable(
          1.0, name="testGetTrainableVariables_c", trainable=False)

      # sync `ON_READ` sets trainable=False
      _ = variable_scope.variable(
          1.0,
          name="testGetTrainableVariables_d",
          synchronization=variable_scope.VariableSynchronization.ON_READ)
      self.assertEqual(
          [v.name for v in scope.trainable_variables()],
          ["testGetTrainableVariables_foo/testGetTrainableVariables_b:0"])

      # All other sync values sets trainable=True
      _ = variable_scope.variable(
          1.0,
          name="testGetTrainableVariables_e",
          synchronization=variable_scope.VariableSynchronization.ON_WRITE)
      self.assertEqual([v.name for v in scope.trainable_variables()], [
          "testGetTrainableVariables_foo/testGetTrainableVariables_b:0",
          "testGetTrainableVariables_foo/testGetTrainableVariables_e:0"
      ])

    # Explicit trainable=True combined with ON_READ is rejected outright.
    with self.assertRaisesRegexp(
        ValueError, "Synchronization value can be set to "
        "VariableSynchronization.ON_READ only for non-trainable variables. "
        "You have specified trainable=True and "
        "synchronization=VariableSynchronization.ON_READ."):
      _ = variable_scope.variable(
          1.0,
          name="testGetTrainableVariables_e",
          synchronization=variable_scope.VariableSynchronization.ON_READ,
          trainable=True)
def testGetGlobalVariables(self):
  """scope.global_variables() only returns variables under the scope."""
  with self.cached_session():
    _ = variable_scope.get_variable("testGetGlobalVariables_a", [])
    with variable_scope.variable_scope("testGetGlobalVariables_foo") as scope:
      _ = variable_scope.get_variable("testGetGlobalVariables_b", [])
      # "testGetGlobalVariables_a" lives at the root, so it is excluded.
      self.assertEqual(
          [v.name for v in scope.global_variables()],
          ["testGetGlobalVariables_foo/"
           "testGetGlobalVariables_b:0"])
def testGetLocalVariables(self):
  """scope.local_variables() is limited to the scope's LOCAL_VARIABLES."""
  with self.cached_session():
    _ = variable_scope.get_variable(
        "a", [], collections=[ops.GraphKeys.LOCAL_VARIABLES])
    with variable_scope.variable_scope("foo") as scope:
      _ = variable_scope.get_variable(
          "b", [], collections=[ops.GraphKeys.LOCAL_VARIABLES])
      # "c" is a regular (global) variable and must not be reported.
      _ = variable_scope.get_variable("c", [])
      self.assertEqual([v.name for v in scope.local_variables()], ["foo/b:0"])
def testGetVariableWithRefDtype(self):
  """get_variable accepts another variable's (possibly _ref) dtype."""
  existing = variable_scope.get_variable(
      "v", shape=[3, 4], dtype=dtypes.float32)
  # Passing existing.dtype (which may be a _ref dtype) must not fail.
  _ = variable_scope.get_variable("w", shape=[5, 6], dtype=existing.dtype)
def testTwoGraphs(self):
  """Scope-name validation still fires with two graphs stacked as default."""

  def open_invalid_scope():
    outer_graph = ops.Graph()
    inner_graph = ops.Graph()
    with outer_graph.as_default():
      with inner_graph.as_default():
        # "_" is rejected as a scope name regardless of graph nesting.
        with variable_scope.variable_scope("_"):
          pass

  self.assertRaisesRegexp(ValueError, "'_' is not a valid scope name",
                          open_invalid_scope)
def axis0_into1_partitioner(shape=None, **unused_kwargs):
  """Partitioner that leaves every axis whole (one partition per axis)."""
  return [1 for _ in shape]
def axis0_into2_partitioner(shape=None, **unused_kwargs):
  """Partitioner that splits axis 0 into two parts; other axes stay whole."""
  partitions = [1 for _ in shape]
  partitions[0] = 2
  return partitions
def axis0_into3_partitioner(shape=None, **unused_kwargs):
  """Partitioner that splits axis 0 into three parts; other axes stay whole."""
  partitions = [1 for _ in shape]
  partitions[0] = 3
  return partitions
class VariableScopeWithPartitioningTest(test.TestCase):
  """Tests for variable scopes configured with a `partitioner`."""

  def testResultNameMatchesRequested(self):
    """A partitioned variable reports the requested name; its shards get
    "/part_N" suffixes in the global collection."""
    with variable_scope.variable_scope(
        "scope0", partitioner=axis0_into2_partitioner):
      v = variable_scope.get_variable("name0", shape=(3, 1, 1))
      self.assertEqual(v.name, "scope0/name0")
      v_concat = v.as_tensor()
      self.assertEqual(v_concat.name, "scope0/name0:0")
      variables = ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES)
      # Two partitions requested -> exactly part_0 and part_1 exist.
      self.assertIn("scope0/name0/part_0:0", [x.name for x in variables])
      self.assertIn("scope0/name0/part_1:0", [x.name for x in variables])
      self.assertNotIn("scope0/name0/part_2:0", [x.name for x in variables])

  def testBreaksIfPartitioningChanges(self):
    """Reusing a partitioned variable with a different partitioning fails."""
    with variable_scope.variable_scope(
        "scope0", partitioner=axis0_into2_partitioner):
      variable_scope.get_variable("name0", shape=(3, 1, 1))

    # Reuse with more partitions than originally created.
    with variable_scope.variable_scope(
        "scope0", partitioner=axis0_into3_partitioner, reuse=True):
      with self.assertRaisesRegexp(
          ValueError,
          "Trying to reuse partitioned variable .* but specified partitions .* "
          "and found partitions .*"):
        variable_scope.get_variable("name0", shape=(3, 1, 1))

    # Reuse with fewer partitions also fails.
    with variable_scope.variable_scope(
        "scope0", partitioner=axis0_into1_partitioner, reuse=True):
      with self.assertRaisesRegexp(
          ValueError,
          "Trying to reuse partitioned variable .* but specified partitions .* "
          "and found partitions .*"):
        variable_scope.get_variable("name0", shape=(3, 1, 1))

  def testReturnsExistingConcatenatedValueIfReuse(self):
    """Reuse returns the same partitioned-variable object."""
    with variable_scope.variable_scope(
        "scope0", partitioner=axis0_into2_partitioner):
      v_concat = variable_scope.get_variable("name0", shape=(3, 1, 1))
      variable_scope.get_variable_scope().reuse_variables()
      v_concat_2 = variable_scope.get_variable("name0", shape=(3, 1, 1))
      self.assertEqual(v_concat, v_concat_2)

  def testAllowsReuseWithoutPartitioner(self):
    """A partitioned variable can be reused from a scope with no partitioner."""
    with variable_scope.variable_scope(
        "scope0", partitioner=axis0_into2_partitioner):
      v = variable_scope.get_variable("name0", shape=(3, 1, 1))
    with variable_scope.variable_scope("scope0", reuse=True):
      v_reused = variable_scope.get_variable("name0")
    self.assertEqual(v, v_reused)

  def testPropagatePartitionerOnReopening(self):
    """Reopening a scope object carries its partitioner along."""
    with variable_scope.variable_scope(
        "scope0", partitioner=axis0_into2_partitioner) as vs:
      self.assertEqual(axis0_into2_partitioner, vs.partitioner)
      with variable_scope.variable_scope(vs) as vs1:
        self.assertEqual(axis0_into2_partitioner, vs1.partitioner)

  def testScalarIgnoresPartitioner(self):
    """Scalars cannot be sliced, so the partitioner is silently skipped."""
    with variable_scope.variable_scope(
        "scope0", partitioner=axis0_into2_partitioner):
      v = variable_scope.get_variable("name0", shape=())
      # Plain (non-partitioned) variable name, no "/part_N" suffix.
      self.assertEqual(v.name, "scope0/name0:0")
      variables = ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES)
      self.assertIn("scope0/name0:0", [x.name for x in variables])

  def _testPartitionConcatenatesAlongCorrectAxis(self, use_resource):
    """Helper: shards split along the partitioned axis; the concatenated
    value keeps the full requested shape."""

    def _part_axis_0(**unused_kwargs):
      return (2, 1, 1)

    def _part_axis_1(**unused_kwargs):
      return (1, 2, 1)

    with variable_scope.variable_scope("root", use_resource=use_resource):
      v0 = variable_scope.get_variable(
          "n0", shape=(2, 2, 2), partitioner=_part_axis_0)
      v1 = variable_scope.get_variable(
          "n1", shape=(2, 2, 2), partitioner=_part_axis_1)

    self.assertEqual(v0.get_shape(), (2, 2, 2))
    self.assertEqual(v1.get_shape(), (2, 2, 2))

    # v0 is split along axis 0: each shard is (1, 2, 2).
    n0_0 = list(v0)[0]
    n0_1 = list(v0)[1]
    self.assertEqual(n0_0.get_shape(), (1, 2, 2))
    self.assertEqual(n0_1.get_shape(), (1, 2, 2))

    # v1 is split along axis 1: each shard is (2, 1, 2).
    n1_0 = list(v1)[0]
    n1_1 = list(v1)[1]
    self.assertEqual(n1_0.get_shape(), (2, 1, 2))
    self.assertEqual(n1_1.get_shape(), (2, 1, 2))

  def testPartitionConcatenatesAlongCorrectAxis(self):
    self._testPartitionConcatenatesAlongCorrectAxis(use_resource=False)

  def testPartitionConcatenatesAlongCorrectAxisResource(self):
    self._testPartitionConcatenatesAlongCorrectAxis(use_resource=True)
class VariableScopeWithCustomGetterTest(test.TestCase):
  """Tests for `custom_getter` hooks and variable-creator scopes."""

  def testNonCallableGetterFails(self):
    """A non-callable custom_getter is rejected with ValueError."""
    with self.assertRaisesRegexp(ValueError, r"custom_getter .* not callable:"):
      with variable_scope.variable_scope("scope0", custom_getter=3):
        variable_scope.get_variable("name0")
    with self.assertRaisesRegexp(ValueError, r"custom_getter .* not callable:"):
      variable_scope.get_variable("name0", custom_getter=3)

  def testNoSideEffectsWithIdentityCustomGetter(self):
    """An identity getter returns the same variables and is invoked only in
    scopes that install it."""
    called = [0]

    def custom_getter(getter, *args, **kwargs):
      called[0] += 1
      return getter(*args, **kwargs)

    with variable_scope.variable_scope(
        "scope", custom_getter=custom_getter) as scope:
      v = variable_scope.get_variable("v", [1])
    with variable_scope.variable_scope(scope, reuse=True):
      v2 = variable_scope.get_variable("v", [1])
    # "new_scope" has no custom getter; this creation is not counted.
    with variable_scope.variable_scope("new_scope") as new_scope:
      v3 = variable_scope.get_variable("v3", [1])
    with variable_scope.variable_scope(
        new_scope, reuse=True, custom_getter=custom_getter):
      v4 = variable_scope.get_variable("v3", [1])

    self.assertEqual(v, v2)
    self.assertEqual(v3, v4)
    self.assertEqual(3, called[0])  # skipped one in the first new_scope

  def testSynchronizationAndAggregationWithCustomGetter(self):
    """The getter receives the synchronization/aggregation kwargs as passed."""
    called = [0]
    synchronization = variable_scope.VariableSynchronization.AUTO
    aggregation = variable_scope.VariableAggregation.NONE

    def custom_getter(getter, *args, **kwargs):
      called[0] += 1

      # Verify synchronization and aggregation kwargs are as expected.
      self.assertEqual(kwargs["synchronization"], synchronization)
      self.assertEqual(kwargs["aggregation"], aggregation)
      return getter(*args, **kwargs)

    with variable_scope.variable_scope("scope", custom_getter=custom_getter):
      variable_scope.get_variable("v", [1])
    self.assertEqual(1, called[0])

    with variable_scope.variable_scope("scope", custom_getter=custom_getter):
      # Closed-over expectations updated before the second creation.
      synchronization = variable_scope.VariableSynchronization.ON_READ
      aggregation = variable_scope.VariableAggregation.MEAN
      variable_scope.get_variable(
          "v1", [1], synchronization=synchronization, aggregation=aggregation)
    self.assertEqual(2, called[0])

  def testCustomGetterWithReuse(self):
    """The getter sees kwargs["reuse"] and may branch on it."""
    # Custom getter can choose to behave differently on reused variables.
    def custom_getter(getter, *args, **kwargs):
      var = getter(*args, **kwargs)
      if kwargs["reuse"]:
        # This can be used, e.g., for changing the caching device if needed.
        return array_ops.identity(var, name="reused")
      else:
        return array_ops.identity(var, name="not_reused")

    with variable_scope.variable_scope(
        "scope", custom_getter=custom_getter) as scope:
      v = variable_scope.get_variable("v", [1])
    with variable_scope.variable_scope(scope, reuse=True):
      v2 = variable_scope.get_variable("v", [1])

    self.assertEqual(v.name, "not_reused:0")
    self.assertEqual(v2.name, "reused:0")

  def testGetterThatCreatesTwoVariablesAndSumsThem(self):
    """A getter may fan out one request into several underlying variables."""

    def custom_getter(getter, name, *args, **kwargs):
      g_0 = getter("%s/0" % name, *args, **kwargs)
      g_1 = getter("%s/1" % name, *args, **kwargs)
      with ops.name_scope("custom_getter"):
        return g_0 + g_1

    with variable_scope.variable_scope("scope", custom_getter=custom_getter):
      v = variable_scope.get_variable("v", [1, 2, 3])

    self.assertEqual([1, 2, 3], v.get_shape())
    true_vars = variables_lib.trainable_variables()
    # One request produced two real variables plus a sum op.
    self.assertEqual(2, len(true_vars))
    self.assertEqual("scope/v/0:0", true_vars[0].name)
    self.assertEqual("scope/v/1:0", true_vars[1].name)
    self.assertEqual("custom_getter/add:0", v.name)
    with self.cached_session() as sess:
      variables_lib.global_variables_initializer().run()
      np_vars, np_v = sess.run([true_vars, v])
      self.assertAllClose(np_v, sum(np_vars))

  def testNestedCustomGetters(self):
    """Nested getters compose outermost-last: sum(sum(prod(...)))."""

    def sum_getter(getter, name, *args, **kwargs):
      g_0 = getter("%s/sum_0" % name, *args, **kwargs)
      g_1 = getter("%s/sum_1" % name, *args, **kwargs)
      with ops.name_scope("sum_getter"):
        return g_0 + g_1

    def prod_getter(getter, name, *args, **kwargs):
      g_0 = getter("%s/prod_0" % name, *args, **kwargs)
      g_1 = getter("%s/prod_1" % name, *args, **kwargs)
      with ops.name_scope("prod_getter"):
        return g_0 * g_1

    with variable_scope.variable_scope("prod_scope", custom_getter=prod_getter):
      with variable_scope.variable_scope("sum_scope", custom_getter=sum_getter):
        with variable_scope.variable_scope(
            "inner_sum_scope", custom_getter=sum_getter):
          # take sums of sums of products
          v = variable_scope.get_variable("v", [1, 2, 3])

    self.assertEqual([1, 2, 3], v.get_shape())
    true_vars = variables_lib.trainable_variables()
    # 2 x 2 x 2 fan-out -> eight real variables.
    self.assertEqual(8, len(true_vars))
    template = (
        "prod_scope/sum_scope/inner_sum_scope/v/sum_%d/sum_%d/prod_%d:0")
    self.assertEqual(template % (0, 0, 0), true_vars[0].name)
    self.assertEqual(template % (0, 0, 1), true_vars[1].name)
    self.assertEqual(template % (0, 1, 0), true_vars[2].name)
    self.assertEqual(template % (0, 1, 1), true_vars[3].name)
    self.assertEqual(template % (1, 0, 0), true_vars[4].name)
    self.assertEqual(template % (1, 0, 1), true_vars[5].name)
    self.assertEqual(template % (1, 1, 0), true_vars[6].name)
    self.assertEqual(template % (1, 1, 1), true_vars[7].name)
    with self.cached_session() as sess:
      variables_lib.global_variables_initializer().run()
      np_vars, np_v = sess.run([true_vars, v])
      # take products of sums of products
      self.assertAllClose(
          np_v, (((np_vars[0] * np_vars[1]) + (np_vars[2] * np_vars[3])) + (
              (np_vars[4] * np_vars[5]) + (np_vars[6] * np_vars[7]))))

  def testVariableCreator(self):
    """variable_creator_scope chains creators and forwards sync/aggregation."""
    variable_names = []

    def creator_a(next_creator, **kwargs):
      variable_names.append(kwargs.get("name", ""))
      return next_creator(**kwargs)

    def creator_b(next_creator, **kwargs):
      kwargs["name"] = "forced_name"
      return next_creator(**kwargs)

    with variable_scope.variable_creator_scope(creator_a):
      with variable_scope.variable_creator_scope(creator_b):
        variable_scope.variable(1.0, name="one_name")

    # creator_b (inner) renamed the variable before creator_a observed it.
    self.assertAllEqual(variable_names, ["forced_name"])

    called = [False]

    def creater_c(next_creator, **kwargs):
      called[0] = True
      self.assertEqual(kwargs["synchronization"],
                       variable_scope.VariableSynchronization.ON_WRITE)
      self.assertEqual(kwargs["aggregation"],
                       variable_scope.VariableAggregation.MEAN)
      return next_creator(**kwargs)

    with variable_scope.variable_creator_scope(creater_c):
      variable_scope.get_variable(
          "v", [],
          synchronization=variable_scope.VariableSynchronization.ON_WRITE,
          aggregation=variable_scope.VariableAggregation.MEAN)
    self.assertTrue(called[0])
class PartitionInfoTest(test.TestCase):
  """Tests for variable_scope._PartitionInfo validation and helpers."""

  def testConstructorChecks(self):
    """Constructor validates types, matching lengths, and offset < shape."""
    # Invalid arg types.
    with self.assertRaises(TypeError):
      variable_scope._PartitionInfo(full_shape=None, var_offset=[0, 1])
    with self.assertRaises(TypeError):
      variable_scope._PartitionInfo(full_shape=[0, 1], var_offset=None)
    with self.assertRaises(TypeError):
      variable_scope._PartitionInfo(full_shape="foo", var_offset=[0, 1])
    with self.assertRaises(TypeError):
      variable_scope._PartitionInfo(full_shape=[0, 1], var_offset="foo")

    # full_shape and var_offset must have same length.
    with self.assertRaises(ValueError):
      variable_scope._PartitionInfo(full_shape=[0, 1], var_offset=[0])
    # Offset must always be less than shape.
    with self.assertRaises(ValueError):
      variable_scope._PartitionInfo(full_shape=[1, 1], var_offset=[0, 1])

  def testSingleOffset(self):
    """single_offset returns the offset along the (single) sliced axis."""
    partition_info = variable_scope._PartitionInfo(
        full_shape=[9, 3], var_offset=[4, 0])
    self.assertEqual(4, partition_info.single_offset([1, 3]))

    # Tests when the variable isn't partitioned at all.
    partition_info = variable_scope._PartitionInfo(
        full_shape=[9, 3], var_offset=[0, 0])
    self.assertEqual(0, partition_info.single_offset([9, 3]))

  def testSingleSliceDim(self):
    """single_slice_dim finds the axis being sliced, or raises if ambiguous,
    ill-typed, wrong rank, or out of bounds."""
    partition_info = variable_scope._PartitionInfo(
        full_shape=[9, 3], var_offset=[4, 0])
    # Invalid shape.
    with self.assertRaises(TypeError):
      partition_info.single_slice_dim(None)

    # Rank of shape differs from full_shape.
    with self.assertRaises(ValueError):
      partition_info.single_slice_dim([1, 2, 3])

    # Shape is too large given var_offset (4+6 > 9).
    with self.assertRaises(ValueError):
      partition_info.single_slice_dim([6, 3])

    # Multiple possible slice dim from shape.
    with self.assertRaises(ValueError):
      partition_info.single_slice_dim([1, 1])

    partition_info = variable_scope._PartitionInfo(
        full_shape=[9, 3], var_offset=[0, 0])
    self.assertEqual(1, partition_info.single_slice_dim([9, 2]))
    partition_info = variable_scope._PartitionInfo(
        full_shape=[9, 3], var_offset=[4, 0])
    self.assertEqual(0, partition_info.single_slice_dim([2, 3]))
class VariableScopeMultithreadedTest(test.TestCase):
  """Tests of variable-scope behavior across threads sharing one graph.

  Fix: the deprecated ``assertEquals`` alias (removed from unittest in
  Python 3.12) is replaced with ``assertEqual``; behavior is unchanged.
  """

  def testTwoThreadsDisjointScopeEntry(self):
    """Threads entering the same scope serially share graph-level state:
    the second thread cannot re-create the first thread's variable."""

    def thread_fn(i, graph):
      with graph.as_default():
        with variable_scope.variable_scope("foo"):
          if i == 0:
            v = variable_scope.get_variable("v", [])
            self.assertEqual("foo/v:0", v.name)
          else:
            # Any thread after the first one should fail to create variable
            # with the same name.
            with self.assertRaises(ValueError):
              variable_scope.get_variable("v", [])

    graph = ops.get_default_graph()
    threads = [
        threading.Thread(target=thread_fn, args=(
            i,
            graph,
        )) for i in range(2)
    ]

    threads[0].start()
    # Allow thread 0 to finish before starting thread 1.
    threads[0].join()
    threads[1].start()
    threads[1].join()

  def testTwoThreadsNestedScopeEntry(self):
    """Same as above, but thread 0 is held open inside the scope (via
    events) while thread 1 enters it — the clash is still detected."""

    def thread_fn(i, graph, run_event, pause_event):
      with graph.as_default():
        with variable_scope.variable_scope("foo"):
          if i == 0:
            v = variable_scope.get_variable("v", [])
            self.assertEqual("foo/v:0", v.name)
          else:
            # Any thread after the first one should fail to create variable
            # with the same name.
            with self.assertRaises(ValueError):
              variable_scope.get_variable("v", [])
          # Signal the main thread, then stay inside the scope until resumed.
          pause_event.set()
          run_event.wait()

    graph = ops.get_default_graph()
    run_events = [threading.Event() for _ in range(2)]
    pause_events = [threading.Event() for _ in range(2)]
    threads = [
        threading.Thread(
            target=thread_fn, args=(i, graph, run_events[i], pause_events[i]))
        for i in range(2)
    ]

    # Start first thread.
    threads[0].start()
    pause_events[0].wait()
    # Start next thread once the first thread has paused.
    threads[1].start()
    pause_events[1].wait()
    # Resume both threads.
    run_events[0].set()
    run_events[1].set()
    threads[0].join()
    threads[1].join()

  def testReenterMainScope(self):
    """A worker thread can re-enter a scope object created on the main
    thread; outside of it, the main scope's prefix does not apply."""

    def thread_fn(graph, main_thread_scope):
      with graph.as_default():
        # Variable created with main scope will have prefix "main".
        with variable_scope.variable_scope(main_thread_scope):
          with variable_scope.variable_scope("foo"):
            v = variable_scope.get_variable("v", [])
            self.assertEqual("main/foo/v:0", v.name)

        # Variable created outside main scope will not have prefix "main".
        with variable_scope.variable_scope("bar"):
          v = variable_scope.get_variable("v", [])
          self.assertEqual("bar/v:0", v.name)

    graph = ops.get_default_graph()
    with variable_scope.variable_scope("main") as main_thread_scope:
      thread = threading.Thread(
          target=thread_fn, args=(graph, main_thread_scope))
      thread.start()
      thread.join()
if __name__ == "__main__":
  # Run all test cases defined in this module.
  test.main()
| |
"""
Core components for fio-buffer
"""
import copy
import logging
from multiprocessing import cpu_count, Pool
import click
import fiona as fio
from fiona.transform import transform_geom
from shapely.geometry import CAP_STYLE
from shapely.geometry import JOIN_STYLE
from shapely.geometry import mapping
from shapely.geometry import shape
from . import __version__
# Install a default handler so this module's log records are emitted.
logging.basicConfig()
# Module-level logger; its level is adjusted from the CLI verbosity in the
# `buffer` command.
logger = logging.getLogger('fio-buffer')
def _cb_cap_style(ctx, param, value):
    """Click callback mapping the ``--cap-style`` choice onto shapely's
    ``CAP_STYLE`` integer code."""
    del ctx, param  # required by the click callback signature; unused
    # `value` is restricted by click.Choice to a valid CAP_STYLE attribute.
    return getattr(CAP_STYLE, value)
def _cb_join_style(ctx, param, value):
    """Click callback mapping the ``--join-style`` choice onto shapely's
    ``JOIN_STYLE`` integer code."""
    del ctx, param  # required by the click callback signature; unused
    # `value` is restricted by click.Choice to a valid JOIN_STYLE attribute.
    return getattr(JOIN_STYLE, value)
def _cb_res(ctx, param, value):
"""
Click callback to ensure `--res` is `>= 0`.
"""
if value < 0:
raise click.BadParameter("must be a positive value")
return value
def _cb_dist(ctx, param, value):
"""
Click callback to ensure `--distance` can be either a float or a field name.
"""
try:
return float(value)
except ValueError:
return value
def _processor(args):
    """Buffer a single GeoJSON feature.

    Parameters
    ----------
    args : dict
        feat : dict
            A GeoJSON feature to process.
        src_crs : str or dict
            The geometry's CRS.
        buf_crs : str or dict
            Apply buffer after reprojecting to this CRS.
        dst_crs : str or dict
            Reproject buffered geometry to this CRS before returning.
        skip_failures : bool
            If True then Exceptions don't stop processing.
        buf_args : dict
            Keyword arguments for the buffer operation.

    Returns
    -------
    dict
        GeoJSON feature with updated geometry (None on a skipped failure).
    """
    feat = args['feat']
    src_crs = args['src_crs']
    buf_crs = args['buf_crs']
    dst_crs = args['dst_crs']
    skip_failures = args['skip_failures']
    buf_args = args['buf_args']

    distance = buf_args['distance']
    if not isinstance(distance, (float, int)):
        # --distance named a field: read the per-feature value.
        field_val = feat['properties'][distance]
        if field_val is None:
            # Null distance -> return the feature untouched.
            return feat
        buf_args['distance'] = field_val

    try:
        # src_crs -> buf_crs
        geom_in_buf_crs = transform_geom(
            src_crs, buf_crs, feat['geometry'],
            antimeridian_cutting=True
        )

        # buffering operation
        buffered = shape(geom_in_buf_crs).buffer(**buf_args)

        # buf_crs -> dst_crs
        feat['geometry'] = transform_geom(
            buf_crs, dst_crs, mapping(buffered),
            antimeridian_cutting=True
        )
        return feat

    except Exception:
        logger.exception("Feature with ID %s failed during buffering", feat.get('id'))
        if not skip_failures:
            raise
@click.command(short_help="Buffer geometries on all sides by a fixed distance.")
@click.version_option(prog_name='fio-buffer', version=__version__)
@click.argument('infile', required=True)
@click.argument('outfile', required=True)
@click.option(
'-f', '--format', '--driver', metavar='NAME',
help="Output driver name. Derived from the input datasource if not given."
)
@click.option(
'--cap-style', type=click.Choice(['flat', 'round', 'square']),
default='round', show_default=True,
callback=_cb_cap_style, help="Where geometries terminate, use this style."
)
@click.option(
'--join-style', type=click.Choice(['round', 'mitre', 'bevel']),
default='round', show_default=True,
callback=_cb_join_style, help="Where geometries touch, use this style."
)
@click.option(
'--res', type=click.INT, callback=_cb_res, default=16, show_default=True,
help="Resolution of the buffer around each vertex of the geometry."
)
@click.option(
'--mitre-limit', type=click.FLOAT, default=5.0, show_default=True,
help="When using a mitre join, limit the maximum length of the join corner according to "
"this ratio."
)
@click.option(
'--distance', metavar='FLOAT|FIELD', required=True, callback=_cb_dist,
help="Buffer distance or field containing distance values. Units match --buf-crs. "
"When buffering with a field, feature's with a null value are unaltered."
)
@click.option(
'--src-crs', help="Specify CRS for input data. Not needed if set in input file."
)
@click.option(
'--buf-crs', help="Perform buffer operations in a different CRS. [default: --src-crs]"
)
@click.option(
'--dst-crs', help="Reproject geometries to a different CRS before writing. Must be "
"combined with --buf-crs. [default: --src-crs]"
)
@click.option(
'--geom-type', 'output_geom_type', default='MultiPolygon',
metavar='GEOMTYPE', show_default=True,
help="Output layer's geometry type."
)
@click.option(
'--skip-failures', is_flag=True,
help="Skip geometries that fail somewhere in the processing pipeline."
)
@click.option(
'--jobs', type=click.IntRange(1, cpu_count()), default=1,
metavar="CORES", show_default=True,
help="Process geometries in parallel across N cores. Feature ID's and order are not "
"preserved if more that 1 cores are used."
)
@click.pass_context
def buffer(ctx, infile, outfile, driver, cap_style, join_style, res, mitre_limit,
           distance, src_crs, buf_crs, dst_crs, output_geom_type, skip_failures, jobs):
    """
    Geometries can be dilated with a positive distance, eroded with a negative
    distance, and in some cases cleaned or repaired with a distance of 0.
    \b
    Examples
    --------
    Default settings - buffer geometries in the input CRS:
    \b
        $ fio buffer in.geojson out.geojson --distance 10
    Dynamically buffer geometries by a distance stored in the field 'magnitude'
    and write as GeoJSON:
    \b
        $ fio buffer \\
            in.shp \\
            out.geojson \\
            --driver GeoJSON \\
            --distance magnitude
    Read geometries from one CRS, buffer in another, and then write to a third:
    \b
        $ fio buffer in.shp out.shp \\
            --distance 10 \\
            --buf-crs EPSG:3857 \\
            --dst-crs EPSG:32618
    Control cap style, mitre limit, segment resolution, and join style:
    \b
        $ fio buffer in.geojson out.geojson \\
            --distance 0.1 \\
            --res 5 \\
            --cap-style flat \\
            --join-style mitre \\
            --mitre-limit 0.1\\
    """
    # --dst-crs only has meaning relative to an explicit buffering CRS.
    if dst_crs and not buf_crs:
        raise click.ClickException("Must specify --buf-crs when using --dst-crs.")
    # fio has a -v flag so just use that to set the logging level
    # Extra checks are so this plugin doesn't just completely crash due
    # to upstream changes.
    if isinstance(getattr(ctx, 'obj'), dict):
        logger.setLevel(ctx.obj.get('verbosity', 1))
    with fio.open(infile) as src:
        logger.debug("Resolving CRS fall backs")
        # Fallback chain: explicit --src-crs, else the input file's CRS;
        # --buf-crs and --dst-crs default to the resolved source CRS.
        src_crs = src_crs or src.crs
        buf_crs = buf_crs or src_crs
        dst_crs = dst_crs or src_crs
        if not src_crs:
            raise click.ClickException(
                "CRS is not set in input file. Use --src-crs to specify.")
        logger.debug("src_crs=%s", src_crs)
        logger.debug("buf_crs=%s", buf_crs)
        logger.debug("dst_crs=%s", dst_crs)
        # Output profile: copy of the input's, with driver/CRS overridden and
        # (by default) the schema geometry forced to MultiPolygon, since
        # buffering yields polygonal output.
        meta = copy.deepcopy(src.meta)
        meta.update(
            driver=driver or src.driver,
            crs=dst_crs
        )
        if output_geom_type:
            meta['schema'].update(geometry=output_geom_type)
        logger.debug("Creating output file %s", outfile)
        logger.debug("Meta=%s", meta)
        with fio.open(outfile, 'w', **meta) as dst:
            # Keyword arguments for `<Geometry>.buffer()`
            buf_args = {
                'distance': distance,
                'resolution': res,
                'cap_style': cap_style,
                'join_style': join_style,
                'mitre_limit': mitre_limit
            }
            # A generator that produces the arguments required for `_processor()`
            # Lazy on purpose: features stream through the worker pool rather
            # than being materialized up front.
            task_generator = (
                {
                    'feat': feat,
                    'src_crs': src_crs,
                    'buf_crs': buf_crs,
                    'dst_crs': dst_crs,
                    'skip_failures': skip_failures,
                    'buf_args': buf_args
                } for feat in src)
            logger.debug("Starting processing on %s cores", jobs)
            # imap_unordered trades feature order (and stable IDs) for speed
            # when jobs > 1, as stated in the --jobs help text.
            # NOTE(review): the Pool is never close()d/join()ed — presumably
            # relying on interpreter teardown; confirm this is intentional.
            for o_feat in Pool(jobs).imap_unordered(_processor, task_generator):
                # NOTE(review): a None result appears to mean _processor
                # dropped/failed the feature — verify against _processor.
                if o_feat is not None:
                    try:
                        dst.write(o_feat)
                    except Exception:
                        logger.exception(
                            "Feature with ID %s failed during write", o_feat.get('id'))
                        if not skip_failures:
                            raise
    logger.debug("Finished processing.")
| |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class PublicIPAddressesOperations(object):
"""PublicIPAddressesOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_11_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
    def list_cloud_service_public_ip_addresses(
        self,
        resource_group_name,  # type: str
        cloud_service_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["_models.PublicIPAddressListResult"]
        """Gets information about all public IP addresses on a cloud service level.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param cloud_service_name: The name of the cloud service.
        :type cloud_service_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either PublicIPAddressListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_11_01.models.PublicIPAddressListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        # Optional per-page transform hook supplied by the caller.
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PublicIPAddressListResult"]
        # Map well-known failure statuses to typed exceptions; callers may
        # extend/override via an 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-11-01"
        accept = "application/json"
        # Build the GET request: the first page uses the templated metadata
        # URL; subsequent pages use the service-provided next_link verbatim.
        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list_cloud_service_public_ip_addresses.metadata['url']  # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'cloudServiceName': self._serialize.url("cloud_service_name", cloud_service_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link already embeds any query string.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        # Turn one page response into (continuation token, iterator of items).
        def extract_data(pipeline_response):
            deserialized = self._deserialize('PublicIPAddressListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)
        # Fetch a single page and fail fast on any non-200 status.
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        # ItemPaged drives get_next/extract_data lazily as the caller iterates.
        return ItemPaged(
            get_next, extract_data
        )
    list_cloud_service_public_ip_addresses.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/cloudServices/{cloudServiceName}/publicipaddresses'}  # type: ignore
    def list_cloud_service_role_instance_public_ip_addresses(
        self,
        resource_group_name,  # type: str
        cloud_service_name,  # type: str
        role_instance_name,  # type: str
        network_interface_name,  # type: str
        ip_configuration_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["_models.PublicIPAddressListResult"]
        """Gets information about all public IP addresses in a role instance IP configuration in a cloud
        service.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param cloud_service_name: The name of the cloud service.
        :type cloud_service_name: str
        :param role_instance_name: The name of role instance.
        :type role_instance_name: str
        :param network_interface_name: The network interface name.
        :type network_interface_name: str
        :param ip_configuration_name: The IP configuration name.
        :type ip_configuration_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either PublicIPAddressListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_11_01.models.PublicIPAddressListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        # Optional per-page transform hook supplied by the caller.
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PublicIPAddressListResult"]
        # Typed exceptions for well-known statuses; extensible via 'error_map'.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-11-01"
        accept = "application/json"
        # First page: templated URL; later pages: opaque next_link as-is.
        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list_cloud_service_role_instance_public_ip_addresses.metadata['url']  # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'cloudServiceName': self._serialize.url("cloud_service_name", cloud_service_name, 'str'),
                    'roleInstanceName': self._serialize.url("role_instance_name", role_instance_name, 'str'),
                    'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
                    'ipConfigurationName': self._serialize.url("ip_configuration_name", ip_configuration_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        # One page response -> (continuation token, iterator of items).
        def extract_data(pipeline_response):
            deserialized = self._deserialize('PublicIPAddressListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)
        # Fetch and validate one page.
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return ItemPaged(
            get_next, extract_data
        )
    list_cloud_service_role_instance_public_ip_addresses.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/cloudServices/{cloudServiceName}/roleInstances/{roleInstanceName}/networkInterfaces/{networkInterfaceName}/ipconfigurations/{ipConfigurationName}/publicipaddresses'}  # type: ignore
    def get_cloud_service_public_ip_address(
        self,
        resource_group_name,  # type: str
        cloud_service_name,  # type: str
        role_instance_name,  # type: str
        network_interface_name,  # type: str
        ip_configuration_name,  # type: str
        public_ip_address_name,  # type: str
        expand=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.PublicIPAddress"
        """Get the specified public IP address in a cloud service.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param cloud_service_name: The name of the cloud service.
        :type cloud_service_name: str
        :param role_instance_name: The role instance name.
        :type role_instance_name: str
        :param network_interface_name: The name of the network interface.
        :type network_interface_name: str
        :param ip_configuration_name: The name of the IP configuration.
        :type ip_configuration_name: str
        :param public_ip_address_name: The name of the public IP Address.
        :type public_ip_address_name: str
        :param expand: Expands referenced resources.
        :type expand: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: PublicIPAddress, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2020_11_01.models.PublicIPAddress
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        # Optional response-transform hook supplied by the caller.
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PublicIPAddress"]
        # Typed exceptions for well-known statuses; extensible via 'error_map'.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-11-01"
        accept = "application/json"
        # Construct URL
        url = self.get_cloud_service_public_ip_address.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'cloudServiceName': self._serialize.url("cloud_service_name", cloud_service_name, 'str'),
            'roleInstanceName': self._serialize.url("role_instance_name", role_instance_name, 'str'),
            'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
            'ipConfigurationName': self._serialize.url("ip_configuration_name", ip_configuration_name, 'str'),
            'publicIpAddressName': self._serialize.url("public_ip_address_name", public_ip_address_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # $expand is only sent when the caller asked for referenced resources.
        if expand is not None:
            query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # Only 200 is a success for this operation.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('PublicIPAddress', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get_cloud_service_public_ip_address.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/cloudServices/{cloudServiceName}/roleInstances/{roleInstanceName}/networkInterfaces/{networkInterfaceName}/ipconfigurations/{ipConfigurationName}/publicipaddresses/{publicIpAddressName}'}  # type: ignore
def _delete_initial(
self,
resource_group_name, # type: str
public_ip_address_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-11-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'publicIpAddressName': self._serialize.url("public_ip_address_name", public_ip_address_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPAddresses/{publicIpAddressName}'} # type: ignore
    def begin_delete(
        self,
        resource_group_name,  # type: str
        public_ip_address_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller[None]
        """Deletes the specified public IP address.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param public_ip_address_name: The name of the public IP address.
        :type public_ip_address_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling.
        Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # With a continuation token we resume an existing LRO instead of
        # issuing a fresh initial DELETE.
        if cont_token is None:
            raw_result = self._delete_initial(
                resource_group_name=resource_group_name,
                public_ip_address_name=public_ip_address_name,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # The remaining kwargs are forwarded to the polling method below, so
        # strip per-request-only options first.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        # Delete returns no body; only the caller's cls hook can produce output.
        def get_long_running_output(pipeline_response):
            if cls:
                return cls(pipeline_response, None, {})
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'publicIpAddressName': self._serialize.url("public_ip_address_name", public_ip_address_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        # polling: True -> default ARM poller (final state via 'location'
        # header); False -> no polling; anything else -> caller's own method.
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPAddresses/{publicIpAddressName}'}  # type: ignore
def get(
self,
resource_group_name, # type: str
public_ip_address_name, # type: str
expand=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> "_models.PublicIPAddress"
"""Gets the specified public IP address in a specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param public_ip_address_name: The name of the public IP address.
:type public_ip_address_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PublicIPAddress, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_11_01.models.PublicIPAddress
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPAddress"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-11-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'publicIpAddressName': self._serialize.url("public_ip_address_name", public_ip_address_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('PublicIPAddress', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPAddresses/{publicIpAddressName}'} # type: ignore
def _create_or_update_initial(
self,
resource_group_name, # type: str
public_ip_address_name, # type: str
parameters, # type: "_models.PublicIPAddress"
**kwargs # type: Any
):
# type: (...) -> "_models.PublicIPAddress"
cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPAddress"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-11-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'publicIpAddressName': self._serialize.url("public_ip_address_name", public_ip_address_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'PublicIPAddress')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('PublicIPAddress', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('PublicIPAddress', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPAddresses/{publicIpAddressName}'} # type: ignore
    def begin_create_or_update(
        self,
        resource_group_name,  # type: str
        public_ip_address_name,  # type: str
        parameters,  # type: "_models.PublicIPAddress"
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller["_models.PublicIPAddress"]
        """Creates or updates a static or dynamic public IP address.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param public_ip_address_name: The name of the public IP address.
        :type public_ip_address_name: str
        :param parameters: Parameters supplied to the create or update public IP address operation.
        :type parameters: ~azure.mgmt.network.v2020_11_01.models.PublicIPAddress
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling.
        Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either PublicIPAddress or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2020_11_01.models.PublicIPAddress]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PublicIPAddress"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # With a continuation token we resume an existing LRO instead of
        # issuing a fresh initial PUT. cls=lambda keeps the raw
        # PipelineResponse so the poller can read its headers.
        if cont_token is None:
            raw_result = self._create_or_update_initial(
                resource_group_name=resource_group_name,
                public_ip_address_name=public_ip_address_name,
                parameters=parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # The remaining kwargs are forwarded to the polling method below, so
        # strip per-request-only options first.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        # Final-state deserialization used by the poller once the LRO is done.
        def get_long_running_output(pipeline_response):
            deserialized = self._deserialize('PublicIPAddress', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'publicIpAddressName': self._serialize.url("public_ip_address_name", public_ip_address_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        # polling: True -> default ARM poller (final state via the
        # Azure-AsyncOperation header); False -> no polling; else caller's own.
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPAddresses/{publicIpAddressName}'}  # type: ignore
    def update_tags(
        self,
        resource_group_name,  # type: str
        public_ip_address_name,  # type: str
        parameters,  # type: "_models.TagsObject"
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.PublicIPAddress"
        """Updates public IP address tags.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param public_ip_address_name: The name of the public IP address.
        :type public_ip_address_name: str
        :param parameters: Parameters supplied to update public IP address tags.
        :type parameters: ~azure.mgmt.network.v2020_11_01.models.TagsObject
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: PublicIPAddress, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2020_11_01.models.PublicIPAddress
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        # Optional response-transform hook supplied by the caller.
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PublicIPAddress"]
        # Typed exceptions for well-known statuses; extensible via 'error_map'.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-11-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self.update_tags.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'publicIpAddressName': self._serialize.url("public_ip_address_name", public_ip_address_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        # Tags-only update is a PATCH with a serialized TagsObject body.
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'TagsObject')
        body_content_kwargs['content'] = body_content
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # Only 200 is a success for this operation.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('PublicIPAddress', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPAddresses/{publicIpAddressName}'}  # type: ignore
    def list_all(
        self,
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["_models.PublicIPAddressListResult"]
        """Gets all the public IP addresses in a subscription.
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either PublicIPAddressListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_11_01.models.PublicIPAddressListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        # Optional per-page transform hook supplied by the caller.
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PublicIPAddressListResult"]
        # Typed exceptions for well-known statuses; extensible via 'error_map'.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-11-01"
        accept = "application/json"
        # First page: templated URL; later pages: opaque next_link as-is.
        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list_all.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        # One page response -> (continuation token, iterator of items).
        def extract_data(pipeline_response):
            deserialized = self._deserialize('PublicIPAddressListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)
        # Fetch a single page and fail fast on any non-200 status.
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        # ItemPaged drives get_next/extract_data lazily as the caller iterates.
        return ItemPaged(
            get_next, extract_data
        )
    list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/publicIPAddresses'}  # type: ignore
def list(
    self,
    resource_group_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> Iterable["_models.PublicIPAddressListResult"]
    # NOTE: the method name shadows the builtin `list`, but it is the
    # generated client's public API and must not be renamed.
    """Gets all public IP addresses in a resource group.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either PublicIPAddressListResult or the result of cls(response)
    :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_11_01.models.PublicIPAddressListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.PublicIPAddressListResult"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    # Caller-supplied error_map entries override the defaults above.
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-11-01"
    accept = "application/json"

    def prepare_request(next_link=None):
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        if not next_link:
            # First page: build the URL from this operation's metadata template.
            url = self.list.metadata['url']  # type: ignore
            path_format_arguments = {
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            # Subsequent pages: nextLink is already a complete URL.
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    def extract_data(pipeline_response):
        # Deserialize one page and hand back (next_link, iterator-of-items).
        deserialized = self._deserialize('PublicIPAddressListResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, iter(list_of_elem)

    def get_next(next_link=None):
        # Fetch one page through the pipeline; called lazily by ItemPaged.
        request = prepare_request(next_link)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        return pipeline_response

    return ItemPaged(
        get_next, extract_data
    )
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPAddresses'}  # type: ignore
def list_virtual_machine_scale_set_public_ip_addresses(
    self,
    resource_group_name,  # type: str
    virtual_machine_scale_set_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> Iterable["_models.PublicIPAddressListResult"]
    """Gets information about all public IP addresses on a virtual machine scale set level.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param virtual_machine_scale_set_name: The name of the virtual machine scale set.
    :type virtual_machine_scale_set_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either PublicIPAddressListResult or the result of cls(response)
    :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_11_01.models.PublicIPAddressListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.PublicIPAddressListResult"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    # NOTE(review): pinned to the older 2018-10-01 api-version even though this
    # module is v2020_11_01 -- presumably required by the Microsoft.Compute VMSS
    # endpoint; confirm against the REST specification before changing.
    api_version = "2018-10-01"
    accept = "application/json"

    def prepare_request(next_link=None):
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        if not next_link:
            # First page: build the URL from this operation's metadata template.
            url = self.list_virtual_machine_scale_set_public_ip_addresses.metadata['url']  # type: ignore
            path_format_arguments = {
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            # Subsequent pages: nextLink is already a complete URL.
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    def extract_data(pipeline_response):
        # Deserialize one page and hand back (next_link, iterator-of-items).
        deserialized = self._deserialize('PublicIPAddressListResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, iter(list_of_elem)

    def get_next(next_link=None):
        # Fetch one page through the pipeline; called lazily by ItemPaged.
        request = prepare_request(next_link)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        return pipeline_response

    return ItemPaged(
        get_next, extract_data
    )
list_virtual_machine_scale_set_public_ip_addresses.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/publicipaddresses'}  # type: ignore
def list_virtual_machine_scale_set_vm_public_ip_addresses(
    self,
    resource_group_name,  # type: str
    virtual_machine_scale_set_name,  # type: str
    virtualmachine_index,  # type: str
    network_interface_name,  # type: str
    ip_configuration_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> Iterable["_models.PublicIPAddressListResult"]
    """Gets information about all public IP addresses in a virtual machine IP configuration in a
    virtual machine scale set.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param virtual_machine_scale_set_name: The name of the virtual machine scale set.
    :type virtual_machine_scale_set_name: str
    :param virtualmachine_index: The virtual machine index.
    :type virtualmachine_index: str
    :param network_interface_name: The network interface name.
    :type network_interface_name: str
    :param ip_configuration_name: The IP configuration name.
    :type ip_configuration_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either PublicIPAddressListResult or the result of cls(response)
    :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_11_01.models.PublicIPAddressListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.PublicIPAddressListResult"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    # NOTE(review): pinned to 2018-10-01 like the other VMSS-scoped operations
    # in this client -- confirm against the REST specification before changing.
    api_version = "2018-10-01"
    accept = "application/json"

    def prepare_request(next_link=None):
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        if not next_link:
            # First page: build the URL from this operation's metadata template.
            url = self.list_virtual_machine_scale_set_vm_public_ip_addresses.metadata['url']  # type: ignore
            path_format_arguments = {
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
                'virtualmachineIndex': self._serialize.url("virtualmachine_index", virtualmachine_index, 'str'),
                'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
                'ipConfigurationName': self._serialize.url("ip_configuration_name", ip_configuration_name, 'str'),
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            # Subsequent pages: nextLink is already a complete URL.
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    def extract_data(pipeline_response):
        # Deserialize one page and hand back (next_link, iterator-of-items).
        deserialized = self._deserialize('PublicIPAddressListResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, iter(list_of_elem)

    def get_next(next_link=None):
        # Fetch one page through the pipeline; called lazily by ItemPaged.
        request = prepare_request(next_link)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        return pipeline_response

    return ItemPaged(
        get_next, extract_data
    )
list_virtual_machine_scale_set_vm_public_ip_addresses.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces/{networkInterfaceName}/ipconfigurations/{ipConfigurationName}/publicipaddresses'}  # type: ignore
def get_virtual_machine_scale_set_public_ip_address(
    self,
    resource_group_name,  # type: str
    virtual_machine_scale_set_name,  # type: str
    virtualmachine_index,  # type: str
    network_interface_name,  # type: str
    ip_configuration_name,  # type: str
    public_ip_address_name,  # type: str
    expand=None,  # type: Optional[str]
    **kwargs  # type: Any
):
    # type: (...) -> "_models.PublicIPAddress"
    """Get the specified public IP address in a virtual machine scale set.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param virtual_machine_scale_set_name: The name of the virtual machine scale set.
    :type virtual_machine_scale_set_name: str
    :param virtualmachine_index: The virtual machine index.
    :type virtualmachine_index: str
    :param network_interface_name: The name of the network interface.
    :type network_interface_name: str
    :param ip_configuration_name: The name of the IP configuration.
    :type ip_configuration_name: str
    :param public_ip_address_name: The name of the public IP Address.
    :type public_ip_address_name: str
    :param expand: Expands referenced resources.
    :type expand: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: PublicIPAddress, or the result of cls(response)
    :rtype: ~azure.mgmt.network.v2020_11_01.models.PublicIPAddress
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.PublicIPAddress"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2018-10-01"
    accept = "application/json"

    # Serialize each path segment in declaration order, then expand the
    # URL template from this operation's metadata.
    path_segments = [
        ('resourceGroupName', "resource_group_name", resource_group_name),
        ('virtualMachineScaleSetName', "virtual_machine_scale_set_name", virtual_machine_scale_set_name),
        ('virtualmachineIndex', "virtualmachine_index", virtualmachine_index),
        ('networkInterfaceName', "network_interface_name", network_interface_name),
        ('ipConfigurationName', "ip_configuration_name", ip_configuration_name),
        ('publicIpAddressName', "public_ip_address_name", public_ip_address_name),
        ('subscriptionId', "self._config.subscription_id", self._config.subscription_id),
    ]
    path_format_arguments = {
        key: self._serialize.url(attr_name, value, 'str')
        for key, attr_name, value in path_segments
    }
    url = self._client.format_url(
        self.get_virtual_machine_scale_set_public_ip_address.metadata['url'],  # type: ignore
        **path_format_arguments)

    # Query string: api-version always, $expand only when requested.
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
    if expand is not None:
        query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')

    # Only the Accept header is needed for this GET.
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.get(url, query_parameters, header_parameters)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = self._deserialize('PublicIPAddress', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
get_virtual_machine_scale_set_public_ip_address.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces/{networkInterfaceName}/ipconfigurations/{ipConfigurationName}/publicipaddresses/{publicIpAddressName}'}  # type: ignore
| |
# Copyright 2016-2017 Capital One Services, LLC
# Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
import datetime
from dateutil.parser import parse as date_parse
from .common import BaseTest
from .test_offhours import mock_datetime_now
from time import sleep
class ElasticBeanstalkEnvironment(BaseTest):
    """Query and value-filter tests for the elasticbeanstalk-environment resource."""

    def test_resource_manager(self):
        session_factory = self.replay_flight_data("test_elasticbeanstalk_describe_envs")
        policy = self.load_policy(
            {"name": "eb-env-query", "resource": "elasticbeanstalk-environment"},
            session_factory=session_factory,
        )
        # Flight data contains two environments.
        self.assertEqual(len(policy.run()), 2)

    def test_eb_env_regex(self):
        session_factory = self.replay_flight_data("test_elasticbeanstalk_describe_envs")
        cname_filter = {
            "type": "value",
            "key": "CNAME",
            "op": "regex",
            "value": ".*inactive.*",
        }
        policy = self.load_policy(
            {
                "name": "eb-find-inactive",
                "resource": "elasticbeanstalk-environment",
                "filters": [cname_filter],
            },
            session_factory=session_factory,
        )
        # Only one of the two environments has an "inactive" CNAME.
        self.assertEqual(len(policy.run()), 1)

    def test_eb_env_uptime(self):
        session_factory = self.replay_flight_data("test_elasticbeanstalk_describe_envs")
        age_filter = {
            "type": "value",
            "key": "DateCreated",
            "value": 1,
            "value_type": "age",
            "op": "greater-than",
        }
        policy = self.load_policy(
            {
                "name": "eb-find-inactive",
                "resource": "elasticbeanstalk-environment",
                "filters": [age_filter],
            },
            session_factory=session_factory,
        )
        # Pin "now" so the age comparison matches the recorded flight data.
        with mock_datetime_now(date_parse("2017-12-19"), datetime):
            matched = policy.run()
        self.assertEqual(len(matched), 2)
class EbEnvBaseTest(BaseTest):
    """Shared helpers for Elastic Beanstalk environment tests."""

    def query_env_status(self, session, env_name):
        # Return the environment's Status string, or None when it no longer exists.
        client = session.client("elasticbeanstalk")
        described = client.describe_environments(EnvironmentNames=[env_name])
        environments = described["Environments"]
        return environments[0]["Status"] if environments else None

    def env_tags_dict(self, session, env_arn):
        # Fetch the environment's resource tags as a plain {key: value} dict.
        client = session.client("elasticbeanstalk")
        response = client.list_tags_for_resource(ResourceArn=env_arn)
        return {tag["Key"]: tag["Value"] for tag in response["ResourceTags"]}
class TestTerminate(EbEnvBaseTest):
    """The terminate action should move a Ready environment into Terminating."""

    def test_eb_env_terminate(self):
        env_name = "c7n-eb-tag-test-inactive"
        session_factory = self.replay_flight_data("test_eb_env_terminate")
        # Precondition: the target environment starts out Ready.
        self.assertEqual(self.query_env_status(session_factory(), env_name), "Ready")

        policy = self.load_policy(
            {
                "name": "eb-env-term",
                "resource": "elasticbeanstalk-environment",
                "filters": [{"EnvironmentName": env_name}],
                "actions": [{"type": "terminate"}],
            },
            session_factory=session_factory,
        )
        matched = policy.run()

        self.assertEqual(len(matched), 1)
        self.assertEqual(matched[0]["EnvironmentName"], env_name)
        # The action should have kicked off termination.
        self.assertEqual(self.query_env_status(session_factory(), env_name), "Terminating")
class TestEBEnvTagging(EbEnvBaseTest):
    """Tag / mark-for-op / remove-tag round trips against a replayed EB environment.

    The three tests previously triplicated the policy boilerplate, the
    replay-date mocking, and the wait-until-Ready loop; that plumbing now
    lives in the two private helpers below. Behavior is unchanged.
    """

    ENV_NAME = "c7n-eb-tag-test-inactive"
    ENV_ARN = ("arn:aws:elasticbeanstalk:us-east-1:012345678901:"
               "environment/re-jenkins/%s" % ENV_NAME)
    # Matches only the "inactive" environment in the recorded flight data.
    CNAME_FILTER = {
        "type": "value",
        "key": "CNAME",
        "op": "regex",
        "value": ".*inactive.*",
    }

    def _run_tag_policy(self, factory, name, actions):
        """Run a tagging policy against ENV_NAME and assert it matched exactly it.

        When replaying, "now" is pinned to the recording date so date-sensitive
        evaluation (e.g. mark-for-op tag values) matches the flight data.
        """
        p = self.load_policy(
            {
                "name": name,
                "resource": "elasticbeanstalk-environment",
                "filters": [self.CNAME_FILTER],
                "actions": actions,
            },
            session_factory=factory,
        )
        if self.recording:
            resources = p.run()
        else:
            with mock_datetime_now(date_parse("2017-11-10"), datetime):
                resources = p.run()
        self.assertEqual(len(resources), 1)
        self.assertEqual(resources[0]["EnvironmentName"], self.ENV_NAME)
        return resources

    def _wait_until_ready(self, factory, initial_delay):
        """Block until the environment reports Ready; sleep only when recording.

        In replay mode the recorded response is expected to already be Ready,
        so the loop exits immediately without sleeping.
        """
        if self.recording:
            sleep(initial_delay)
        while self.query_env_status(factory(), self.ENV_NAME) != "Ready":
            if self.recording:
                sleep(30)

    def test_tag_delayed(self):
        factory = self.replay_flight_data("test_elasticbeanstalk_env_tag_delayed")
        self._run_tag_policy(
            factory,
            "eb-tag-delayed",
            [
                {
                    "type": "mark-for-op",
                    "op": "terminate",
                    "days": 7,
                    "tag": "c7n-eb-tag-test",
                }
            ],
        )
        self._wait_until_ready(factory, 4)
        # 2017-11-10 (mocked now) ... wait, the recorded op date is 2017/12/12.
        self.assertEqual(
            self.env_tags_dict(factory(), self.ENV_ARN).get("c7n-eb-tag-test"),
            "Resource does not meet policy: terminate@2017/12/12",
        )

    def test_tag(self):
        factory = self.replay_flight_data("test_elasticbeanstalk_env_tag")
        self._run_tag_policy(
            factory,
            "eb-tag",
            [{"type": "tag", "key": "tagTestKey", "value": "tagTestValue"}],
        )
        self._wait_until_ready(factory, 5)
        self.assertEqual(
            self.env_tags_dict(factory(), self.ENV_ARN).get("tagTestKey"),
            "tagTestValue",
        )

    def test_unmark(self):
        factory = self.replay_flight_data("test_elasticbeanstalk_env_unmark")
        self._run_tag_policy(
            factory,
            "eb-tag",
            [{"type": "remove-tag", "tags": ["tagTestKey"]}],
        )
        self._wait_until_ready(factory, 5)
        self.assertIsNone(self.env_tags_dict(factory(), self.ENV_ARN).get("tagTestKey"))
| |
import unittest
from learners import *
import model
# Per-count log-probability increments; looks like the KT estimator's
# log((n + 1/2) / (n + 1)) sequence -- kept exactly as log(a) - log(b)
# so the floating-point values are bit-identical to the original.
kt_vals = {symbol_count: log(symbol_count + 0.5) - log(symbol_count + 1)
           for symbol_count in range(32)}
def test_partition_bounds():
    """A depth-4 PTW tree walks through the expected child partitions and
    refuses a 17th symbol (2**4 = 16 is its capacity)."""
    tree = PTW(KT, depth=4)
    assert tree.get_child_string() == ""

    # Expected child rendering after each of the 16 updates (binary-counter pattern).
    expected_children = [
        "PTW^0",
        "PTW^1",
        "PTW^1 PTW^0",
        "PTW^2",
        "PTW^2 PTW^0",
        "PTW^2 PTW^1",
        "PTW^2 PTW^1 PTW^0",
        "PTW^3",
        "PTW^3 PTW^0",
        "PTW^3 PTW^1",
        "PTW^3 PTW^1 PTW^0",
        "PTW^3 PTW^2",
        "PTW^3 PTW^2 PTW^0",
        "PTW^3 PTW^2 PTW^1",
        "PTW^3 PTW^2 PTW^1 PTW^0",
        "PTW^4",
    ]
    for step, expected in enumerate(expected_children):
        tree.update(0)
        assert tree.get_child_string() == expected
        if step == 0:
            # First symbol under KT costs exactly log(1/2).
            assert tree.log_prob == log(0.5)
        elif step == 1:
            assert tree.completed_log_probs[0] == 0.0
            assert tree.completed_log_probs[1] != 0.0

    # The tree is full: one more update must be rejected.
    try:
        tree.update(0)
        assert False
    except ValueError:
        assert True
def test_depth_differences():
    """Deeper PTW trees resist splitting, so they pay less total loss on a
    constant stream; completed internal nodes agree across depths."""
    shallow = PTW(KT, depth=2)
    medium = PTW(KT, depth=4)
    deep = PTW(KT, depth=8)

    def feed_all(count):
        for _ in range(count):
            shallow.update(0); medium.update(0); deep.update(0)

    def check_loss_ordering():
        # greater depth means greater resistance to splitting
        assert shallow.total_loss > medium.total_loss
        assert medium.total_loss > deep.total_loss
        assert deep.total_loss > -deep.model_log_probs[0]

    feed_all(2)
    check_loss_ordering()
    feed_all(2)
    check_loss_ordering()

    # but our internal nodes are similar
    assert shallow.log_prob == medium.completed_log_probs[2]

    for _ in range(4):
        medium.update(0); deep.update(0)
    assert medium.total_loss > deep.total_loss
    assert deep.total_loss > -deep.model_log_probs[0]
    assert medium.completed_log_probs[4] == deep.completed_log_probs[4]
def test_setup():
    """A fresh PTW learner is uniform over the binary alphabet."""
    learner = PTW(KT, depth=2)
    for symbol in (0, 1):
        assert learner.predict(symbol) == 0.5
def test_first_steps_depth():
    """After one 0, the base KT model predicts 3/4 for another 0, while the
    PTW mixture stays strictly more cautious."""
    learner = PTW(KT, depth=5)
    learner.update(0)
    assert learner.log_prob == log(0.5)
    assert learner.models[0].predict(0) == 0.75
    assert learner.predict(0) < 0.75
def test_prob_sum():
    """The predictive distribution over {0, 1} stays normalized as we update."""
    learner = PTW(KT, depth=12)
    assert approx(learner.predict(0) + learner.predict(1), 1)

    learner.update(0)
    assert approx(learner.predict(0) + learner.predict(1), 1)

    learner.update(0)
    print(learner.predict(0), learner.predict(1),
          learner.predict(0) + learner.predict(1))
    # Slightly looser tolerance after two updates.
    assert approx(learner.predict(0) + learner.predict(1), 1, precision=4)
def test_model_update():
    """update() must return the same log-loss as log_predict() for the next
    symbol, and this implementation must track the reference in `model`."""
    learner = PTW(KT)
    reference = model.PTW(model.KT)
    for _ in range(16):
        assert approx(reference.log_predict(0), learner.log_predict(0))
        assert reference.log_predict(0) == reference.update(0)
        assert learner.log_predict(0) == learner.update(0)
def test_improved_model():
    """After observing a symbol, the learner should assign it a higher
    probability, and the distribution over {0, 1} should stay normalized."""
    pt = PTW(KT, depth=12)
    for _ in range(10):
        p0 = pt.predict(0)
        pt.update(0)
        assert pt.predict(0) > p0
        # BUG FIX: the original read `p1 = pt.predict(1),` -- the stray
        # trailing comma made p1 a 1-tuple, so `p1 + pt.predict(0)` and
        # `pt.predict(1) > p1` below raised TypeError under Python 3.
        p1 = pt.predict(1)
        assert approx(p1 + pt.predict(0), 1, precision=4)
        pt.update(1)
        assert approx(pt.predict(1) + pt.predict(0), 1, precision=8)
        assert pt.predict(1) > p1
def test_ptw_cost():
    """PTW pays a small structural penalty relative to plain KT when the
    source never switches; both implementations must agree throughout."""
    ptw_learner = PTW(KT, depth=5)
    reference = model.PTW(5, Base=model.KT)
    kt_learner = KT()

    ptw_learner.update(0); kt_learner.update(0); reference.update(0)
    for step in range(11):
        assert approx(ptw_learner.update(0), reference.update(0))
        kt_learner.update(0)
        print(step + 1, ptw_learner.predict(0), kt_learner.predict(0),
              reference.predict(0))
        assert approx(ptw_learner.log_prob, reference.log_prob)
        assert ptw_learner.model_log_probs[0] == kt_learner.log_prob
        # Both PTW variants should sit strictly below plain KT ...
        assert reference.predict(0) < kt_learner.predict(0)
        assert approx(reference.predict(0), ptw_learner.predict(0))
        assert ptw_learner.predict(0) < kt_learner.predict(0)
        # ... while still favoring the observed symbol and staying normalized.
        assert ptw_learner.predict(0) > ptw_learner.predict(1)
        assert approx(ptw_learner.predict(0) + ptw_learner.predict(1), 1)
        assert approx(kt_learner.predict(0) + kt_learner.predict(1), 1)
def test_all_sequences_sum():
    """
    Generate all possible k-length binary sequences. Calculate the log prob of them all.
    Make sure they sum to 1
    """
    # NOTE(review): intentionally unimplemented -- the body is only this
    # docstring, so pytest collects it as a trivially passing test.
    # TODO: implement the exhaustive-sum check described above.
def test_compare_kt():
    """The local KT implementation must track the reference model.KT in both
    accumulated loss and per-symbol predictions."""
    reference = model.KT()
    candidate = KT()
    reference_loss = 0
    candidate_loss = 0
    for _ in range(16):
        reference_loss += reference.update(0)
        candidate_loss += candidate.update(0)
        assert approx(candidate_loss, reference_loss, precision=15)
        assert approx(reference.predict(0), candidate.predict(0))
        assert approx(reference.predict(1), candidate.predict(1))
        assert approx(candidate.predict(0) + candidate.predict(1), 1)
def test_compare_ptw_updates():
    """Per-step update losses must match the reference PTW at depth 13."""
    reference = model.PTW(13, Base=model.KT)
    candidate = PTW(KT, depth=13)
    for step in range(128):
        print(step)
        assert approx(reference.update(0), candidate.update(0))
def test_compare_ptw():
    """Small depths must match the reference model exactly (including the
    ValueError once the tree is full); depth 12 must match to precision."""

    def check_exact(depth, steps):
        # Exact agreement for `steps` updates, then the candidate must refuse more.
        reference = model.PTW(depth, Base=model.KT)
        candidate = PTW(KT, depth=depth)
        for _ in range(steps):
            assert reference.predict(0) == candidate.predict(0)
            assert reference.update(0) == candidate.update(0)
        try:
            candidate.update(0)
            assert False
        except ValueError:
            assert True

    check_exact(1, 2)
    check_exact(2, 4)

    reference = model.PTW(12, Base=model.KT)
    candidate = PTW(KT, depth=12)
    # 12 approximate steps (originally a loop of 4 followed by one of 2*4).
    for _ in range(12):
        assert approx(reference.predict(0), candidate.predict(0))
        assert approx(reference.update(0), candidate.update(0))
        assert approx(reference.log_prob, candidate.log_prob)
def test_log_store():
    """The local LogStore must index identically to the reference model.LogStore."""
    reference = model.LogStore()
    candidate = LogStore()
    for value in range(64):
        reference.add(value)
        candidate.add(value)
    assert all(reference[idx] == candidate[idx] for idx in range(len(reference)))
class DebugModel:
    """Stand-in "model" used by the (currently disabled) DebugPTL tests.

    Instead of probabilities it records the (first, last) data values it has
    seen as ``bounds``, so partition boundaries can be rendered as strings
    like ``0:3`` or ``0:1_2:3`` (the latter when left/right split points are
    supplied).
    """

    def __init__(self, t=None, tp1=None, left=None, right=None):
        if tp1 is None:
            tp1 = t
        self.bounds = (t, tp1)
        self.loss_bound = t
        self.left = left
        self.right = right
        # BUG FIX: the original used a bare `except:`, which swallowed every
        # exception. The only expected failure here is None arithmetic when no
        # bounds were given, which raises TypeError.
        try:
            self.num_steps = tp1 - t + 1
        except TypeError:
            self.num_steps = 0

    def update(self, data):
        # First observation fixes both bounds; later ones extend the upper bound.
        if self.bounds[0] is None:
            self.bounds = (data, data)
        else:
            self.bounds = (self.bounds[0], data)
        self.num_steps += 1

    @property
    def log_prob(self):
        # NOTE(review): unusually, this returns a DebugModel snapshot rather
        # than a number; the disabled DebugPTL code relies on that, so the
        # behavior is preserved deliberately.
        if self.num_steps == 0:
            return DebugModel()
        return DebugModel(*self.bounds)

    def __repr__(self):
        if self.left is None:
            return "{}:{}".format(*self.bounds)
        # Renders "lower:left_right:upper" for a merged partition.
        return "{2}:{0}_{1}:{3}".format(self.left, self.right, *self.bounds)

    def __len__(self):
        return self.num_steps
'''
Defunct at the moment: DebugPTL and its tests are deliberately disabled by
being wrapped in this string literal; they depend on a
`calculate_partition_loss` hook that PTW no longer exposes.
class DebugPTL(PTW):
def calculate_partition_loss(self, new_model, left_loss, new_loss):
if new_loss:
return DebugModel(left_loss.bounds[0], new_loss.bounds[1],
left=left_loss.bounds[1], right=new_loss.bounds[0])
else:
return DebugModel(*left_loss.bounds)
def test_debug_model():
t = DebugModel()
assert str(t) == "None:None"
assert len(t) == 0
t.update(0)
assert str(t) == "0:0"
assert len(t) == 1
t.update(1)
assert str(t) == "0:1"
assert len(t) == 2
def test_partition_list():
p = DebugPTL(DebugModel, depth=5)
p.update(0)
assert str(p._models) == "[0:0]"
assert str(p._losses) == "[0:0]"
p.update(1)
assert str(p._models) == "[0:1]"
assert str(p._losses) == "[0:0_1:1]"
p.update(2)
assert str(p._models) == "[0:2, 2:2]"
assert str(p._losses) == "[0:0_1:1, 2:2]"
p.update(3)
assert str(p._models) == "[0:3]"
assert str(p._losses) == "[0:1_2:3]"
p.update(4)
assert str(p._models) == "[0:4, 4:4]"
assert str(p._losses) == "[0:1_2:3, 4:4]"
p.update(5)
assert str(p._models) == "[0:5, 4:5]"
assert str(p._losses) == "[0:1_2:3, 4:4_5:5]"
p.update(6)
assert str(p._models) == "[0:6, 4:6, 6:6]"
assert str(p._losses) == "[0:1_2:3, 4:4_5:5, 6:6]"
for i in range(7, 15):
p.update(i)
assert str(p._models) == "[0:14, 8:14, 12:14, 14:14]"
assert str(p._losses) == "[0:3_4:7, 8:9_10:11, 12:12_13:13, 14:14]"
p.update(15)
assert str(p._models) == "[0:15]"
assert str(p._losses) == "[0:7_8:15]"
'''
'''
# Very old tests targeting the obsolete PTWd API, deliberately disabled by
# being wrapped in this string literal and retained for reference only.
class PTWdValues(unittest.TestCase):
global sample_seq, ktp, pr
sample_seq = {'empty': (),
'single': (1,),
'single0': (0,),
'flipped': (1, 0),
'repeated': (1, 1),
'alternating': (1, 0, 1, 0),
'three': (1, 1, 1),
'four': (1, 1, 1, 1),
'five': (1, 1, 1, 1, 1),
'six': (1, 1, 1, 1, 1, 1),
'eight': (1, 1, 1, 1, 1, 1, 1, 1)}
ktp = {k: kt.KTModel(v).get_prob()
for k, v in sample_seq.items()}
pr = {'empty': [1.0 for _ in range(5)],
'single': [ktp['single'] for _ in range(5)],
'single0': [ktp['single'] for _ in range(5)],
'flipped': [PTWd.quick_calc(i,
ktp['flipped'],
ktp['single'],
ktp['single'])
for i in range(5)],
'repeated': [PTWd.quick_calc(i,
ktp['repeated'],
ktp['single'],
ktp['single'])
for i in range(5)]}
pr['alternating'] = [ktp['alternating'],
.5 * ktp['alternating'] + .5 * ktp['flipped'] ** 2,
.5 * ktp['alternating'] + .5 * pr['flipped'][1] ** 2
]
pr['four'] = [ktp['four'],
.5 * ktp['four'] + .5 * ktp['repeated'] ** 2,
.5 * ktp['four'] + .5 * pr['repeated'][1] ** 2]
pr['three'] = [ktp['three'],
.5 * ktp['three'] + .5 * ktp['repeated'] * ktp['single'],
.5 * ktp['three'] + .5 * pr['repeated'][1] * pr['single'][1]]
pr['five'] = [ktp['five'],
.5 * ktp['five'] + .5 * pr['repeated'][0] * pr['three'][0],
.5 * ktp['five'] + .5 * pr['four'][1] * pr['single'][1]]
pr['six'] = [ktp['six'],
.5 * ktp['six'] + .5 * pr['repeated'][0] * pr['four'][0],
.5 * ktp['six'] + .5 * pr['four'][1] * pr['repeated'][0]]
pr['eight'] = [ptw.ptw_recursive(i, kt.KTModel,
sample_seq['eight'],
(1, 0), False) for i in range(4)]
def test_constructor(self):
"""Constructor can take a sequence argument"""
for desc, probs in pr.items():
seq = sample_seq[desc]
for depth, prob in enumerate(probs):
lprob = log2(prob)
s = "of {0} at depth {1} should be {2}".format(desc,
depth,
prob)
if depth is not 0 and len(seq) > exp2(depth):
with self.assertRaises(ptw.SequenceLengthError) as cm:
m = PTWd(depth, kt.KTModel, symbols=(1, 0), sequence=seq)
the_exception = cm.exception
self.assertIsInstance(the_exception,
ptw.SequenceLengthError,
"Depth {0} and seq {1}".format(depth,
seq))
else:
m = PTWd(depth, kt.KTModel, symbols=(1, 0), sequence=seq)
self.assertEqual(list(m.sequence), list(seq),
"Should create " + desc + " sequence")
self.assertAlmostEqual(m.get_prob(),
prob,
msg = "Probability " + s,
places = PRECISION)
self.assertAlmostEqual(m.prob,
lprob,
msg = "Log probability of " + s,
places = PRECISION)
def test_extend_sequence(self):
"""Extending the general model should work as expected"""
for desc, probs in pr.items():
seq = sample_seq[desc]
for depth, prob in enumerate(probs):
if depth is not 0 and len(seq) > exp2(depth):
with self.assertRaises(ptw.SequenceLengthError) as cm:
m = PTWd(depth, kt.KTModel, symbols=(1, 0), sequence=seq)
the_exception = cm.exception
self.assertIsInstance(the_exception,
ptw.SequenceLengthError,
"Depth {0} and seq {1}".format(depth,
seq))
elif depth is not 0:
m = PTWd(depth, kt.KTModel)
m.extend_sequence(seq)
self.assertEqual(m.sequence, list(seq),
"Empty model should allow extension")
def test_conditional_prob_sum(self):
"""The conditional probability of all symbols should sum to one"""
for desc, probs in pr.items():
seq = sample_seq[desc]
for depth, prob in enumerate(probs):
if len(seq) + 1 < exp2(depth):
m = PTWd(depth, kt.KTModel, symbols=(1, 0), sequence=list(seq))
p1 = m.conditional_prob(1)
p2 = m.conditional_prob(0)
self.assertAlmostEqual(p1+p2, 1.0,
msg = "{0}: {1}".format(desc, seq),
places=PRECISION)
def test_sum_conditionals(self):
"""The conditional probability of all symbols should sum to one"""
for desc, probs in pr.items():
seq = sample_seq[desc]
for depth, prob in enumerate(probs):
if len(seq) + 1 < exp2(depth):
m = PTWd(depth, kt.KTModel, symbols=(1, 0), sequence=list(seq))
p1 = m.conditional_prob(1, log_form = True)
p2 = m.conditional_prob(0, log_form = True)
self.assertAlmostEqual(log_sum_exp([p1, p2]), 0.0,
msg = "{0}: {1}".format(desc, seq),
places=PRECISION)
p1 = exp2(p1)
p2 = exp2(p2)
self.assertAlmostEqual(p1+p2, 1.0,
msg = "{0}: {1}".format(desc, seq),
places=PRECISION)
def test_empty_conditional(self):
"""The conditional probability over empty should be the same as the direct prob"""
symbols = [0, 1]
for s in symbols:
for d in range(1, 5):
m = PTWd(d, kt.KTModel)
p1 = m.conditional_prob(s, log_form=False)
p2 = PTWd.calculate_prob(kt.KTModel, [s], symbols, log_form=False)
self.assertEqual(p1, p2,
msg = "Depth {0}: sym {1}".format(d, s))
def test_prod_conditional(self):
"""The current prob * cond prob should be the same as extending seq"""
for desc, probs in pr.items():
seq = sample_seq[desc]
symbols = (1, 0)
for depth, prob in enumerate(probs):
if len(seq) + 1 < exp2(depth):
for s in symbols:
m = PTWd(depth, kt.KTModel, symbols=(1, 0), sequence=list(seq))
cp = m.conditional_prob(s, log_form = True)
fp = cp + m.prob
m.update(s)
ep = m.prob
msg = "{0}: {1}".format(desc, seq),
self.assertAlmostEqual(fp, ep,
msg=msg,
places=PRECISION)
'''
| |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'bitmessageui.ui'
#
# Created: Mon Mar 23 22:18:07 2015
# by: PyQt4 UI code generator 4.10.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
from bmconfigparser import BMConfigParser
from foldertree import AddressBookCompleter
from messageview import MessageView
from messagecompose import MessageCompose
import settingsmixin
from networkstatus import NetworkStatus
from blacklist import Blacklist
try:
    # PyQt4 API v1 exposes QString; reuse its UTF-8 converter directly.
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    # API v2 works with native Python strings, so no conversion is needed.
    def _fromUtf8(text):
        return text
# Translation helper: wraps QApplication.translate so the rest of the file
# works with both PyQt4 API variants (with and without an explicit
# encoding argument on translate()).
try:
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig, encoding = QtCore.QCoreApplication.CodecForTr, n = None):
        # NOTE(review): the 'encoding' parameter is accepted but never used;
        # the module-level _encoding (UnicodeUTF8) is always passed instead.
        if n is None:
            return QtGui.QApplication.translate(context, text, disambig, _encoding)
        else:
            # 'n' selects the plural form of the translated string.
            return QtGui.QApplication.translate(context, text, disambig, _encoding, n)
except AttributeError:
    # Newer API: QApplication.translate takes no encoding argument.
    def _translate(context, text, disambig, encoding = QtCore.QCoreApplication.CodecForTr, n = None):
        if n is None:
            return QtGui.QApplication.translate(context, text, disambig)
        else:
            # NOTE(review): passing CodecForTr as the fourth argument looks
            # suspect for the encoding-less API -- presumably only 'n' was
            # intended here; confirm against the PyQt version in use.
            return QtGui.QApplication.translate(context, text, disambig, QtCore.QCoreApplication.CodecForTr, n)
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName(_fromUtf8("MainWindow"))
MainWindow.resize(885, 580)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/newPrefix/images/can-icon-24px.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
MainWindow.setWindowIcon(icon)
MainWindow.setTabShape(QtGui.QTabWidget.Rounded)
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.gridLayout_10 = QtGui.QGridLayout(self.centralwidget)
self.gridLayout_10.setObjectName(_fromUtf8("gridLayout_10"))
self.tabWidget = QtGui.QTabWidget(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.tabWidget.sizePolicy().hasHeightForWidth())
self.tabWidget.setSizePolicy(sizePolicy)
self.tabWidget.setMinimumSize(QtCore.QSize(0, 0))
self.tabWidget.setBaseSize(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(9)
self.tabWidget.setFont(font)
self.tabWidget.setTabPosition(QtGui.QTabWidget.North)
self.tabWidget.setTabShape(QtGui.QTabWidget.Rounded)
self.tabWidget.setObjectName(_fromUtf8("tabWidget"))
self.inbox = QtGui.QWidget()
self.inbox.setObjectName(_fromUtf8("inbox"))
self.gridLayout = QtGui.QGridLayout(self.inbox)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.horizontalSplitter_3 = settingsmixin.SSplitter()
self.horizontalSplitter_3.setObjectName(_fromUtf8("horizontalSplitter_3"))
self.verticalSplitter_12 = settingsmixin.SSplitter()
self.verticalSplitter_12.setObjectName(_fromUtf8("verticalSplitter_12"))
self.verticalSplitter_12.setOrientation(QtCore.Qt.Vertical)
self.treeWidgetYourIdentities = settingsmixin.STreeWidget(self.inbox)
self.treeWidgetYourIdentities.setObjectName(_fromUtf8("treeWidgetYourIdentities"))
self.treeWidgetYourIdentities.resize(200, self.treeWidgetYourIdentities.height())
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(_fromUtf8(":/newPrefix/images/identities.png")), QtGui.QIcon.Selected, QtGui.QIcon.Off)
self.treeWidgetYourIdentities.headerItem().setIcon(0, icon1)
self.verticalSplitter_12.addWidget(self.treeWidgetYourIdentities)
self.pushButtonNewAddress = QtGui.QPushButton(self.inbox)
self.pushButtonNewAddress.setObjectName(_fromUtf8("pushButtonNewAddress"))
self.pushButtonNewAddress.resize(200, self.pushButtonNewAddress.height())
self.verticalSplitter_12.addWidget(self.pushButtonNewAddress)
self.verticalSplitter_12.setStretchFactor(0, 1)
self.verticalSplitter_12.setStretchFactor(1, 0)
self.verticalSplitter_12.setCollapsible(0, False)
self.verticalSplitter_12.setCollapsible(1, False)
self.verticalSplitter_12.handle(1).setEnabled(False)
self.horizontalSplitter_3.addWidget(self.verticalSplitter_12)
self.verticalSplitter_7 = settingsmixin.SSplitter()
self.verticalSplitter_7.setObjectName(_fromUtf8("verticalSplitter_7"))
self.verticalSplitter_7.setOrientation(QtCore.Qt.Vertical)
self.horizontalSplitterSearch = QtGui.QSplitter()
self.horizontalSplitterSearch.setObjectName(_fromUtf8("horizontalSplitterSearch"))
self.inboxSearchLineEdit = QtGui.QLineEdit(self.inbox)
self.inboxSearchLineEdit.setObjectName(_fromUtf8("inboxSearchLineEdit"))
self.horizontalSplitterSearch.addWidget(self.inboxSearchLineEdit)
self.inboxSearchOption = QtGui.QComboBox(self.inbox)
self.inboxSearchOption.setObjectName(_fromUtf8("inboxSearchOption"))
self.inboxSearchOption.addItem(_fromUtf8(""))
self.inboxSearchOption.addItem(_fromUtf8(""))
self.inboxSearchOption.addItem(_fromUtf8(""))
self.inboxSearchOption.addItem(_fromUtf8(""))
self.inboxSearchOption.addItem(_fromUtf8(""))
self.inboxSearchOption.setSizeAdjustPolicy(QtGui.QComboBox.AdjustToContents)
self.horizontalSplitterSearch.addWidget(self.inboxSearchOption)
self.horizontalSplitterSearch.handle(1).setEnabled(False)
self.horizontalSplitterSearch.setStretchFactor(0, 1)
self.horizontalSplitterSearch.setStretchFactor(1, 0)
self.verticalSplitter_7.addWidget(self.horizontalSplitterSearch)
self.tableWidgetInbox = settingsmixin.STableWidget(self.inbox)
self.tableWidgetInbox.setEditTriggers(QtGui.QAbstractItemView.NoEditTriggers)
self.tableWidgetInbox.setAlternatingRowColors(True)
self.tableWidgetInbox.setSelectionMode(QtGui.QAbstractItemView.ExtendedSelection)
self.tableWidgetInbox.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
self.tableWidgetInbox.setWordWrap(False)
self.tableWidgetInbox.setObjectName(_fromUtf8("tableWidgetInbox"))
self.tableWidgetInbox.setColumnCount(4)
self.tableWidgetInbox.setRowCount(0)
item = QtGui.QTableWidgetItem()
self.tableWidgetInbox.setHorizontalHeaderItem(0, item)
item = QtGui.QTableWidgetItem()
self.tableWidgetInbox.setHorizontalHeaderItem(1, item)
item = QtGui.QTableWidgetItem()
self.tableWidgetInbox.setHorizontalHeaderItem(2, item)
item = QtGui.QTableWidgetItem()
self.tableWidgetInbox.setHorizontalHeaderItem(3, item)
self.tableWidgetInbox.horizontalHeader().setCascadingSectionResizes(True)
self.tableWidgetInbox.horizontalHeader().setDefaultSectionSize(200)
self.tableWidgetInbox.horizontalHeader().setHighlightSections(False)
self.tableWidgetInbox.horizontalHeader().setMinimumSectionSize(27)
self.tableWidgetInbox.horizontalHeader().setSortIndicatorShown(False)
self.tableWidgetInbox.horizontalHeader().setStretchLastSection(True)
self.tableWidgetInbox.verticalHeader().setVisible(False)
self.tableWidgetInbox.verticalHeader().setDefaultSectionSize(26)
self.verticalSplitter_7.addWidget(self.tableWidgetInbox)
self.textEditInboxMessage = MessageView(self.inbox)
self.textEditInboxMessage.setBaseSize(QtCore.QSize(0, 500))
self.textEditInboxMessage.setReadOnly(True)
self.textEditInboxMessage.setObjectName(_fromUtf8("textEditInboxMessage"))
self.verticalSplitter_7.addWidget(self.textEditInboxMessage)
self.verticalSplitter_7.setStretchFactor(0, 0)
self.verticalSplitter_7.setStretchFactor(1, 1)
self.verticalSplitter_7.setStretchFactor(2, 2)
self.verticalSplitter_7.setCollapsible(0, False)
self.verticalSplitter_7.setCollapsible(1, False)
self.verticalSplitter_7.setCollapsible(2, False)
self.verticalSplitter_7.handle(1).setEnabled(False)
self.horizontalSplitter_3.addWidget(self.verticalSplitter_7)
self.horizontalSplitter_3.setStretchFactor(0, 0)
self.horizontalSplitter_3.setStretchFactor(1, 1)
self.horizontalSplitter_3.setCollapsible(0, False)
self.horizontalSplitter_3.setCollapsible(1, False)
self.gridLayout.addWidget(self.horizontalSplitter_3)
icon2 = QtGui.QIcon()
icon2.addPixmap(QtGui.QPixmap(_fromUtf8(":/newPrefix/images/inbox.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.tabWidget.addTab(self.inbox, icon2, _fromUtf8(""))
self.send = QtGui.QWidget()
self.send.setObjectName(_fromUtf8("send"))
self.gridLayout_7 = QtGui.QGridLayout(self.send)
self.gridLayout_7.setObjectName(_fromUtf8("gridLayout_7"))
self.horizontalSplitter = settingsmixin.SSplitter()
self.horizontalSplitter.setObjectName(_fromUtf8("horizontalSplitter"))
self.verticalSplitter_2 = settingsmixin.SSplitter()
self.verticalSplitter_2.setObjectName(_fromUtf8("verticalSplitter_2"))
self.verticalSplitter_2.setOrientation(QtCore.Qt.Vertical)
self.tableWidgetAddressBook = settingsmixin.STableWidget(self.send)
self.tableWidgetAddressBook.setAlternatingRowColors(True)
self.tableWidgetAddressBook.setSelectionMode(QtGui.QAbstractItemView.ExtendedSelection)
self.tableWidgetAddressBook.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
self.tableWidgetAddressBook.setObjectName(_fromUtf8("tableWidgetAddressBook"))
self.tableWidgetAddressBook.setColumnCount(2)
self.tableWidgetAddressBook.setRowCount(0)
self.tableWidgetAddressBook.resize(200, self.tableWidgetAddressBook.height())
item = QtGui.QTableWidgetItem()
icon3 = QtGui.QIcon()
icon3.addPixmap(QtGui.QPixmap(_fromUtf8(":/newPrefix/images/addressbook.png")), QtGui.QIcon.Selected, QtGui.QIcon.Off)
item.setIcon(icon3)
self.tableWidgetAddressBook.setHorizontalHeaderItem(0, item)
item = QtGui.QTableWidgetItem()
self.tableWidgetAddressBook.setHorizontalHeaderItem(1, item)
self.tableWidgetAddressBook.horizontalHeader().setCascadingSectionResizes(True)
self.tableWidgetAddressBook.horizontalHeader().setDefaultSectionSize(200)
self.tableWidgetAddressBook.horizontalHeader().setHighlightSections(False)
self.tableWidgetAddressBook.horizontalHeader().setStretchLastSection(True)
self.tableWidgetAddressBook.verticalHeader().setVisible(False)
self.verticalSplitter_2.addWidget(self.tableWidgetAddressBook)
self.addressBookCompleter = AddressBookCompleter()
self.addressBookCompleter.setCompletionMode(QtGui.QCompleter.PopupCompletion)
self.addressBookCompleter.setCaseSensitivity(QtCore.Qt.CaseInsensitive)
self.addressBookCompleterModel = QtGui.QStringListModel()
self.addressBookCompleter.setModel(self.addressBookCompleterModel)
self.pushButtonAddAddressBook = QtGui.QPushButton(self.send)
self.pushButtonAddAddressBook.setObjectName(_fromUtf8("pushButtonAddAddressBook"))
self.pushButtonAddAddressBook.resize(200, self.pushButtonAddAddressBook.height())
self.verticalSplitter_2.addWidget(self.pushButtonAddAddressBook)
self.pushButtonFetchNamecoinID = QtGui.QPushButton(self.send)
self.pushButtonFetchNamecoinID.resize(200, self.pushButtonFetchNamecoinID.height())
self.pushButtonFetchNamecoinID.setObjectName(_fromUtf8("pushButtonFetchNamecoinID"))
self.verticalSplitter_2.addWidget(self.pushButtonFetchNamecoinID)
self.verticalSplitter_2.setStretchFactor(0, 1)
self.verticalSplitter_2.setStretchFactor(1, 0)
self.verticalSplitter_2.setStretchFactor(2, 0)
self.verticalSplitter_2.setCollapsible(0, False)
self.verticalSplitter_2.setCollapsible(1, False)
self.verticalSplitter_2.setCollapsible(2, False)
self.verticalSplitter_2.handle(1).setEnabled(False)
self.verticalSplitter_2.handle(2).setEnabled(False)
self.horizontalSplitter.addWidget(self.verticalSplitter_2)
self.verticalSplitter = settingsmixin.SSplitter()
self.verticalSplitter.setObjectName(_fromUtf8("verticalSplitter"))
self.verticalSplitter.setOrientation(QtCore.Qt.Vertical)
self.tabWidgetSend = QtGui.QTabWidget(self.send)
self.tabWidgetSend.setObjectName(_fromUtf8("tabWidgetSend"))
self.sendDirect = QtGui.QWidget()
self.sendDirect.setObjectName(_fromUtf8("sendDirect"))
self.gridLayout_8 = QtGui.QGridLayout(self.sendDirect)
self.gridLayout_8.setObjectName(_fromUtf8("gridLayout_8"))
self.verticalSplitter_5 = settingsmixin.SSplitter()
self.verticalSplitter_5.setObjectName(_fromUtf8("verticalSplitter_5"))
self.verticalSplitter_5.setOrientation(QtCore.Qt.Vertical)
self.gridLayout_2 = QtGui.QGridLayout()
self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2"))
self.label_3 = QtGui.QLabel(self.sendDirect)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.gridLayout_2.addWidget(self.label_3, 2, 0, 1, 1)
self.label_2 = QtGui.QLabel(self.sendDirect)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.gridLayout_2.addWidget(self.label_2, 0, 0, 1, 1)
self.lineEditSubject = QtGui.QLineEdit(self.sendDirect)
self.lineEditSubject.setText(_fromUtf8(""))
self.lineEditSubject.setObjectName(_fromUtf8("lineEditSubject"))
self.gridLayout_2.addWidget(self.lineEditSubject, 2, 1, 1, 1)
self.label = QtGui.QLabel(self.sendDirect)
self.label.setObjectName(_fromUtf8("label"))
self.gridLayout_2.addWidget(self.label, 1, 0, 1, 1)
self.comboBoxSendFrom = QtGui.QComboBox(self.sendDirect)
self.comboBoxSendFrom.setMinimumSize(QtCore.QSize(300, 0))
self.comboBoxSendFrom.setObjectName(_fromUtf8("comboBoxSendFrom"))
self.gridLayout_2.addWidget(self.comboBoxSendFrom, 0, 1, 1, 1)
self.lineEditTo = QtGui.QLineEdit(self.sendDirect)
self.lineEditTo.setObjectName(_fromUtf8("lineEditTo"))
self.gridLayout_2.addWidget(self.lineEditTo, 1, 1, 1, 1)
self.lineEditTo.setCompleter(self.addressBookCompleter)
self.gridLayout_2_Widget = QtGui.QWidget()
self.gridLayout_2_Widget.setLayout(self.gridLayout_2)
self.verticalSplitter_5.addWidget(self.gridLayout_2_Widget)
self.textEditMessage = MessageCompose(self.sendDirect)
self.textEditMessage.setObjectName(_fromUtf8("textEditMessage"))
self.verticalSplitter_5.addWidget(self.textEditMessage)
self.verticalSplitter_5.setStretchFactor(0, 0)
self.verticalSplitter_5.setStretchFactor(1, 1)
self.verticalSplitter_5.setCollapsible(0, False)
self.verticalSplitter_5.setCollapsible(1, False)
self.verticalSplitter_5.handle(1).setEnabled(False)
self.gridLayout_8.addWidget(self.verticalSplitter_5, 0, 0, 1, 1)
self.tabWidgetSend.addTab(self.sendDirect, _fromUtf8(""))
self.sendBroadcast = QtGui.QWidget()
self.sendBroadcast.setObjectName(_fromUtf8("sendBroadcast"))
self.gridLayout_9 = QtGui.QGridLayout(self.sendBroadcast)
self.gridLayout_9.setObjectName(_fromUtf8("gridLayout_9"))
self.verticalSplitter_6 = settingsmixin.SSplitter()
self.verticalSplitter_6.setObjectName(_fromUtf8("verticalSplitter_6"))
self.verticalSplitter_6.setOrientation(QtCore.Qt.Vertical)
self.gridLayout_5 = QtGui.QGridLayout()
self.gridLayout_5.setObjectName(_fromUtf8("gridLayout_5"))
self.label_8 = QtGui.QLabel(self.sendBroadcast)
self.label_8.setObjectName(_fromUtf8("label_8"))
self.gridLayout_5.addWidget(self.label_8, 0, 0, 1, 1)
self.lineEditSubjectBroadcast = QtGui.QLineEdit(self.sendBroadcast)
self.lineEditSubjectBroadcast.setText(_fromUtf8(""))
self.lineEditSubjectBroadcast.setObjectName(_fromUtf8("lineEditSubjectBroadcast"))
self.gridLayout_5.addWidget(self.lineEditSubjectBroadcast, 1, 1, 1, 1)
self.label_7 = QtGui.QLabel(self.sendBroadcast)
self.label_7.setObjectName(_fromUtf8("label_7"))
self.gridLayout_5.addWidget(self.label_7, 1, 0, 1, 1)
self.comboBoxSendFromBroadcast = QtGui.QComboBox(self.sendBroadcast)
self.comboBoxSendFromBroadcast.setMinimumSize(QtCore.QSize(300, 0))
self.comboBoxSendFromBroadcast.setObjectName(_fromUtf8("comboBoxSendFromBroadcast"))
self.gridLayout_5.addWidget(self.comboBoxSendFromBroadcast, 0, 1, 1, 1)
self.gridLayout_5_Widget = QtGui.QWidget()
self.gridLayout_5_Widget.setLayout(self.gridLayout_5)
self.verticalSplitter_6.addWidget(self.gridLayout_5_Widget)
self.textEditMessageBroadcast = MessageCompose(self.sendBroadcast)
self.textEditMessageBroadcast.setObjectName(_fromUtf8("textEditMessageBroadcast"))
self.verticalSplitter_6.addWidget(self.textEditMessageBroadcast)
self.verticalSplitter_6.setStretchFactor(0, 0)
self.verticalSplitter_6.setStretchFactor(1, 1)
self.verticalSplitter_6.setCollapsible(0, False)
self.verticalSplitter_6.setCollapsible(1, False)
self.verticalSplitter_6.handle(1).setEnabled(False)
self.gridLayout_9.addWidget(self.verticalSplitter_6, 0, 0, 1, 1)
self.tabWidgetSend.addTab(self.sendBroadcast, _fromUtf8(""))
self.verticalSplitter.addWidget(self.tabWidgetSend)
self.tTLContainer = QtGui.QWidget()
self.tTLContainer.setSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Fixed)
self.horizontalLayout_5 = QtGui.QHBoxLayout()
self.tTLContainer.setLayout(self.horizontalLayout_5)
self.horizontalLayout_5.setObjectName(_fromUtf8("horizontalLayout_5"))
self.pushButtonTTL = QtGui.QPushButton(self.send)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButtonTTL.sizePolicy().hasHeightForWidth())
self.pushButtonTTL.setSizePolicy(sizePolicy)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(120, 120, 120))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
self.pushButtonTTL.setPalette(palette)
font = QtGui.QFont()
font.setUnderline(True)
self.pushButtonTTL.setFont(font)
self.pushButtonTTL.setFlat(True)
self.pushButtonTTL.setObjectName(_fromUtf8("pushButtonTTL"))
self.horizontalLayout_5.addWidget(self.pushButtonTTL, 0, QtCore.Qt.AlignRight)
self.horizontalSliderTTL = QtGui.QSlider(self.send)
self.horizontalSliderTTL.setMinimumSize(QtCore.QSize(70, 0))
self.horizontalSliderTTL.setOrientation(QtCore.Qt.Horizontal)
self.horizontalSliderTTL.setInvertedAppearance(False)
self.horizontalSliderTTL.setInvertedControls(False)
self.horizontalSliderTTL.setObjectName(_fromUtf8("horizontalSliderTTL"))
self.horizontalLayout_5.addWidget(self.horizontalSliderTTL, 0, QtCore.Qt.AlignLeft)
self.labelHumanFriendlyTTLDescription = QtGui.QLabel(self.send)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.labelHumanFriendlyTTLDescription.sizePolicy().hasHeightForWidth())
self.labelHumanFriendlyTTLDescription.setSizePolicy(sizePolicy)
self.labelHumanFriendlyTTLDescription.setMinimumSize(QtCore.QSize(45, 0))
self.labelHumanFriendlyTTLDescription.setObjectName(_fromUtf8("labelHumanFriendlyTTLDescription"))
self.horizontalLayout_5.addWidget(self.labelHumanFriendlyTTLDescription, 1, QtCore.Qt.AlignLeft)
self.pushButtonClear = QtGui.QPushButton(self.send)
self.pushButtonClear.setObjectName(_fromUtf8("pushButtonClear"))
self.horizontalLayout_5.addWidget(self.pushButtonClear, 0, QtCore.Qt.AlignRight)
self.pushButtonSend = QtGui.QPushButton(self.send)
self.pushButtonSend.setObjectName(_fromUtf8("pushButtonSend"))
self.horizontalLayout_5.addWidget(self.pushButtonSend, 0, QtCore.Qt.AlignRight)
self.horizontalSliderTTL.setMaximumSize(QtCore.QSize(105, self.pushButtonSend.height()))
self.verticalSplitter.addWidget(self.tTLContainer)
self.tTLContainer.adjustSize()
self.verticalSplitter.setStretchFactor(1, 0)
self.verticalSplitter.setStretchFactor(0, 1)
self.verticalSplitter.setCollapsible(0, False)
self.verticalSplitter.setCollapsible(1, False)
self.verticalSplitter.handle(1).setEnabled(False)
self.horizontalSplitter.addWidget(self.verticalSplitter)
self.horizontalSplitter.setStretchFactor(0, 0)
self.horizontalSplitter.setStretchFactor(1, 1)
self.horizontalSplitter.setCollapsible(0, False)
self.horizontalSplitter.setCollapsible(1, False)
self.gridLayout_7.addWidget(self.horizontalSplitter, 0, 0, 1, 1)
icon4 = QtGui.QIcon()
icon4.addPixmap(QtGui.QPixmap(_fromUtf8(":/newPrefix/images/send.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.tabWidget.addTab(self.send, icon4, _fromUtf8(""))
self.subscriptions = QtGui.QWidget()
self.subscriptions.setObjectName(_fromUtf8("subscriptions"))
self.gridLayout_3 = QtGui.QGridLayout(self.subscriptions)
self.gridLayout_3.setObjectName(_fromUtf8("gridLayout_3"))
self.horizontalSplitter_4 = settingsmixin.SSplitter()
self.horizontalSplitter_4.setObjectName(_fromUtf8("horizontalSplitter_4"))
self.verticalSplitter_3 = settingsmixin.SSplitter()
self.verticalSplitter_3.setObjectName(_fromUtf8("verticalSplitter_3"))
self.verticalSplitter_3.setOrientation(QtCore.Qt.Vertical)
self.treeWidgetSubscriptions = settingsmixin.STreeWidget(self.subscriptions)
self.treeWidgetSubscriptions.setAlternatingRowColors(True)
self.treeWidgetSubscriptions.setSelectionMode(QtGui.QAbstractItemView.SingleSelection)
self.treeWidgetSubscriptions.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
self.treeWidgetSubscriptions.setObjectName(_fromUtf8("treeWidgetSubscriptions"))
self.treeWidgetSubscriptions.resize(200, self.treeWidgetSubscriptions.height())
icon5 = QtGui.QIcon()
icon5.addPixmap(QtGui.QPixmap(_fromUtf8(":/newPrefix/images/subscriptions.png")), QtGui.QIcon.Selected, QtGui.QIcon.Off)
self.treeWidgetSubscriptions.headerItem().setIcon(0, icon5)
self.verticalSplitter_3.addWidget(self.treeWidgetSubscriptions)
self.pushButtonAddSubscription = QtGui.QPushButton(self.subscriptions)
self.pushButtonAddSubscription.setObjectName(_fromUtf8("pushButtonAddSubscription"))
self.pushButtonAddSubscription.resize(200, self.pushButtonAddSubscription.height())
self.verticalSplitter_3.addWidget(self.pushButtonAddSubscription)
self.verticalSplitter_3.setStretchFactor(0, 1)
self.verticalSplitter_3.setStretchFactor(1, 0)
self.verticalSplitter_3.setCollapsible(0, False)
self.verticalSplitter_3.setCollapsible(1, False)
self.verticalSplitter_3.handle(1).setEnabled(False)
self.horizontalSplitter_4.addWidget(self.verticalSplitter_3)
self.verticalSplitter_4 = settingsmixin.SSplitter()
self.verticalSplitter_4.setObjectName(_fromUtf8("verticalSplitter_4"))
self.verticalSplitter_4.setOrientation(QtCore.Qt.Vertical)
self.horizontalSplitter_2 = QtGui.QSplitter()
self.horizontalSplitter_2.setObjectName(_fromUtf8("horizontalSplitter_2"))
self.inboxSearchLineEditSubscriptions = QtGui.QLineEdit(self.subscriptions)
self.inboxSearchLineEditSubscriptions.setObjectName(_fromUtf8("inboxSearchLineEditSubscriptions"))
self.horizontalSplitter_2.addWidget(self.inboxSearchLineEditSubscriptions)
self.inboxSearchOptionSubscriptions = QtGui.QComboBox(self.subscriptions)
self.inboxSearchOptionSubscriptions.setObjectName(_fromUtf8("inboxSearchOptionSubscriptions"))
self.inboxSearchOptionSubscriptions.addItem(_fromUtf8(""))
self.inboxSearchOptionSubscriptions.addItem(_fromUtf8(""))
self.inboxSearchOptionSubscriptions.addItem(_fromUtf8(""))
self.inboxSearchOptionSubscriptions.addItem(_fromUtf8(""))
self.inboxSearchOptionSubscriptions.addItem(_fromUtf8(""))
self.inboxSearchOptionSubscriptions.setSizeAdjustPolicy(QtGui.QComboBox.AdjustToContents)
self.horizontalSplitter_2.addWidget(self.inboxSearchOptionSubscriptions)
self.horizontalSplitter_2.handle(1).setEnabled(False)
self.horizontalSplitter_2.setStretchFactor(0, 1)
self.horizontalSplitter_2.setStretchFactor(1, 0)
self.verticalSplitter_4.addWidget(self.horizontalSplitter_2)
self.tableWidgetInboxSubscriptions = settingsmixin.STableWidget(self.subscriptions)
self.tableWidgetInboxSubscriptions.setEditTriggers(QtGui.QAbstractItemView.NoEditTriggers)
self.tableWidgetInboxSubscriptions.setAlternatingRowColors(True)
self.tableWidgetInboxSubscriptions.setSelectionMode(QtGui.QAbstractItemView.ExtendedSelection)
self.tableWidgetInboxSubscriptions.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
self.tableWidgetInboxSubscriptions.setWordWrap(False)
self.tableWidgetInboxSubscriptions.setObjectName(_fromUtf8("tableWidgetInboxSubscriptions"))
self.tableWidgetInboxSubscriptions.setColumnCount(4)
self.tableWidgetInboxSubscriptions.setRowCount(0)
item = QtGui.QTableWidgetItem()
self.tableWidgetInboxSubscriptions.setHorizontalHeaderItem(0, item)
item = QtGui.QTableWidgetItem()
self.tableWidgetInboxSubscriptions.setHorizontalHeaderItem(1, item)
item = QtGui.QTableWidgetItem()
self.tableWidgetInboxSubscriptions.setHorizontalHeaderItem(2, item)
item = QtGui.QTableWidgetItem()
self.tableWidgetInboxSubscriptions.setHorizontalHeaderItem(3, item)
self.tableWidgetInboxSubscriptions.horizontalHeader().setCascadingSectionResizes(True)
self.tableWidgetInboxSubscriptions.horizontalHeader().setDefaultSectionSize(200)
self.tableWidgetInboxSubscriptions.horizontalHeader().setHighlightSections(False)
self.tableWidgetInboxSubscriptions.horizontalHeader().setMinimumSectionSize(27)
self.tableWidgetInboxSubscriptions.horizontalHeader().setSortIndicatorShown(False)
self.tableWidgetInboxSubscriptions.horizontalHeader().setStretchLastSection(True)
self.tableWidgetInboxSubscriptions.verticalHeader().setVisible(False)
self.tableWidgetInboxSubscriptions.verticalHeader().setDefaultSectionSize(26)
self.verticalSplitter_4.addWidget(self.tableWidgetInboxSubscriptions)
self.textEditInboxMessageSubscriptions = MessageView(self.subscriptions)
self.textEditInboxMessageSubscriptions.setBaseSize(QtCore.QSize(0, 500))
self.textEditInboxMessageSubscriptions.setReadOnly(True)
self.textEditInboxMessageSubscriptions.setObjectName(_fromUtf8("textEditInboxMessageSubscriptions"))
self.verticalSplitter_4.addWidget(self.textEditInboxMessageSubscriptions)
self.verticalSplitter_4.setStretchFactor(0, 0)
self.verticalSplitter_4.setStretchFactor(1, 1)
self.verticalSplitter_4.setStretchFactor(2, 2)
self.verticalSplitter_4.setCollapsible(0, False)
self.verticalSplitter_4.setCollapsible(1, False)
self.verticalSplitter_4.setCollapsible(2, False)
self.verticalSplitter_4.handle(1).setEnabled(False)
self.horizontalSplitter_4.addWidget(self.verticalSplitter_4)
self.horizontalSplitter_4.setStretchFactor(0, 0)
self.horizontalSplitter_4.setStretchFactor(1, 1)
self.horizontalSplitter_4.setCollapsible(0, False)
self.horizontalSplitter_4.setCollapsible(1, False)
self.gridLayout_3.addWidget(self.horizontalSplitter_4, 0, 0, 1, 1)
icon6 = QtGui.QIcon()
icon6.addPixmap(QtGui.QPixmap(_fromUtf8(":/newPrefix/images/subscriptions.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.tabWidget.addTab(self.subscriptions, icon6, _fromUtf8(""))
self.chans = QtGui.QWidget()
self.chans.setObjectName(_fromUtf8("chans"))
self.gridLayout_4 = QtGui.QGridLayout(self.chans)
self.gridLayout_4.setObjectName(_fromUtf8("gridLayout_4"))
self.horizontalSplitter_7 = settingsmixin.SSplitter()
self.horizontalSplitter_7.setObjectName(_fromUtf8("horizontalSplitter_7"))
self.verticalSplitter_17 = settingsmixin.SSplitter()
self.verticalSplitter_17.setObjectName(_fromUtf8("verticalSplitter_17"))
self.verticalSplitter_17.setOrientation(QtCore.Qt.Vertical)
self.treeWidgetChans = settingsmixin.STreeWidget(self.chans)
self.treeWidgetChans.setFrameShadow(QtGui.QFrame.Sunken)
self.treeWidgetChans.setLineWidth(1)
self.treeWidgetChans.setAlternatingRowColors(True)
self.treeWidgetChans.setSelectionMode(QtGui.QAbstractItemView.SingleSelection)
self.treeWidgetChans.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
self.treeWidgetChans.setObjectName(_fromUtf8("treeWidgetChans"))
self.treeWidgetChans.resize(200, self.treeWidgetChans.height())
icon7 = QtGui.QIcon()
icon7.addPixmap(QtGui.QPixmap(_fromUtf8(":/newPrefix/images/can-icon-16px.png")), QtGui.QIcon.Selected, QtGui.QIcon.Off)
self.treeWidgetChans.headerItem().setIcon(0, icon7)
self.verticalSplitter_17.addWidget(self.treeWidgetChans)
self.pushButtonAddChan = QtGui.QPushButton(self.chans)
self.pushButtonAddChan.setObjectName(_fromUtf8("pushButtonAddChan"))
self.pushButtonAddChan.resize(200, self.pushButtonAddChan.height())
self.verticalSplitter_17.addWidget(self.pushButtonAddChan)
self.verticalSplitter_17.setStretchFactor(0, 1)
self.verticalSplitter_17.setStretchFactor(1, 0)
self.verticalSplitter_17.setCollapsible(0, False)
self.verticalSplitter_17.setCollapsible(1, False)
self.verticalSplitter_17.handle(1).setEnabled(False)
self.horizontalSplitter_7.addWidget(self.verticalSplitter_17)
self.verticalSplitter_8 = settingsmixin.SSplitter()
self.verticalSplitter_8.setObjectName(_fromUtf8("verticalSplitter_8"))
self.verticalSplitter_8.setOrientation(QtCore.Qt.Vertical)
self.horizontalSplitter_6 = QtGui.QSplitter()
self.horizontalSplitter_6.setObjectName(_fromUtf8("horizontalSplitter_6"))
self.inboxSearchLineEditChans = QtGui.QLineEdit(self.chans)
self.inboxSearchLineEditChans.setObjectName(_fromUtf8("inboxSearchLineEditChans"))
self.horizontalSplitter_6.addWidget(self.inboxSearchLineEditChans)
self.inboxSearchOptionChans = QtGui.QComboBox(self.chans)
self.inboxSearchOptionChans.setObjectName(_fromUtf8("inboxSearchOptionChans"))
self.inboxSearchOptionChans.addItem(_fromUtf8(""))
self.inboxSearchOptionChans.addItem(_fromUtf8(""))
self.inboxSearchOptionChans.addItem(_fromUtf8(""))
self.inboxSearchOptionChans.addItem(_fromUtf8(""))
self.inboxSearchOptionChans.addItem(_fromUtf8(""))
self.inboxSearchOptionChans.setSizeAdjustPolicy(QtGui.QComboBox.AdjustToContents)
self.horizontalSplitter_6.addWidget(self.inboxSearchOptionChans)
self.horizontalSplitter_6.handle(1).setEnabled(False)
self.horizontalSplitter_6.setStretchFactor(0, 1)
self.horizontalSplitter_6.setStretchFactor(1, 0)
self.verticalSplitter_8.addWidget(self.horizontalSplitter_6)
self.tableWidgetInboxChans = settingsmixin.STableWidget(self.chans)
self.tableWidgetInboxChans.setEditTriggers(QtGui.QAbstractItemView.NoEditTriggers)
self.tableWidgetInboxChans.setAlternatingRowColors(True)
self.tableWidgetInboxChans.setSelectionMode(QtGui.QAbstractItemView.ExtendedSelection)
self.tableWidgetInboxChans.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
self.tableWidgetInboxChans.setWordWrap(False)
self.tableWidgetInboxChans.setObjectName(_fromUtf8("tableWidgetInboxChans"))
self.tableWidgetInboxChans.setColumnCount(4)
self.tableWidgetInboxChans.setRowCount(0)
item = QtGui.QTableWidgetItem()
self.tableWidgetInboxChans.setHorizontalHeaderItem(0, item)
item = QtGui.QTableWidgetItem()
self.tableWidgetInboxChans.setHorizontalHeaderItem(1, item)
item = QtGui.QTableWidgetItem()
self.tableWidgetInboxChans.setHorizontalHeaderItem(2, item)
item = QtGui.QTableWidgetItem()
self.tableWidgetInboxChans.setHorizontalHeaderItem(3, item)
self.tableWidgetInboxChans.horizontalHeader().setCascadingSectionResizes(True)
self.tableWidgetInboxChans.horizontalHeader().setDefaultSectionSize(200)
self.tableWidgetInboxChans.horizontalHeader().setHighlightSections(False)
self.tableWidgetInboxChans.horizontalHeader().setMinimumSectionSize(27)
self.tableWidgetInboxChans.horizontalHeader().setSortIndicatorShown(False)
self.tableWidgetInboxChans.horizontalHeader().setStretchLastSection(True)
self.tableWidgetInboxChans.verticalHeader().setVisible(False)
self.tableWidgetInboxChans.verticalHeader().setDefaultSectionSize(26)
self.verticalSplitter_8.addWidget(self.tableWidgetInboxChans)
self.textEditInboxMessageChans = MessageView(self.chans)
self.textEditInboxMessageChans.setBaseSize(QtCore.QSize(0, 500))
self.textEditInboxMessageChans.setReadOnly(True)
self.textEditInboxMessageChans.setObjectName(_fromUtf8("textEditInboxMessageChans"))
self.verticalSplitter_8.addWidget(self.textEditInboxMessageChans)
self.verticalSplitter_8.setStretchFactor(0, 0)
self.verticalSplitter_8.setStretchFactor(1, 1)
self.verticalSplitter_8.setStretchFactor(2, 2)
self.verticalSplitter_8.setCollapsible(0, False)
self.verticalSplitter_8.setCollapsible(1, False)
self.verticalSplitter_8.setCollapsible(2, False)
self.verticalSplitter_8.handle(1).setEnabled(False)
self.horizontalSplitter_7.addWidget(self.verticalSplitter_8)
self.horizontalSplitter_7.setStretchFactor(0, 0)
self.horizontalSplitter_7.setStretchFactor(1, 1)
self.horizontalSplitter_7.setCollapsible(0, False)
self.horizontalSplitter_7.setCollapsible(1, False)
self.gridLayout_4.addWidget(self.horizontalSplitter_7, 0, 0, 1, 1)
icon8 = QtGui.QIcon()
icon8.addPixmap(QtGui.QPixmap(_fromUtf8(":/newPrefix/images/can-icon-16px.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.tabWidget.addTab(self.chans, icon8, _fromUtf8(""))
self.blackwhitelist = Blacklist()
self.tabWidget.addTab(self.blackwhitelist, QtGui.QIcon(":/newPrefix/images/blacklist.png"), "")
# Initialize the Blacklist or Whitelist
if BMConfigParser().get('bitmessagesettings', 'blackwhitelist') == 'white':
self.blackwhitelist.radioButtonWhitelist.click()
self.blackwhitelist.rerenderBlackWhiteList()
self.networkstatus = NetworkStatus()
self.tabWidget.addTab(self.networkstatus, QtGui.QIcon(":/newPrefix/images/networkstatus.png"), "")
self.gridLayout_10.addWidget(self.tabWidget, 0, 0, 1, 1)
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtGui.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 885, 27))
self.menubar.setObjectName(_fromUtf8("menubar"))
self.menuFile = QtGui.QMenu(self.menubar)
self.menuFile.setObjectName(_fromUtf8("menuFile"))
self.menuSettings = QtGui.QMenu(self.menubar)
self.menuSettings.setObjectName(_fromUtf8("menuSettings"))
self.menuHelp = QtGui.QMenu(self.menubar)
self.menuHelp.setObjectName(_fromUtf8("menuHelp"))
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtGui.QStatusBar(MainWindow)
self.statusbar.setMaximumSize(QtCore.QSize(16777215, 22))
self.statusbar.setObjectName(_fromUtf8("statusbar"))
MainWindow.setStatusBar(self.statusbar)
self.actionImport_keys = QtGui.QAction(MainWindow)
self.actionImport_keys.setObjectName(_fromUtf8("actionImport_keys"))
self.actionManageKeys = QtGui.QAction(MainWindow)
self.actionManageKeys.setCheckable(False)
self.actionManageKeys.setEnabled(True)
icon = QtGui.QIcon.fromTheme(_fromUtf8("dialog-password"))
self.actionManageKeys.setIcon(icon)
self.actionManageKeys.setObjectName(_fromUtf8("actionManageKeys"))
self.actionNetworkSwitch = QtGui.QAction(MainWindow)
self.actionNetworkSwitch.setObjectName(_fromUtf8("actionNetworkSwitch"))
self.actionExit = QtGui.QAction(MainWindow)
icon = QtGui.QIcon.fromTheme(_fromUtf8("application-exit"))
self.actionExit.setIcon(icon)
self.actionExit.setObjectName(_fromUtf8("actionExit"))
self.actionHelp = QtGui.QAction(MainWindow)
icon = QtGui.QIcon.fromTheme(_fromUtf8("help-contents"))
self.actionHelp.setIcon(icon)
self.actionHelp.setObjectName(_fromUtf8("actionHelp"))
self.actionSupport = QtGui.QAction(MainWindow)
icon = QtGui.QIcon.fromTheme(_fromUtf8("help-support"))
self.actionSupport.setIcon(icon)
self.actionSupport.setObjectName(_fromUtf8("actionSupport"))
self.actionAbout = QtGui.QAction(MainWindow)
icon = QtGui.QIcon.fromTheme(_fromUtf8("help-about"))
self.actionAbout.setIcon(icon)
self.actionAbout.setObjectName(_fromUtf8("actionAbout"))
self.actionSettings = QtGui.QAction(MainWindow)
icon = QtGui.QIcon.fromTheme(_fromUtf8("document-properties"))
self.actionSettings.setIcon(icon)
self.actionSettings.setObjectName(_fromUtf8("actionSettings"))
self.actionRegenerateDeterministicAddresses = QtGui.QAction(MainWindow)
icon = QtGui.QIcon.fromTheme(_fromUtf8("view-refresh"))
self.actionRegenerateDeterministicAddresses.setIcon(icon)
self.actionRegenerateDeterministicAddresses.setObjectName(_fromUtf8("actionRegenerateDeterministicAddresses"))
self.actionDeleteAllTrashedMessages = QtGui.QAction(MainWindow)
icon = QtGui.QIcon.fromTheme(_fromUtf8("user-trash"))
self.actionDeleteAllTrashedMessages.setIcon(icon)
self.actionDeleteAllTrashedMessages.setObjectName(_fromUtf8("actionDeleteAllTrashedMessages"))
self.actionJoinChan = QtGui.QAction(MainWindow)
icon = QtGui.QIcon.fromTheme(_fromUtf8("contact-new"))
self.actionJoinChan.setIcon(icon)
self.actionJoinChan.setObjectName(_fromUtf8("actionJoinChan"))
self.menuFile.addAction(self.actionManageKeys)
self.menuFile.addAction(self.actionDeleteAllTrashedMessages)
self.menuFile.addAction(self.actionRegenerateDeterministicAddresses)
self.menuFile.addAction(self.actionNetworkSwitch)
self.menuFile.addAction(self.actionExit)
self.menuSettings.addAction(self.actionSettings)
self.menuHelp.addAction(self.actionHelp)
self.menuHelp.addAction(self.actionSupport)
self.menuHelp.addAction(self.actionAbout)
self.menubar.addAction(self.menuFile.menuAction())
self.menubar.addAction(self.menuSettings.menuAction())
self.menubar.addAction(self.menuHelp.menuAction())
self.retranslateUi(MainWindow)
self.tabWidget.setCurrentIndex(
self.tabWidget.indexOf(self.inbox)
)
self.tabWidgetSend.setCurrentIndex(
self.tabWidgetSend.indexOf(self.sendDirect)
)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
MainWindow.setTabOrder(self.tableWidgetInbox, self.textEditInboxMessage)
MainWindow.setTabOrder(self.textEditInboxMessage, self.comboBoxSendFrom)
MainWindow.setTabOrder(self.comboBoxSendFrom, self.lineEditTo)
MainWindow.setTabOrder(self.lineEditTo, self.lineEditSubject)
MainWindow.setTabOrder(self.lineEditSubject, self.textEditMessage)
MainWindow.setTabOrder(self.textEditMessage, self.pushButtonAddSubscription)
def updateNetworkSwitchMenuLabel(self, dontconnect=None):
    """Refresh the caption of the File-menu network toggle action.

    When *dontconnect* is not supplied, the current value is read from
    the 'bitmessagesettings'/'dontconnect' config option.  The action
    always offers the opposite of the current state: "Go online" while
    disconnected, "Go offline" while connected.
    """
    if dontconnect is None:
        dontconnect = BMConfigParser().safeGetBoolean(
            'bitmessagesettings', 'dontconnect')
    if dontconnect:
        caption = _translate("MainWindow", "Go online", None)
    else:
        caption = _translate("MainWindow", "Go offline", None)
    self.actionNetworkSwitch.setText(caption)
def retranslateUi(self, MainWindow):
    """Set (or re-set) every user-visible string on the main window.

    Called from setupUi() and again whenever the UI language changes,
    so every literal is routed through _translate().  The three message
    lists (inbox, subscriptions, chans) share identical column headers
    and search-scope entries, which are applied via private helpers.
    """
    MainWindow.setWindowTitle(_translate("MainWindow", "Bitmessage", None))
    # --- "Messages" (inbox) tab ---
    self.treeWidgetYourIdentities.headerItem().setText(0, _translate("MainWindow", "Identities", None))
    self.pushButtonNewAddress.setText(_translate("MainWindow", "New Identity", None))
    self.inboxSearchLineEdit.setPlaceholderText(_translate("MainWindow", "Search", None))
    self._retranslateSearchOptions(self.inboxSearchOption)
    self._retranslateMessagelist(self.tableWidgetInbox)
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.inbox), _translate("MainWindow", "Messages", None))
    # --- "Send" tab: address book and the two compose sub-tabs ---
    self.tableWidgetAddressBook.setSortingEnabled(True)
    item = self.tableWidgetAddressBook.horizontalHeaderItem(0)
    item.setText(_translate("MainWindow", "Address book", None))
    item = self.tableWidgetAddressBook.horizontalHeaderItem(1)
    item.setText(_translate("MainWindow", "Address", None))
    self.pushButtonAddAddressBook.setText(_translate("MainWindow", "Add Contact", None))
    self.pushButtonFetchNamecoinID.setText(_translate("MainWindow", "Fetch Namecoin ID", None))
    self.label_3.setText(_translate("MainWindow", "Subject:", None))
    self.label_2.setText(_translate("MainWindow", "From:", None))
    self.label.setText(_translate("MainWindow", "To:", None))
    self.tabWidgetSend.setTabText(self.tabWidgetSend.indexOf(self.sendDirect), _translate("MainWindow", "Send ordinary Message", None))
    self.label_8.setText(_translate("MainWindow", "From:", None))
    self.label_7.setText(_translate("MainWindow", "Subject:", None))
    self.tabWidgetSend.setTabText(self.tabWidgetSend.indexOf(self.sendBroadcast), _translate("MainWindow", "Send Message to your Subscribers", None))
    self.pushButtonTTL.setText(_translate("MainWindow", "TTL:", None))
    # Human-friendly TTL label; 48 h is the fallback when the 'ttl'
    # option is missing or unreadable.
    hours = 48
    try:
        hours = int(BMConfigParser().getint('bitmessagesettings', 'ttl')/60/60)
    except Exception:
        # Best-effort read: keep the default on any config error, but do
        # not swallow SystemExit/KeyboardInterrupt like the old bare
        # 'except:' did.
        pass
    self.labelHumanFriendlyTTLDescription.setText(_translate("MainWindow", "%n hour(s)", None, QtCore.QCoreApplication.CodecForTr, hours))
    self.pushButtonClear.setText(_translate("MainWindow", "Clear", None))
    self.pushButtonSend.setText(_translate("MainWindow", "Send", None))
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.send), _translate("MainWindow", "Send", None))
    # --- "Subscriptions" tab ---
    self.treeWidgetSubscriptions.headerItem().setText(0, _translate("MainWindow", "Subscriptions", None))
    self.pushButtonAddSubscription.setText(_translate("MainWindow", "Add new Subscription", None))
    self.inboxSearchLineEditSubscriptions.setPlaceholderText(_translate("MainWindow", "Search", None))
    self._retranslateSearchOptions(self.inboxSearchOptionSubscriptions)
    self._retranslateMessagelist(self.tableWidgetInboxSubscriptions)
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.subscriptions), _translate("MainWindow", "Subscriptions", None))
    # --- "Chans" tab ---
    self.treeWidgetChans.headerItem().setText(0, _translate("MainWindow", "Chans", None))
    self.pushButtonAddChan.setText(_translate("MainWindow", "Add Chan", None))
    self.inboxSearchLineEditChans.setPlaceholderText(_translate("MainWindow", "Search", None))
    self._retranslateSearchOptions(self.inboxSearchOptionChans)
    self._retranslateMessagelist(self.tableWidgetInboxChans)
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.chans), _translate("MainWindow", "Chans", None))
    # --- Embedded widgets that carry their own retranslateUi() ---
    self.blackwhitelist.retranslateUi()
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.blackwhitelist), _translate("blacklist", "Blacklist", None))
    self.networkstatus.retranslateUi()
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.networkstatus), _translate("networkstatus", "Network Status", None))
    # --- Menu bar and actions ---
    self.menuFile.setTitle(_translate("MainWindow", "File", None))
    self.menuSettings.setTitle(_translate("MainWindow", "Settings", None))
    self.menuHelp.setTitle(_translate("MainWindow", "Help", None))
    self.actionImport_keys.setText(_translate("MainWindow", "Import keys", None))
    self.actionManageKeys.setText(_translate("MainWindow", "Manage keys", None))
    self.actionExit.setText(_translate("MainWindow", "Quit", None))
    self.actionExit.setShortcut(_translate("MainWindow", "Ctrl+Q", None))
    self.actionHelp.setText(_translate("MainWindow", "Help", None))
    self.actionHelp.setShortcut(_translate("MainWindow", "F1", None))
    self.actionSupport.setText(_translate("MainWindow", "Contact support", None))
    self.actionAbout.setText(_translate("MainWindow", "About", None))
    self.actionSettings.setText(_translate("MainWindow", "Settings", None))
    self.actionRegenerateDeterministicAddresses.setText(_translate("MainWindow", "Regenerate deterministic addresses", None))
    self.actionDeleteAllTrashedMessages.setText(_translate("MainWindow", "Delete all trashed messages", None))
    self.actionJoinChan.setText(_translate("MainWindow", "Join / Create chan", None))

def _retranslateSearchOptions(self, combo):
    """Fill *combo* with the search-scope entries shared by all message lists."""
    combo.setItemText(0, _translate("MainWindow", "All", None))
    combo.setItemText(1, _translate("MainWindow", "To", None))
    combo.setItemText(2, _translate("MainWindow", "From", None))
    combo.setItemText(3, _translate("MainWindow", "Subject", None))
    combo.setItemText(4, _translate("MainWindow", "Message", None))

def _retranslateMessagelist(self, table):
    """Enable sorting on *table* and set its four shared column headers."""
    table.setSortingEnabled(True)
    for column, label in enumerate(("To", "From", "Subject", "Received")):
        table.horizontalHeaderItem(column).setText(
            _translate("MainWindow", label, None))
import bitmessage_icons_rc
if __name__ == "__main__":
    # Stand-alone preview: build and show the generated UI without
    # launching the rest of the application.
    import sys

    application = QtGui.QApplication(sys.argv)
    window = settingsmixin.SMainWindow()
    form = Ui_MainWindow()
    form.setupUi(window)
    window.show()
    sys.exit(application.exec_())
| |
# -*- coding: utf-8 -*-
"""
pygments.lexers.jvm
~~~~~~~~~~~~~~~~~~~
Pygments lexers for JVM languages.
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \
this, combined, default, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation
from pygments.util import shebang_matches
from pygments import unistring as uni
__all__ = ['JavaLexer', 'ScalaLexer', 'GosuLexer', 'GosuTemplateLexer',
'GroovyLexer', 'IokeLexer', 'ClojureLexer', 'ClojureScriptLexer',
'KotlinLexer', 'XtendLexer', 'AspectJLexer', 'CeylonLexer',
'PigLexer', 'GoloLexer', 'JasminLexer']
class JavaLexer(RegexLexer):
    """
    For `Java <http://www.sun.com/java/>`_ source code.
    """

    name = 'Java'
    aliases = ['java']
    filenames = ['*.java']
    mimetypes = ['text/x-java']

    # DOTALL lets the /* ... */ rule span newlines; UNICODE widens \w /
    # \W so non-ASCII identifier characters are accepted.
    flags = re.MULTILINE | re.DOTALL | re.UNICODE

    tokens = {
        'root': [
            # horizontal whitespace only -- newlines are matched last
            (r'[^\S\n]+', Text),
            (r'//.*?\n', Comment.Single),
            (r'/\*.*?\*/', Comment.Multiline),
            # keywords: go before method names to avoid lexing "throw new XYZ"
            # as a method signature
            (r'(assert|break|case|catch|continue|default|do|else|finally|for|'
             r'if|goto|instanceof|new|return|switch|this|throw|try|while)\b',
             Keyword),
            # method names
            (r'((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)'  # return arguments
             r'((?:[^\W\d]|\$)[\w$]*)'                  # method name
             r'(\s*)(\()',                              # signature start
             bygroups(using(this), Name.Function, Text, Operator)),
            (r'@[^\W\d][\w.]*', Name.Decorator),
            (r'(abstract|const|enum|extends|final|implements|native|private|'
             r'protected|public|static|strictfp|super|synchronized|throws|'
             r'transient|volatile)\b', Keyword.Declaration),
            (r'(boolean|byte|char|double|float|int|long|short|void)\b',
             Keyword.Type),
            (r'(package)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
            (r'(true|false|null)\b', Keyword.Constant),
            (r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text),
             'class'),
            (r'(import(?:\s+static)?)(\s+)', bygroups(Keyword.Namespace, Text),
             'import'),
            (r'"(\\\\|\\"|[^"])*"', String),
            (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
            # member access after '.'
            (r'(\.)((?:[^\W\d]|\$)[\w$]*)', bygroups(Operator, Name.Attribute)),
            (r'^\s*([^\W\d]|\$)[\w$]*:', Name.Label),
            (r'([^\W\d]|\$)[\w$]*', Name),
            # float literals, including Java 7 '_' digit separators and
            # hexadecimal floating point (0x...p...)
            (r'([0-9][0-9_]*\.([0-9][0-9_]*)?|'
             r'\.[0-9][0-9_]*)'
             r'([eE][+\-]?[0-9][0-9_]*)?[fFdD]?|'
             r'[0-9][eE][+\-]?[0-9][0-9_]*[fFdD]?|'
             r'[0-9]([eE][+\-]?[0-9][0-9_]*)?[fFdD]|'
             r'0[xX]([0-9a-fA-F][0-9a-fA-F_]*\.?|'
             r'([0-9a-fA-F][0-9a-fA-F_]*)?\.[0-9a-fA-F][0-9a-fA-F_]*)'
             r'[pP][+\-]?[0-9][0-9_]*[fFdD]?', Number.Float),
            (r'0[xX][0-9a-fA-F][0-9a-fA-F_]*[lL]?', Number.Hex),
            (r'0[bB][01][01_]*[lL]?', Number.Bin),
            (r'0[0-7_]+[lL]?', Number.Oct),
            (r'0|[1-9][0-9_]*[lL]?', Number.Integer),
            (r'[~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
            (r'\n', Text)
        ],
        # after 'class'/'interface': consume the declared type name
        'class': [
            (r'([^\W\d]|\$)[\w$]*', Name.Class, '#pop')
        ],
        # after 'package'/'import': consume the dotted, possibly
        # wildcarded, name
        'import': [
            (r'[\w.]+\*?', Name.Namespace, '#pop')
        ],
    }
class AspectJLexer(JavaLexer):
    """
    For `AspectJ <http://www.eclipse.org/aspectj/>`_ source code.

    .. versionadded:: 1.6
    """

    name = 'AspectJ'
    aliases = ['aspectj']
    filenames = ['*.aj']
    mimetypes = ['text/x-aspectj']

    # Plain identifiers that are keywords in AspectJ but not in Java.
    aj_keywords = set((
        'aspect', 'pointcut', 'privileged', 'call', 'execution',
        'initialization', 'preinitialization', 'handler', 'get', 'set',
        'staticinitialization', 'target', 'args', 'within', 'withincode',
        'cflow', 'cflowbelow', 'annotation', 'before', 'after', 'around',
        'proceed', 'throwing', 'returning', 'adviceexecution', 'declare',
        'parents', 'warning', 'error', 'soft', 'precedence', 'thisJoinPoint',
        'thisJoinPointStaticPart', 'thisEnclosingJoinPointStaticPart',
        'issingleton', 'perthis', 'pertarget', 'percflow', 'percflowbelow',
        'pertypewithin', 'lock', 'unlock', 'thisAspectInstance'
    ))
    # Inter-type declaration heads that Java tokenizes as labels
    # (identifier + ':').
    aj_inter_type = set((
        'parents:', 'warning:', 'error:', 'soft:', 'precedence:'))
    # Annotation-style inter-type declarations.
    aj_inter_type_annotation = set((
        '@type', '@method', '@constructor', '@field'))

    def get_tokens_unprocessed(self, text):
        # Run the plain Java lexer, then promote AspectJ vocabulary from
        # generic names/labels/decorators to keywords.
        for pos, kind, value in JavaLexer.get_tokens_unprocessed(self, text):
            if kind is Name and value in self.aj_keywords:
                kind = Keyword
            elif kind is Name.Label and value in self.aj_inter_type:
                # Split the trailing ':' back out as an operator.
                yield pos, Keyword, value[:-1]
                yield pos, Operator, value[-1]
                continue
            elif kind is Name.Decorator and value in self.aj_inter_type_annotation:
                kind = Keyword
            yield pos, kind, value
class ScalaLexer(RegexLexer):
    """
    For `Scala <http://www.scala-lang.org>`_ source code.
    """

    name = 'Scala'
    aliases = ['scala']
    filenames = ['*.scala']
    mimetypes = ['text/x-scala']

    flags = re.MULTILINE | re.DOTALL

    # don't use raw unicode strings!
    # Scala operator characters: ASCII symbols plus hand-enumerated
    # Unicode symbol ranges (the regex engine here has no category
    # support, so the ranges are spelled out explicitly).
    op = (u'[-~\\^\\*!%&\\\\<>\\|+=:/?@\u00a6-\u00a7\u00a9\u00ac\u00ae\u00b0-\u00b1'
          u'\u00b6\u00d7\u00f7\u03f6\u0482\u0606-\u0608\u060e-\u060f\u06e9'
          u'\u06fd-\u06fe\u07f6\u09fa\u0b70\u0bf3-\u0bf8\u0bfa\u0c7f\u0cf1-\u0cf2'
          u'\u0d79\u0f01-\u0f03\u0f13-\u0f17\u0f1a-\u0f1f\u0f34\u0f36\u0f38'
          u'\u0fbe-\u0fc5\u0fc7-\u0fcf\u109e-\u109f\u1360\u1390-\u1399\u1940'
          u'\u19e0-\u19ff\u1b61-\u1b6a\u1b74-\u1b7c\u2044\u2052\u207a-\u207c'
          u'\u208a-\u208c\u2100-\u2101\u2103-\u2106\u2108-\u2109\u2114\u2116-\u2118'
          u'\u211e-\u2123\u2125\u2127\u2129\u212e\u213a-\u213b\u2140-\u2144'
          u'\u214a-\u214d\u214f\u2190-\u2328\u232b-\u244a\u249c-\u24e9\u2500-\u2767'
          u'\u2794-\u27c4\u27c7-\u27e5\u27f0-\u2982\u2999-\u29d7\u29dc-\u29fb'
          u'\u29fe-\u2b54\u2ce5-\u2cea\u2e80-\u2ffb\u3004\u3012-\u3013\u3020'
          u'\u3036-\u3037\u303e-\u303f\u3190-\u3191\u3196-\u319f\u31c0-\u31e3'
          u'\u3200-\u321e\u322a-\u3250\u3260-\u327f\u328a-\u32b0\u32c0-\u33ff'
          u'\u4dc0-\u4dff\ua490-\ua4c6\ua828-\ua82b\ufb29\ufdfd\ufe62\ufe64-\ufe66'
          u'\uff0b\uff1c-\uff1e\uff5c\uff5e\uffe2\uffe4\uffe8-\uffee\ufffc-\ufffd]+')

    # Characters that may start/continue an identifier (letters of many
    # scripts, plus '$' and '_').
    letter = (u'[a-zA-Z\\$_\u00aa\u00b5\u00ba\u00c0-\u00d6\u00d8-\u00f6'
              u'\u00f8-\u02af\u0370-\u0373\u0376-\u0377\u037b-\u037d\u0386'
              u'\u0388-\u03f5\u03f7-\u0481\u048a-\u0556\u0561-\u0587\u05d0-\u05f2'
              u'\u0621-\u063f\u0641-\u064a\u066e-\u066f\u0671-\u06d3\u06d5'
              u'\u06ee-\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5'
              u'\u07b1\u07ca-\u07ea\u0904-\u0939\u093d\u0950\u0958-\u0961'
              u'\u0972-\u097f\u0985-\u09b9\u09bd\u09ce\u09dc-\u09e1\u09f0-\u09f1'
              u'\u0a05-\u0a39\u0a59-\u0a5e\u0a72-\u0a74\u0a85-\u0ab9\u0abd'
              u'\u0ad0-\u0ae1\u0b05-\u0b39\u0b3d\u0b5c-\u0b61\u0b71\u0b83-\u0bb9'
              u'\u0bd0\u0c05-\u0c3d\u0c58-\u0c61\u0c85-\u0cb9\u0cbd\u0cde-\u0ce1'
              u'\u0d05-\u0d3d\u0d60-\u0d61\u0d7a-\u0d7f\u0d85-\u0dc6\u0e01-\u0e30'
              u'\u0e32-\u0e33\u0e40-\u0e45\u0e81-\u0eb0\u0eb2-\u0eb3\u0ebd-\u0ec4'
              u'\u0edc-\u0f00\u0f40-\u0f6c\u0f88-\u0f8b\u1000-\u102a\u103f'
              u'\u1050-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070'
              u'\u1075-\u1081\u108e\u10a0-\u10fa\u1100-\u135a\u1380-\u138f'
              u'\u13a0-\u166c\u166f-\u1676\u1681-\u169a\u16a0-\u16ea\u16ee-\u1711'
              u'\u1720-\u1731\u1740-\u1751\u1760-\u1770\u1780-\u17b3\u17dc'
              u'\u1820-\u1842\u1844-\u18a8\u18aa-\u191c\u1950-\u19a9\u19c1-\u19c7'
              u'\u1a00-\u1a16\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae-\u1baf'
              u'\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c77\u1d00-\u1d2b\u1d62-\u1d77'
              u'\u1d79-\u1d9a\u1e00-\u1fbc\u1fbe\u1fc2-\u1fcc\u1fd0-\u1fdb'
              u'\u1fe0-\u1fec\u1ff2-\u1ffc\u2071\u207f\u2102\u2107\u210a-\u2113'
              u'\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u212f-\u2139'
              u'\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c7c'
              u'\u2c80-\u2ce4\u2d00-\u2d65\u2d80-\u2dde\u3006-\u3007\u3021-\u3029'
              u'\u3038-\u303a\u303c\u3041-\u3096\u309f\u30a1-\u30fa\u30ff-\u318e'
              u'\u31a0-\u31b7\u31f0-\u31ff\u3400-\u4db5\u4e00-\ua014\ua016-\ua48c'
              u'\ua500-\ua60b\ua610-\ua61f\ua62a-\ua66e\ua680-\ua697\ua722-\ua76f'
              u'\ua771-\ua787\ua78b-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822'
              u'\ua840-\ua873\ua882-\ua8b3\ua90a-\ua925\ua930-\ua946\uaa00-\uaa28'
              u'\uaa40-\uaa42\uaa44-\uaa4b\uac00-\ud7a3\uf900-\ufb1d\ufb1f-\ufb28'
              u'\ufb2a-\ufd3d\ufd50-\ufdfb\ufe70-\ufefc\uff21-\uff3a\uff41-\uff5a'
              u'\uff66-\uff6f\uff71-\uff9d\uffa0-\uffdc]')

    # Upper-case letters; an identifier starting with one of these is
    # highlighted as a class/type name in 'root'.
    upper = (u'[A-Z\\$_\u00c0-\u00d6\u00d8-\u00de\u0100\u0102\u0104\u0106\u0108'
             u'\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c'
             u'\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e\u0130'
             u'\u0132\u0134\u0136\u0139\u013b\u013d\u013f\u0141\u0143\u0145'
             u'\u0147\u014a\u014c\u014e\u0150\u0152\u0154\u0156\u0158\u015a'
             u'\u015c\u015e\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e'
             u'\u0170\u0172\u0174\u0176\u0178-\u0179\u017b\u017d\u0181-\u0182'
             u'\u0184\u0186-\u0187\u0189-\u018b\u018e-\u0191\u0193-\u0194'
             u'\u0196-\u0198\u019c-\u019d\u019f-\u01a0\u01a2\u01a4\u01a6-\u01a7'
             u'\u01a9\u01ac\u01ae-\u01af\u01b1-\u01b3\u01b5\u01b7-\u01b8\u01bc'
             u'\u01c4\u01c7\u01ca\u01cd\u01cf\u01d1\u01d3\u01d5\u01d7\u01d9'
             u'\u01db\u01de\u01e0\u01e2\u01e4\u01e6\u01e8\u01ea\u01ec\u01ee'
             u'\u01f1\u01f4\u01f6-\u01f8\u01fa\u01fc\u01fe\u0200\u0202\u0204'
             u'\u0206\u0208\u020a\u020c\u020e\u0210\u0212\u0214\u0216\u0218'
             u'\u021a\u021c\u021e\u0220\u0222\u0224\u0226\u0228\u022a\u022c'
             u'\u022e\u0230\u0232\u023a-\u023b\u023d-\u023e\u0241\u0243-\u0246'
             u'\u0248\u024a\u024c\u024e\u0370\u0372\u0376\u0386\u0388-\u038f'
             u'\u0391-\u03ab\u03cf\u03d2-\u03d4\u03d8\u03da\u03dc\u03de\u03e0'
             u'\u03e2\u03e4\u03e6\u03e8\u03ea\u03ec\u03ee\u03f4\u03f7'
             u'\u03f9-\u03fa\u03fd-\u042f\u0460\u0462\u0464\u0466\u0468\u046a'
             u'\u046c\u046e\u0470\u0472\u0474\u0476\u0478\u047a\u047c\u047e'
             u'\u0480\u048a\u048c\u048e\u0490\u0492\u0494\u0496\u0498\u049a'
             u'\u049c\u049e\u04a0\u04a2\u04a4\u04a6\u04a8\u04aa\u04ac\u04ae'
             u'\u04b0\u04b2\u04b4\u04b6\u04b8\u04ba\u04bc\u04be\u04c0-\u04c1'
             u'\u04c3\u04c5\u04c7\u04c9\u04cb\u04cd\u04d0\u04d2\u04d4\u04d6'
             u'\u04d8\u04da\u04dc\u04de\u04e0\u04e2\u04e4\u04e6\u04e8\u04ea'
             u'\u04ec\u04ee\u04f0\u04f2\u04f4\u04f6\u04f8\u04fa\u04fc\u04fe'
             u'\u0500\u0502\u0504\u0506\u0508\u050a\u050c\u050e\u0510\u0512'
             u'\u0514\u0516\u0518\u051a\u051c\u051e\u0520\u0522\u0531-\u0556'
             u'\u10a0-\u10c5\u1e00\u1e02\u1e04\u1e06\u1e08\u1e0a\u1e0c\u1e0e'
             u'\u1e10\u1e12\u1e14\u1e16\u1e18\u1e1a\u1e1c\u1e1e\u1e20\u1e22'
             u'\u1e24\u1e26\u1e28\u1e2a\u1e2c\u1e2e\u1e30\u1e32\u1e34\u1e36'
             u'\u1e38\u1e3a\u1e3c\u1e3e\u1e40\u1e42\u1e44\u1e46\u1e48\u1e4a'
             u'\u1e4c\u1e4e\u1e50\u1e52\u1e54\u1e56\u1e58\u1e5a\u1e5c\u1e5e'
             u'\u1e60\u1e62\u1e64\u1e66\u1e68\u1e6a\u1e6c\u1e6e\u1e70\u1e72'
             u'\u1e74\u1e76\u1e78\u1e7a\u1e7c\u1e7e\u1e80\u1e82\u1e84\u1e86'
             u'\u1e88\u1e8a\u1e8c\u1e8e\u1e90\u1e92\u1e94\u1e9e\u1ea0\u1ea2'
             u'\u1ea4\u1ea6\u1ea8\u1eaa\u1eac\u1eae\u1eb0\u1eb2\u1eb4\u1eb6'
             u'\u1eb8\u1eba\u1ebc\u1ebe\u1ec0\u1ec2\u1ec4\u1ec6\u1ec8\u1eca'
             u'\u1ecc\u1ece\u1ed0\u1ed2\u1ed4\u1ed6\u1ed8\u1eda\u1edc\u1ede'
             u'\u1ee0\u1ee2\u1ee4\u1ee6\u1ee8\u1eea\u1eec\u1eee\u1ef0\u1ef2'
             u'\u1ef4\u1ef6\u1ef8\u1efa\u1efc\u1efe\u1f08-\u1f0f\u1f18-\u1f1d'
             u'\u1f28-\u1f2f\u1f38-\u1f3f\u1f48-\u1f4d\u1f59-\u1f5f'
             u'\u1f68-\u1f6f\u1fb8-\u1fbb\u1fc8-\u1fcb\u1fd8-\u1fdb'
             u'\u1fe8-\u1fec\u1ff8-\u1ffb\u2102\u2107\u210b-\u210d\u2110-\u2112'
             u'\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u2130-\u2133'
             u'\u213e-\u213f\u2145\u2183\u2c00-\u2c2e\u2c60\u2c62-\u2c64\u2c67'
             u'\u2c69\u2c6b\u2c6d-\u2c6f\u2c72\u2c75\u2c80\u2c82\u2c84\u2c86'
             u'\u2c88\u2c8a\u2c8c\u2c8e\u2c90\u2c92\u2c94\u2c96\u2c98\u2c9a'
             u'\u2c9c\u2c9e\u2ca0\u2ca2\u2ca4\u2ca6\u2ca8\u2caa\u2cac\u2cae'
             u'\u2cb0\u2cb2\u2cb4\u2cb6\u2cb8\u2cba\u2cbc\u2cbe\u2cc0\u2cc2'
             u'\u2cc4\u2cc6\u2cc8\u2cca\u2ccc\u2cce\u2cd0\u2cd2\u2cd4\u2cd6'
             u'\u2cd8\u2cda\u2cdc\u2cde\u2ce0\u2ce2\ua640\ua642\ua644\ua646'
             u'\ua648\ua64a\ua64c\ua64e\ua650\ua652\ua654\ua656\ua658\ua65a'
             u'\ua65c\ua65e\ua662\ua664\ua666\ua668\ua66a\ua66c\ua680\ua682'
             u'\ua684\ua686\ua688\ua68a\ua68c\ua68e\ua690\ua692\ua694\ua696'
             u'\ua722\ua724\ua726\ua728\ua72a\ua72c\ua72e\ua732\ua734\ua736'
             u'\ua738\ua73a\ua73c\ua73e\ua740\ua742\ua744\ua746\ua748\ua74a'
             u'\ua74c\ua74e\ua750\ua752\ua754\ua756\ua758\ua75a\ua75c\ua75e'
             u'\ua760\ua762\ua764\ua766\ua768\ua76a\ua76c\ua76e\ua779\ua77b'
             u'\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua78b\uff21-\uff3a]')

    # Identifier: a letter, then letters/digits, optionally followed by
    # an operator suffix after a trailing '_' (Scala's mixed identifiers).
    idrest = u'%s(?:%s|[0-9])*(?:(?<=_)%s)?' % (letter, letter, op)
    # Interpolation variable name: letter followed by letters/digits.
    letter_letter_digit = u'%s(?:%s|\d)*' % (letter, letter)

    tokens = {
        'root': [
            # method names
            (r'(class|trait|object)(\s+)', bygroups(Keyword, Text), 'class'),
            (r'[^\S\n]+', Text),
            (r'//.*?\n', Comment.Single),
            (r'/\*', Comment.Multiline, 'comment'),
            (u'@%s' % idrest, Name.Decorator),
            (u'(abstract|ca(?:se|tch)|d(?:ef|o)|e(?:lse|xtends)|'
             u'f(?:inal(?:ly)?|or(?:Some)?)|i(?:f|mplicit)|'
             u'lazy|match|new|override|pr(?:ivate|otected)'
             u'|re(?:quires|turn)|s(?:ealed|uper)|'
             u't(?:h(?:is|row)|ry)|va[lr]|w(?:hile|ith)|yield)\\b|'
             u'(<[%:-]|=>|>:|[#=@_\u21D2\u2190])(\\b|(?=\\s)|$)', Keyword),
            (u':(?!%s)' % op, Keyword, 'type'),
            (u'%s%s\\b' % (upper, idrest), Name.Class),
            (r'(true|false|null)\b', Keyword.Constant),
            (r'(import|package)(\s+)', bygroups(Keyword, Text), 'import'),
            (r'(type)(\s+)', bygroups(Keyword, Text), 'type'),
            (r'""".*?"""(?!")', String),
            (r'"(\\\\|\\"|[^"])*"', String),
            (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
            (u"'%s" % idrest, Text.Symbol),
            (r'[fs]"""', String, 'interptriplestring'),  # interpolated strings
            (r'[fs]"', String, 'interpstring'),  # interpolated strings
            (r'raw"(\\\\|\\"|[^"])*"', String),  # raw strings
            # (ur'(\.)(%s|%s|`[^`]+`)' % (idrest, op), bygroups(Operator,
            # Name.Attribute)),
            (idrest, Name),
            (r'`[^`]+`', Name),
            (r'\[', Operator, 'typeparam'),
            (r'[(){};,.#]', Operator),
            (op, Operator),
            (r'([0-9][0-9]*\.[0-9]*|\.[0-9]+)([eE][+-]?[0-9]+)?[fFdD]?',
             Number.Float),
            (r'0x[0-9a-fA-F]+', Number.Hex),
            (r'[0-9]+L?', Number.Integer),
            (r'\n', Text)
        ],
        # after class/trait/object: the declared name (and optional '[')
        'class': [
            (u'(%s|%s|`[^`]+`)(\\s*)(\\[)' % (idrest, op),
             bygroups(Name.Class, Text, Operator), 'typeparam'),
            (r'\s+', Text),
            (r'\{', Operator, '#pop'),
            (r'\(', Operator, '#pop'),
            (r'//.*?\n', Comment.Single, '#pop'),
            (u'%s|%s|`[^`]+`' % (idrest, op), Name.Class, '#pop'),
        ],
        # after ':' or 'type': a type expression
        'type': [
            (r'\s+', Text),
            (r'<[%:]|>:|[#_]|forSome|type', Keyword),
            (u'([,);}]|=>|=|\u21d2)(\\s*)', bygroups(Operator, Text), '#pop'),
            (r'[({]', Operator, '#push'),
            (u'((?:%s|%s|`[^`]+`)(?:\\.(?:%s|%s|`[^`]+`))*)(\\s*)(\\[)' %
             (idrest, op, idrest, op),
             bygroups(Keyword.Type, Text, Operator), ('#pop', 'typeparam')),
            (u'((?:%s|%s|`[^`]+`)(?:\\.(?:%s|%s|`[^`]+`))*)(\\s*)$' %
             (idrest, op, idrest, op),
             bygroups(Keyword.Type, Text), '#pop'),
            (r'//.*?\n', Comment.Single, '#pop'),
            (u'\\.|%s|%s|`[^`]+`' % (idrest, op), Keyword.Type)
        ],
        # inside [...] type-parameter lists; nests via #push
        'typeparam': [
            (r'[\s,]+', Text),
            (u'<[%:]|=>|>:|[#_\u21D2]|forSome|type', Keyword),
            (r'([\])}])', Operator, '#pop'),
            (r'[(\[{]', Operator, '#push'),
            (u'\\.|%s|%s|`[^`]+`' % (idrest, op), Keyword.Type)
        ],
        # /* ... */ comments; nest via #push
        'comment': [
            (r'[^/*]+', Comment.Multiline),
            (r'/\*', Comment.Multiline, '#push'),
            (r'\*/', Comment.Multiline, '#pop'),
            (r'[*/]', Comment.Multiline)
        ],
        'import': [
            (u'(%s|\\.)+' % idrest, Name.Namespace, '#pop')
        ],
        # shared rules for f"..." / s"..." interpolated strings
        'interpstringcommon': [
            (r'[^"$\\]+', String),
            (r'\$\$', String),
            (r'\$' + letter_letter_digit, String.Interpol),
            (r'\$\{', String.Interpol, 'interpbrace'),
            (r'\\.', String),
        ],
        'interptriplestring': [
            (r'"""(?!")', String, '#pop'),
            (r'"', String),
            include('interpstringcommon'),
        ],
        'interpstring': [
            (r'"', String, '#pop'),
            include('interpstringcommon'),
        ],
        # ${ ... } inside an interpolated string; nests via #push
        'interpbrace': [
            (r'\}', String.Interpol, '#pop'),
            (r'\{', String.Interpol, '#push'),
            include('root'),
        ],
    }
class GosuLexer(RegexLexer):
    """
    For Gosu source code.

    .. versionadded:: 1.5
    """

    name = 'Gosu'
    aliases = ['gosu']
    filenames = ['*.gs', '*.gsx', '*.gsp', '*.vark']
    mimetypes = ['text/x-gosu']

    flags = re.MULTILINE | re.DOTALL

    tokens = {
        'root': [
            # method names
            (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)'  # modifiers etc.
             r'([a-zA-Z_]\w*)'                      # method name
             r'(\s*)(\()',                          # signature start
             bygroups(using(this), Name.Function, Text, Operator)),
            (r'[^\S\n]+', Text),
            (r'//.*?\n', Comment.Single),
            (r'/\*.*?\*/', Comment.Multiline),
            (r'@[a-zA-Z_][\w.]*', Name.Decorator),
            (r'(in|as|typeof|statictypeof|typeis|typeas|if|else|foreach|for|'
             r'index|while|do|continue|break|return|try|catch|finally|this|'
             r'throw|new|switch|case|default|eval|super|outer|classpath|'
             r'using)\b', Keyword),
            (r'(var|delegate|construct|function|private|internal|protected|'
             r'public|abstract|override|final|static|extends|transient|'
             r'implements|represents|readonly)\b', Keyword.Declaration),
            (r'(property\s+)(get|set)?', Keyword.Declaration),
            (r'(boolean|byte|char|double|float|int|long|short|void|block)\b',
             Keyword.Type),
            (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
            (r'(true|false|null|NaN|Infinity)\b', Keyword.Constant),
            (r'(class|interface|enhancement|enum)(\s+)([a-zA-Z_]\w*)',
             bygroups(Keyword.Declaration, Text, Name.Class)),
            (r'(uses)(\s+)([\w.]+\*?)',
             bygroups(Keyword.Namespace, Text, Name.Namespace)),
            (r'"', String, 'string'),
            # member / typeliteral access, e.g. 'foo.bar', 'foo#bar', '?.'
            (r'(\??[.#])([a-zA-Z_]\w*)',
             bygroups(Operator, Name.Attribute)),
            (r'(:)([a-zA-Z_]\w*)',
             bygroups(Operator, Name.Attribute)),
            (r'[a-zA-Z_$]\w*', Name),
            (r'and|or|not|[\\~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
            (r'[0-9]+', Number.Integer),
            (r'\n', Text)
        ],
        # template body: literal text interleaved with <% ... %> code
        # sections and ${ ... } shorthand expressions
        'templateText': [
            (r'(\\<)|(\\\$)', String),
            (r'(<%@\s+)(extends|params)',
             bygroups(Operator, Name.Decorator), 'stringTemplate'),
            (r'<%!--.*?--%>', Comment.Multiline),
            (r'(<%)|(<%=)', Operator, 'stringTemplate'),
            (r'\$\{', Operator, 'stringTemplateShorthand'),
            (r'.', String)
        ],
        # double-quoted string; Gosu strings may embed template markup
        'string': [
            (r'"', String, '#pop'),
            include('templateText')
        ],
        # inside <% ... %>: ordinary Gosu code until the closing '%>'
        'stringTemplate': [
            (r'"', String, 'string'),
            (r'%>', Operator, '#pop'),
            include('root')
        ],
        # inside ${ ... }: ordinary Gosu code until the matching '}'
        'stringTemplateShorthand': [
            (r'"', String, 'string'),
            (r'\{', Operator, 'stringTemplateShorthand'),
            (r'\}', Operator, '#pop'),
            include('root')
        ],
    }
class GosuTemplateLexer(Lexer):
    """
    For Gosu templates.

    .. versionadded:: 1.5
    """

    name = 'Gosu Template'
    aliases = ['gst']
    filenames = ['*.gst']
    mimetypes = ['text/x-gosu-template']

    def get_tokens_unprocessed(self, text):
        # A template is just Gosu tokenized from the 'templateText'
        # start state, so delegate everything to a throwaway GosuLexer.
        delegate = GosuLexer()
        for triple in delegate.get_tokens_unprocessed(text, ['templateText']):
            yield triple
class GroovyLexer(RegexLexer):
    """
    For `Groovy <http://groovy.codehaus.org/>`_ source code.
    .. versionadded:: 1.5
    """
    name = 'Groovy'
    aliases = ['groovy']
    filenames = ['*.groovy','*.gradle']
    mimetypes = ['text/x-groovy']
    flags = re.MULTILINE | re.DOTALL
    # State machine: 'root' only consumes an optional leading shebang and
    # then falls through to 'base', which holds the real token rules.
    # Rule order matters -- earlier patterns take precedence.
    tokens = {
        'root': [
            # Groovy allows a file to start with a shebang
            (r'#!(.*?)$', Comment.Preproc, 'base'),
            default('base'),
        ],
        'base': [
            # method names
            (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
             r'([a-zA-Z_]\w*)' # method name
             r'(\s*)(\()', # signature start
             bygroups(using(this), Name.Function, Text, Operator)),
            (r'[^\S\n]+', Text),
            (r'//.*?\n', Comment.Single),
            (r'/\*.*?\*/', Comment.Multiline),
            (r'@[a-zA-Z_][\w.]*', Name.Decorator),
            (r'(assert|break|case|catch|continue|default|do|else|finally|for|'
             r'if|goto|instanceof|new|return|switch|this|throw|try|while|in|as)\b',
             Keyword),
            (r'(abstract|const|enum|extends|final|implements|native|private|'
             r'protected|public|static|strictfp|super|synchronized|throws|'
             r'transient|volatile)\b', Keyword.Declaration),
            (r'(def|boolean|byte|char|double|float|int|long|short|void)\b',
             Keyword.Type),
            (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
            (r'(true|false|null)\b', Keyword.Constant),
            (r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text),
             'class'),
            (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
            # string forms: triple-quoted, double/single-quoted,
            # dollar-slashy ($/.../$) and slashy (/.../) strings
            (r'""".*?"""', String.Double),
            (r"'''.*?'''", String.Single),
            (r'"(\\\\|\\"|[^"])*"', String.Double),
            (r"'(\\\\|\\'|[^'])*'", String.Single),
            (r'\$/((?!/\$).)*/\$', String),
            (r'/(\\\\|\\"|[^/])*/', String),
            (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
            (r'(\.)([a-zA-Z_]\w*)', bygroups(Operator, Name.Attribute)),
            (r'[a-zA-Z_]\w*:', Name.Label),
            (r'[a-zA-Z_$]\w*', Name),
            (r'[~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
            (r'0x[0-9a-fA-F]+', Number.Hex),
            (r'[0-9]+L?', Number.Integer),
            (r'\n', Text)
        ],
        # single-token helper states entered from 'base'
        'class': [
            (r'[a-zA-Z_]\w*', Name.Class, '#pop')
        ],
        'import': [
            (r'[\w.]+\*?', Name.Namespace, '#pop')
        ],
    }
    def analyse_text(text):
        # Heuristic for guess_lexer(): a 'groovy' shebang is a strong hint.
        return shebang_matches(text, r'groovy')
class IokeLexer(RegexLexer):
    """
    For `Ioke <http://ioke.org/>`_ (a strongly typed, dynamic,
    prototype based programming language) source.
    .. versionadded:: 1.4
    """
    name = 'Ioke'
    filenames = ['*.ik']
    aliases = ['ioke', 'ik']
    mimetypes = ['text/x-iokesrc']
    # The helper states tokenize string/regex bodies; 'interpolatableText'
    # is shared by them to handle escapes and #{...} interpolation (which
    # recursively re-enters 'root').  Rule order in 'root' matters: longer
    # and more specific alternatives must come before the generic name
    # rules at the bottom.
    tokens = {
        'interpolatableText': [
            (r'(\\b|\\e|\\t|\\n|\\f|\\r|\\"|\\\\|\\#|\\\Z|\\u[0-9a-fA-F]{1,4}'
             r'|\\[0-3]?[0-7]?[0-7])', String.Escape),
            (r'#\{', Punctuation, 'textInterpolationRoot')
        ],
        'text': [
            (r'(?<!\\)"', String, '#pop'),
            include('interpolatableText'),
            (r'[^"]', String)
        ],
        'documentation': [
            (r'(?<!\\)"', String.Doc, '#pop'),
            include('interpolatableText'),
            (r'[^"]', String.Doc)
        ],
        'textInterpolationRoot': [
            (r'\}', Punctuation, '#pop'),
            include('root')
        ],
        'slashRegexp': [
            (r'(?<!\\)/[im-psux]*', String.Regex, '#pop'),
            include('interpolatableText'),
            (r'\\/', String.Regex),
            (r'[^/]', String.Regex)
        ],
        'squareRegexp': [
            (r'(?<!\\)][im-psux]*', String.Regex, '#pop'),
            include('interpolatableText'),
            (r'\\]', String.Regex),
            (r'[^\]]', String.Regex)
        ],
        'squareText': [
            (r'(?<!\\)]', String, '#pop'),
            include('interpolatableText'),
            (r'[^\]]', String)
        ],
        'root': [
            (r'\n', Text),
            (r'\s+', Text),
            # Comments
            (r';(.*?)\n', Comment),
            (r'\A#!(.*?)\n', Comment),
            # Regexps
            (r'#/', String.Regex, 'slashRegexp'),
            (r'#r\[', String.Regex, 'squareRegexp'),
            # Symbols
            (r':[\w!:?]+', String.Symbol),
            (r'[\w!:?]+:(?![\w!?])', String.Other),
            (r':"(\\\\|\\"|[^"])*"', String.Symbol),
            # Documentation: a string right after fn(/method(/macro( etc.
            # (detected via lookbehind) is a doc string
            (r'((?<=fn\()|(?<=fnx\()|(?<=method\()|(?<=macro\()|(?<=lecro\()'
             r'|(?<=syntax\()|(?<=dmacro\()|(?<=dlecro\()|(?<=dlecrox\()'
             r'|(?<=dsyntax\())\s*"', String.Doc, 'documentation'),
            # Text
            (r'"', String, 'text'),
            (r'#\[', String, 'squareText'),
            # Mimic
            (r'\w[\w!:?]+(?=\s*=.*mimic\s)', Name.Entity),
            # Assignment
            (r'[a-zA-Z_][\w!:?]*(?=[\s]*[+*/-]?=[^=].*($|\.))',
             Name.Variable),
            # keywords
            (r'(break|cond|continue|do|ensure|for|for:dict|for:set|if|let|'
             r'loop|p:for|p:for:dict|p:for:set|return|unless|until|while|'
             r'with)(?![\w!:?])', Keyword.Reserved),
            # Origin
            (r'(eval|mimic|print|println)(?![\w!:?])', Keyword),
            # Base
            (r'(cell\?|cellNames|cellOwner\?|cellOwner|cells|cell|'
             r'documentation|hash|identity|mimic|removeCell\!|undefineCell\!)'
             r'(?![\w!:?])', Keyword),
            # Ground
            (r'(stackTraceAsText)(?![\w!:?])', Keyword),
            # DefaultBehaviour Literals
            (r'(dict|list|message|set)(?![\w!:?])', Keyword.Reserved),
            # DefaultBehaviour Case
            (r'(case|case:and|case:else|case:nand|case:nor|case:not|case:or|'
             r'case:otherwise|case:xor)(?![\w!:?])', Keyword.Reserved),
            # DefaultBehaviour Reflection
            (r'(asText|become\!|derive|freeze\!|frozen\?|in\?|is\?|kind\?|'
             r'mimic\!|mimics|mimics\?|prependMimic\!|removeAllMimics\!|'
             r'removeMimic\!|same\?|send|thaw\!|uniqueHexId)'
             r'(?![\w!:?])', Keyword),
            # DefaultBehaviour Aspects
            (r'(after|around|before)(?![\w!:?])', Keyword.Reserved),
            # DefaultBehaviour
            (r'(kind|cellDescriptionDict|cellSummary|genSym|inspect|notice)'
             r'(?![\w!:?])', Keyword),
            (r'(use|destructuring)', Keyword.Reserved),
            # DefaultBehavior BaseBehavior
            (r'(cell\?|cellOwner\?|cellOwner|cellNames|cells|cell|'
             r'documentation|identity|removeCell!|undefineCell)'
             r'(?![\w!:?])', Keyword),
            # DefaultBehavior Internal
            (r'(internal:compositeRegexp|internal:concatenateText|'
             r'internal:createDecimal|internal:createNumber|'
             r'internal:createRegexp|internal:createText)'
             r'(?![\w!:?])', Keyword.Reserved),
            # DefaultBehaviour Conditions
            (r'(availableRestarts|bind|error\!|findRestart|handle|'
             r'invokeRestart|rescue|restart|signal\!|warn\!)'
             r'(?![\w!:?])', Keyword.Reserved),
            # constants
            (r'(nil|false|true)(?![\w!:?])', Name.Constant),
            # names
            (r'(Arity|Base|Call|Condition|DateTime|Aspects|Pointcut|'
             r'Assignment|BaseBehavior|Boolean|Case|AndCombiner|Else|'
             r'NAndCombiner|NOrCombiner|NotCombiner|OrCombiner|XOrCombiner|'
             r'Conditions|Definitions|FlowControl|Internal|Literals|'
             r'Reflection|DefaultMacro|DefaultMethod|DefaultSyntax|Dict|'
             r'FileSystem|Ground|Handler|Hook|IO|IokeGround|Struct|'
             r'LexicalBlock|LexicalMacro|List|Message|Method|Mixins|'
             r'NativeMethod|Number|Origin|Pair|Range|Reflector|Regexp Match|'
             r'Regexp|Rescue|Restart|Runtime|Sequence|Set|Symbol|'
             r'System|Text|Tuple)(?![\w!:?])', Name.Builtin),
            # functions (includes the unicode lambda forms)
            (u'(generateMatchMethod|aliasMethod|\u03bb|\u028E|fnx|fn|method|'
             u'dmacro|dlecro|syntax|macro|dlecrox|lecrox|lecro|syntax)'
             u'(?![\w!:?])', Name.Function),
            # Numbers
            (r'-?0[xX][0-9a-fA-F]+', Number.Hex),
            (r'-?(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
            (r'-?\d+', Number.Integer),
            (r'#\(', Punctuation),
            # Operators (longest alternatives first so multi-char
            # operators are not split into pieces)
            (r'(&&>>|\|\|>>|\*\*>>|:::|::|\.\.\.|===|\*\*>|\*\*=|&&>|&&=|'
             r'\|\|>|\|\|=|\->>|\+>>|!>>|<>>>|<>>|&>>|%>>|#>>|@>>|/>>|\*>>|'
             r'\?>>|\|>>|\^>>|~>>|\$>>|=>>|<<=|>>=|<=>|<\->|=~|!~|=>|\+\+|'
             r'\-\-|<=|>=|==|!=|&&|\.\.|\+=|\-=|\*=|\/=|%=|&=|\^=|\|=|<\-|'
             r'\+>|!>|<>|&>|%>|#>|\@>|\/>|\*>|\?>|\|>|\^>|~>|\$>|<\->|\->|'
             r'<<|>>|\*\*|\?\||\?&|\|\||>|<|\*|\/|%|\+|\-|&|\^|\||=|\$|!|~|'
             u'\\?|#|\u2260|\u2218|\u2208|\u2209)', Operator),
            (r'(and|nand|or|xor|nor|return|import)(?![\w!?])',
             Operator),
            # Punctuation
            (r'(\`\`|\`|\'\'|\'|\.|\,|@@|@|\[|\]|\(|\)|\{|\})', Punctuation),
            # kinds
            (r'[A-Z][\w!:?]*', Name.Class),
            # default cellnames
            (r'[a-z_][\w!:?]*', Name)
        ]
    }
class ClojureLexer(RegexLexer):
    """
    Lexer for `Clojure <http://clojure.org/>`_ source code.
    .. versionadded:: 0.11
    """
    name = 'Clojure'
    aliases = ['clojure', 'clj']
    filenames = ['*.clj']
    mimetypes = ['text/x-clojure', 'application/x-clojure']
    # Word lists used by the rules below; highlighting picks the first
    # matching category (special form > declaration > builtin).
    special_forms = (
        '.', 'def', 'do', 'fn', 'if', 'let', 'new', 'quote', 'var', 'loop'
    )
    # It's safe to consider 'ns' a declaration thing because it defines a new
    # namespace.
    declarations = (
        'def-', 'defn', 'defn-', 'defmacro', 'defmulti', 'defmethod',
        'defstruct', 'defonce', 'declare', 'definline', 'definterface',
        'defprotocol', 'defrecord', 'deftype', 'defproject', 'ns'
    )
    builtins = (
        '*', '+', '-', '->', '/', '<', '<=', '=', '==', '>', '>=', '..',
        'accessor', 'agent', 'agent-errors', 'aget', 'alength', 'all-ns',
        'alter', 'and', 'append-child', 'apply', 'array-map', 'aset',
        'aset-boolean', 'aset-byte', 'aset-char', 'aset-double', 'aset-float',
        'aset-int', 'aset-long', 'aset-short', 'assert', 'assoc', 'await',
        'await-for', 'bean', 'binding', 'bit-and', 'bit-not', 'bit-or',
        'bit-shift-left', 'bit-shift-right', 'bit-xor', 'boolean', 'branch?',
        'butlast', 'byte', 'cast', 'char', 'children', 'class',
        'clear-agent-errors', 'comment', 'commute', 'comp', 'comparator',
        'complement', 'concat', 'conj', 'cons', 'constantly', 'cond', 'if-not',
        'construct-proxy', 'contains?', 'count', 'create-ns', 'create-struct',
        'cycle', 'dec', 'deref', 'difference', 'disj', 'dissoc', 'distinct',
        'doall', 'doc', 'dorun', 'doseq', 'dosync', 'dotimes', 'doto',
        'double', 'down', 'drop', 'drop-while', 'edit', 'end?', 'ensure',
        'eval', 'every?', 'false?', 'ffirst', 'file-seq', 'filter', 'find',
        'find-doc', 'find-ns', 'find-var', 'first', 'float', 'flush', 'for',
        'fnseq', 'frest', 'gensym', 'get-proxy-class', 'get',
        'hash-map', 'hash-set', 'identical?', 'identity', 'if-let', 'import',
        'in-ns', 'inc', 'index', 'insert-child', 'insert-left', 'insert-right',
        'inspect-table', 'inspect-tree', 'instance?', 'int', 'interleave',
        'intersection', 'into', 'into-array', 'iterate', 'join', 'key', 'keys',
        'keyword', 'keyword?', 'last', 'lazy-cat', 'lazy-cons', 'left',
        'lefts', 'line-seq', 'list*', 'list', 'load', 'load-file',
        'locking', 'long', 'loop', 'macroexpand', 'macroexpand-1',
        'make-array', 'make-node', 'map', 'map-invert', 'map?', 'mapcat',
        'max', 'max-key', 'memfn', 'merge', 'merge-with', 'meta', 'min',
        'min-key', 'name', 'namespace', 'neg?', 'new', 'newline', 'next',
        'nil?', 'node', 'not', 'not-any?', 'not-every?', 'not=', 'ns-imports',
        'ns-interns', 'ns-map', 'ns-name', 'ns-publics', 'ns-refers',
        'ns-resolve', 'ns-unmap', 'nth', 'nthrest', 'or', 'parse', 'partial',
        'path', 'peek', 'pop', 'pos?', 'pr', 'pr-str', 'print', 'print-str',
        'println', 'println-str', 'prn', 'prn-str', 'project', 'proxy',
        'proxy-mappings', 'quot', 'rand', 'rand-int', 'range', 're-find',
        're-groups', 're-matcher', 're-matches', 're-pattern', 're-seq',
        'read', 'read-line', 'reduce', 'ref', 'ref-set', 'refer', 'rem',
        'remove', 'remove-method', 'remove-ns', 'rename', 'rename-keys',
        'repeat', 'replace', 'replicate', 'resolve', 'rest', 'resultset-seq',
        'reverse', 'rfirst', 'right', 'rights', 'root', 'rrest', 'rseq',
        'second', 'select', 'select-keys', 'send', 'send-off', 'seq',
        'seq-zip', 'seq?', 'set', 'short', 'slurp', 'some', 'sort',
        'sort-by', 'sorted-map', 'sorted-map-by', 'sorted-set',
        'special-symbol?', 'split-at', 'split-with', 'str', 'string?',
        'struct', 'struct-map', 'subs', 'subvec', 'symbol', 'symbol?',
        'sync', 'take', 'take-nth', 'take-while', 'test', 'time', 'to-array',
        'to-array-2d', 'tree-seq', 'true?', 'union', 'up', 'update-proxy',
        'val', 'vals', 'var-get', 'var-set', 'var?', 'vector', 'vector-zip',
        'vector?', 'when', 'when-first', 'when-let', 'when-not',
        'with-local-vars', 'with-meta', 'with-open', 'with-out-str',
        'xml-seq', 'xml-zip', 'zero?', 'zipmap', 'zipper')
    # valid names for identifiers
    # well, names can only not consist fully of numbers
    # but this should be good enough for now
    # TODO / should divide keywords/symbols into namespace/rest
    # but that's hard, so just pretend / is part of the name
    valid_name = r'(?!#)[\w!$%*+<=>?/.#-]+'
    tokens = {
        'root': [
            # the comments - always starting with semicolon
            # and going to the end of the line
            (r';.*$', Comment.Single),
            # whitespaces - usually not relevant
            (r'[,\s]+', Text),
            # numbers
            (r'-?\d+\.\d+', Number.Float),
            (r'-?\d+', Number.Integer),
            (r'0x-?[abcdef\d]+', Number.Hex),
            # strings, symbols and characters
            (r'"(\\\\|\\"|[^"])*"', String),
            (r"'" + valid_name, String.Symbol),
            (r"\\(.|[a-z]+)", String.Char),
            # keywords
            (r'::?#?' + valid_name, String.Symbol),
            # special operators
            (r'~@|[`\'#^~&@]', Operator),
            # highlight the special forms
            # NOTE(review): suffix=' ' means these match only when followed
            # by a literal space -- confirm against upstream before changing
            (words(special_forms, suffix=' '), Keyword),
            # Technically, only the special forms are 'keywords'. The problem
            # is that only treating them as keywords means that things like
            # 'defn' and 'ns' need to be highlighted as builtins. This is ugly
            # and weird for most styles. So, as a compromise we're going to
            # highlight them as Keyword.Declarations.
            (words(declarations, suffix=' '), Keyword.Declaration),
            # highlight the builtins
            (words(builtins, suffix=' '), Name.Builtin),
            # the remaining functions
            (r'(?<=\()' + valid_name, Name.Function),
            # find the remaining variables
            (valid_name, Name.Variable),
            # Clojure accepts vector notation
            (r'(\[|\])', Punctuation),
            # Clojure accepts map notation
            (r'(\{|\})', Punctuation),
            # the famous parentheses!
            (r'(\(|\))', Punctuation),
        ],
    }
class ClojureScriptLexer(ClojureLexer):
    """
    Lexer for `ClojureScript <http://clojure.org/clojurescript>`_
    source code.
    .. versionadded:: 2.0
    """
    # All token rules are inherited unchanged from ClojureLexer; only the
    # registration metadata (name, aliases, file globs, MIME types) differs.
    name = 'ClojureScript'
    aliases = ['clojurescript', 'cljs']
    filenames = ['*.cljs']
    mimetypes = ['text/x-clojurescript', 'application/x-clojurescript']
class TeaLangLexer(RegexLexer):
    """
    For `Tea <http://teatrove.org/>`_ source code. Only used within a
    TeaTemplateLexer.
    .. versionadded:: 1.5
    """
    # Deliberately has no name/aliases/filenames: this lexer is not meant
    # to be selected on its own, only embedded by a TeaTemplateLexer.
    flags = re.MULTILINE | re.DOTALL
    tokens = {
        'root': [
            # method names
            (r'^(\s*(?:[a-zA-Z_][\w\.\[\]]*\s+)+?)' # return arguments
             r'([a-zA-Z_]\w*)' # method name
             r'(\s*)(\()', # signature start
             bygroups(using(this), Name.Function, Text, Operator)),
            (r'[^\S\n]+', Text),
            (r'//.*?\n', Comment.Single),
            (r'/\*.*?\*/', Comment.Multiline),
            (r'@[a-zA-Z_][\w\.]*', Name.Decorator),
            (r'(and|break|else|foreach|if|in|not|or|reverse)\b',
             Keyword),
            (r'(as|call|define)\b', Keyword.Declaration),
            (r'(true|false|null)\b', Keyword.Constant),
            (r'(template)(\s+)', bygroups(Keyword.Declaration, Text), 'template'),
            (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
            (r'"(\\\\|\\"|[^"])*"', String),
            (r'\'(\\\\|\\\'|[^\'])*\'', String),
            (r'(\.)([a-zA-Z_]\w*)', bygroups(Operator, Name.Attribute)),
            (r'[a-zA-Z_]\w*:', Name.Label),
            (r'[a-zA-Z_\$]\w*', Name),
            (r'(isa|[.]{3}|[.]{2}|[=#!<>+-/%&;,.\*\\\(\)\[\]\{\}])', Operator),
            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
            (r'0x[0-9a-fA-F]+', Number.Hex),
            (r'[0-9]+L?', Number.Integer),
            (r'\n', Text)
        ],
        # single-token helper states for the name after template/import
        'template': [
            (r'[a-zA-Z_]\w*', Name.Class, '#pop')
        ],
        'import': [
            (r'[\w.]+\*?', Name.Namespace, '#pop')
        ],
    }
class CeylonLexer(RegexLexer):
    """
    For `Ceylon <http://ceylon-lang.org/>`_ source code.
    .. versionadded:: 1.6
    """
    name = 'Ceylon'
    aliases = ['ceylon']
    filenames = ['*.ceylon']
    mimetypes = ['text/x-ceylon']
    flags = re.MULTILINE | re.DOTALL
    #: optional Comment or Whitespace
    _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
    tokens = {
        'root': [
            # method names
            (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
             r'([a-zA-Z_]\w*)' # method name
             r'(\s*)(\()', # signature start
             bygroups(using(this), Name.Function, Text, Operator)),
            (r'[^\S\n]+', Text),
            (r'//.*?\n', Comment.Single),
            # block comments may nest, so they get their own state
            (r'/\*', Comment.Multiline, 'comment'),
            (r'(shared|abstract|formal|default|actual|variable|deprecated|small|'
             r'late|literal|doc|by|see|throws|optional|license|tagged|final|native|'
             r'annotation|sealed)\b', Name.Decorator),
            (r'(break|case|catch|continue|else|finally|for|in|'
             r'if|return|switch|this|throw|try|while|is|exists|dynamic|'
             r'nonempty|then|outer|assert|let)\b', Keyword),
            (r'(abstracts|extends|satisfies|'
             r'super|given|of|out|assign)\b', Keyword.Declaration),
            (r'(function|value|void|new)\b',
             Keyword.Type),
            (r'(assembly|module|package)(\s+)', bygroups(Keyword.Namespace, Text)),
            (r'(true|false|null)\b', Keyword.Constant),
            (r'(class|interface|object|alias)(\s+)',
             bygroups(Keyword.Declaration, Text), 'class'),
            (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
            (r'"(\\\\|\\"|[^"])*"', String),
            (r"'\\.'|'[^\\]'|'\\\{#[0-9a-fA-F]{4}\}'", String.Char),
            (r'".*``.*``.*"', String.Interpol),
            (r'(\.)([a-z_]\w*)',
             bygroups(Operator, Name.Attribute)),
            (r'[a-zA-Z_]\w*:', Name.Label),
            (r'[a-zA-Z_]\w*', Name),
            (r'[~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
            # numeric literals: underscore-grouped digits and optional
            # magnitude suffixes (k/M/G/T/P and m/u/n/p/f)
            (r'\d{1,3}(_\d{3})+\.\d{1,3}(_\d{3})+[kMGTPmunpf]?', Number.Float),
            (r'\d{1,3}(_\d{3})+\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?',
             Number.Float),
            (r'[0-9][0-9]*\.\d{1,3}(_\d{3})+[kMGTPmunpf]?', Number.Float),
            (r'[0-9][0-9]*\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?',
             Number.Float),
            (r'#([0-9a-fA-F]{4})(_[0-9a-fA-F]{4})+', Number.Hex),
            (r'#[0-9a-fA-F]+', Number.Hex),
            (r'\$([01]{4})(_[01]{4})+', Number.Bin),
            (r'\$[01]+', Number.Bin),
            (r'\d{1,3}(_\d{3})+[kMGTP]?', Number.Integer),
            (r'[0-9]+[kMGTP]?', Number.Integer),
            (r'\n', Text)
        ],
        'class': [
            (r'[A-Za-z_]\w*', Name.Class, '#pop')
        ],
        'import': [
            (r'[a-z][\w.]*',
             Name.Namespace, '#pop')
        ],
        # nested /* */ comments handled via #push/#pop
        'comment': [
            (r'[^*/]', Comment.Multiline),
            (r'/\*', Comment.Multiline, '#push'),
            (r'\*/', Comment.Multiline, '#pop'),
            (r'[*/]', Comment.Multiline)
        ],
    }
class KotlinLexer(RegexLexer):
    """
    For `Kotlin <http://kotlinlang.org/>`_
    source code.
    .. versionadded:: 1.5
    """
    name = 'Kotlin'
    aliases = ['kotlin']
    filenames = ['*.kt']
    mimetypes = ['text/x-kotlin']
    flags = re.MULTILINE | re.DOTALL | re.UNICODE
    # Kotlin identifiers: first char from the Unicode letter categories
    # (or _/@), then letters, digits, marks and connector punctuation;
    # kt_id additionally allows backtick-quoted names.
    kt_name = ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' +
               '[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc', 'Cf',
                                 'Mn', 'Mc') + ']*')
    kt_id = '(' + kt_name + '|`' + kt_name + '`)'
    tokens = {
        'root': [
            (r'^\s*\[.*?\]', Name.Attribute),
            (r'[^\S\n]+', Text),
            (r'\\\n', Text), # line continuation
            (r'//.*?\n', Comment.Single),
            (r'/[*].*?[*]/', Comment.Multiline),
            (r'\n', Text),
            (r'::|!!|\?[:.]', Operator),
            (r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation),
            (r'[{}]', Punctuation),
            (r'@"(""|[^"])*"', String),
            (r'"(\\\\|\\"|[^"\n])*["\n]', String),
            (r"'\\.'|'[^\\]'", String.Char),
            (r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?[flFL]?|"
             r"0[xX][0-9a-fA-F]+[Ll]?", Number),
            # declaration keywords push a state that captures the
            # declared name with the right token type
            (r'(class)(\s+)(object)', bygroups(Keyword, Text, Keyword)),
            (r'(class|interface|object)(\s+)', bygroups(Keyword, Text), 'class'),
            (r'(package|import)(\s+)', bygroups(Keyword, Text), 'package'),
            (r'(val|var)(\s+)', bygroups(Keyword, Text), 'property'),
            (r'(fun)(\s+)', bygroups(Keyword, Text), 'function'),
            (r'(abstract|annotation|as|break|by|catch|class|companion|const|'
             r'constructor|continue|crossinline|data|do|dynamic|else|enum|'
             r'external|false|final|finally|for|fun|get|if|import|in|infix|'
             r'inline|inner|interface|internal|is|lateinit|noinline|null|'
             r'object|open|operator|out|override|package|private|protected|'
             r'public|reified|return|sealed|set|super|tailrec|this|throw|'
             r'true|try|val|var|vararg|when|where|while)\b', Keyword),
            (kt_id, Name),
        ],
        'package': [
            (r'\S+', Name.Namespace, '#pop')
        ],
        'class': [
            (kt_id, Name.Class, '#pop')
        ],
        'property': [
            (kt_id, Name.Property, '#pop')
        ],
        'function': [
            (kt_id, Name.Function, '#pop')
        ],
    }
class XtendLexer(RegexLexer):
    """
    For `Xtend <http://xtend-lang.org/>`_ source code.
    .. versionadded:: 1.6
    """
    name = 'Xtend'
    aliases = ['xtend']
    filenames = ['*.xtend']
    mimetypes = ['text/x-xtend']
    flags = re.MULTILINE | re.DOTALL
    tokens = {
        'root': [
            # method names
            (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
             r'([a-zA-Z_$][\w$]*)' # method name
             r'(\s*)(\()', # signature start
             bygroups(using(this), Name.Function, Text, Operator)),
            (r'[^\S\n]+', Text),
            (r'//.*?\n', Comment.Single),
            (r'/\*.*?\*/', Comment.Multiline),
            (r'@[a-zA-Z_][\w.]*', Name.Decorator),
            (r'(assert|break|case|catch|continue|default|do|else|finally|for|'
             r'if|goto|instanceof|new|return|switch|this|throw|try|while|IF|'
             r'ELSE|ELSEIF|ENDIF|FOR|ENDFOR|SEPARATOR|BEFORE|AFTER)\b',
             Keyword),
            (r'(def|abstract|const|enum|extends|final|implements|native|private|'
             r'protected|public|static|strictfp|super|synchronized|throws|'
             r'transient|volatile)\b', Keyword.Declaration),
            (r'(boolean|byte|char|double|float|int|long|short|void)\b',
             Keyword.Type),
            (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
            (r'(true|false|null)\b', Keyword.Constant),
            (r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text),
             'class'),
            (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
            # rich strings open with ''' or a right guillemet (U+00BB)
            (r"(''')", String, 'template'),
            (u'(\u00BB)', String, 'template'),
            (r'"(\\\\|\\"|[^"])*"', String),
            (r"'(\\\\|\\'|[^'])*'", String),
            (r'[a-zA-Z_]\w*:', Name.Label),
            (r'[a-zA-Z_$]\w*', Name),
            (r'[~^*!%&\[\](){}<>\|+=:;,./?-]', Operator),
            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
            (r'0x[0-9a-fA-F]+', Number.Hex),
            (r'[0-9]+L?', Number.Integer),
            (r'\n', Text)
        ],
        'class': [
            (r'[a-zA-Z_]\w*', Name.Class, '#pop')
        ],
        'import': [
            (r'[\w.]+\*?', Name.Namespace, '#pop')
        ],
        # rich-string body; closed by ''' or a left guillemet (U+00AB)
        'template': [
            (r"'''", String, '#pop'),
            (u'\u00AB', String, '#pop'),
            (r'.', String)
        ],
    }
class PigLexer(RegexLexer):
    """
    For `Pig Latin <https://pig.apache.org/>`_ source code.
    .. versionadded:: 2.0
    """
    name = 'Pig'
    aliases = ['pig']
    filenames = ['*.pig']
    mimetypes = ['text/x-pig']
    # IGNORECASE: the word lists below mix cases but match either way.
    flags = re.MULTILINE | re.IGNORECASE
    tokens = {
        'root': [
            (r'\s+', Text),
            (r'--.*', Comment),
            (r'/\*[\w\W]*?\*/', Comment.Multiline),
            (r'\\\n', Text),
            (r'\\', Text),
            (r'\'(?:\\[ntbrf\\\']|\\u[0-9a-f]{4}|[^\'\\\n\r])*\'', String),
            include('keywords'),
            include('types'),
            include('builtins'),
            include('punct'),
            include('operators'),
            (r'[0-9]*\.[0-9]+(e[0-9]+)?[fd]?', Number.Float),
            (r'0x[0-9a-f]+', Number.Hex),
            (r'[0-9]+L?', Number.Integer),
            (r'\n', Text),
            (r'([a-z_]\w*)(\s*)(\()',
             bygroups(Name.Function, Text, Punctuation)),
            (r'[()#:]', Text),
            (r'[^(:#\'")\s]+', Text),
            (r'\S+\s+', Text) # TODO: make tests pass without \s+
        ],
        'keywords': [
            (r'(assert|and|any|all|arrange|as|asc|bag|by|cache|CASE|cat|cd|cp|'
             r'%declare|%default|define|dense|desc|describe|distinct|du|dump|'
             r'eval|exex|explain|filter|flatten|foreach|full|generate|group|'
             r'help|if|illustrate|import|inner|input|into|is|join|kill|left|'
             r'limit|load|ls|map|matches|mkdir|mv|not|null|onschema|or|order|'
             r'outer|output|parallel|pig|pwd|quit|register|returns|right|rm|'
             r'rmf|rollup|run|sample|set|ship|split|stderr|stdin|stdout|store|'
             r'stream|through|union|using|void)\b', Keyword)
        ],
        'builtins': [
            (r'(AVG|BinStorage|cogroup|CONCAT|copyFromLocal|copyToLocal|COUNT|'
             r'cross|DIFF|MAX|MIN|PigDump|PigStorage|SIZE|SUM|TextLoader|'
             r'TOKENIZE)\b', Name.Builtin)
        ],
        'types': [
            (r'(bytearray|BIGINTEGER|BIGDECIMAL|chararray|datetime|double|float|'
             r'int|long|tuple)\b', Keyword.Type)
        ],
        'punct': [
            (r'[;(){}\[\]]', Punctuation),
        ],
        'operators': [
            (r'[#=,./%+\-?]', Operator),
            (r'(eq|gt|lt|gte|lte|neq|matches)\b', Operator),
            (r'(==|<=|<|>=|>|!=)', Operator),
        ],
    }
class GoloLexer(RegexLexer):
    """
    For `Golo <http://golo-lang.org/>`_ source code.
    .. versionadded:: 2.0
    """
    name = 'Golo'
    filenames = ['*.golo']
    aliases = ['golo']
    tokens = {
        'root': [
            (r'[^\S\n]+', Text),
            (r'#.*$', Comment),
            (r'(\^|\.\.\.|:|\?:|->|==|!=|=|\+|\*|%|/|<=|<|>=|>|=|\.)',
             Operator),
            (r'(?<=[^-])(-)(?=[^-])', Operator),
            (r'(?<=[^`])(is|isnt|and|or|not|oftype|in|orIfNull)\b', Operator.Word),
            (r'[]{}|(),[]', Punctuation),
            # declaration keywords push a state that captures the name
            (r'(module|import)(\s+)',
             bygroups(Keyword.Namespace, Text),
             'modname'),
            (r'\b([a-zA-Z_][\w$.]*)(::)', bygroups(Name.Namespace, Punctuation)),
            (r'\b([a-zA-Z_][\w$]*(?:\.[a-zA-Z_][\w$]*)+)\b', Name.Namespace),
            (r'(let|var)(\s+)',
             bygroups(Keyword.Declaration, Text),
             'varname'),
            (r'(struct)(\s+)',
             bygroups(Keyword.Declaration, Text),
             'structname'),
            (r'(function)(\s+)',
             bygroups(Keyword.Declaration, Text),
             'funcname'),
            (r'(null|true|false)\b', Keyword.Constant),
            (r'(augment|pimp'
             r'|if|else|case|match|return'
             r'|case|when|then|otherwise'
             r'|while|for|foreach'
             r'|try|catch|finally|throw'
             r'|local'
             r'|continue|break)\b', Keyword),
            (r'(map|array|list|set|vector|tuple)(\[)',
             bygroups(Name.Builtin, Punctuation)),
            (r'(print|println|readln|raise|fun'
             r'|asInterfaceInstance)\b', Name.Builtin),
            (r'(`?[a-zA-Z_][\w$]*)(\()',
             bygroups(Name.Function, Punctuation)),
            (r'-?[\d_]*\.[\d_]*([eE][+-]?\d[\d_]*)?F?', Number.Float),
            (r'0[0-7]+j?', Number.Oct),
            (r'0[xX][a-fA-F0-9]+', Number.Hex),
            (r'-?\d[\d_]*L', Number.Integer.Long),
            (r'-?\d[\d_]*', Number.Integer),
            # NOTE(review): non-raw string containing '\w' -- works, but
            # raises a DeprecationWarning on newer Pythons; should be r''
            ('`?[a-zA-Z_][\w$]*', Name),
            (r'@[a-zA-Z_][\w$.]*', Name.Decorator),
            (r'"""', String, combined('stringescape', 'triplestring')),
            (r'"', String, combined('stringescape', 'doublestring')),
            (r"'", String, combined('stringescape', 'singlestring')),
            (r'----((.|\n)*?)----', String.Doc)
        ],
        'funcname': [
            (r'`?[a-zA-Z_][\w$]*', Name.Function, '#pop'),
        ],
        'modname': [
            (r'[a-zA-Z_][\w$.]*\*?', Name.Namespace, '#pop')
        ],
        'structname': [
            (r'`?[\w.]+\*?', Name.Class, '#pop')
        ],
        'varname': [
            (r'`?[a-zA-Z_][\w$]*', Name.Variable, '#pop'),
        ],
        'string': [
            (r'[^\\\'"\n]+', String),
            (r'[\'"\\]', String)
        ],
        'stringescape': [
            (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
             r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
        ],
        'triplestring': [
            (r'"""', String, '#pop'),
            include('string'),
            (r'\n', String),
        ],
        'doublestring': [
            (r'"', String.Double, '#pop'),
            include('string'),
        ],
        'singlestring': [
            (r"'", String, '#pop'),
            include('string'),
        ],
        # NOTE(review): this state is never referenced by any rule above;
        # it appears to be copied from PigLexer -- harmless but dead data.
        'operators': [
            (r'[#=,./%+\-?]', Operator),
            (r'(eq|gt|lt|gte|lte|neq|matches)\b', Operator),
            (r'(==|<=|<|>=|>|!=)', Operator),
        ],
    }
class JasminLexer(RegexLexer):
"""
For `Jasmin <http://jasmin.sourceforge.net/>`_ assembly code.
.. versionadded:: 2.0
"""
name = 'Jasmin'
aliases = ['jasmin', 'jasminxt']
filenames = ['*.j']
_whitespace = r' \n\t\r'
_ws = r'(?:[%s]+)' % _whitespace
_separator = r'%s:=' % _whitespace
_break = r'(?=[%s]|$)' % _separator
_name = r'[^%s]+' % _separator
_unqualified_name = r'(?:[^%s.;\[/]+)' % _separator
tokens = {
'default': [
(r'\n', Text, '#pop'),
(r"'", String.Single, ('#pop', 'quote')),
(r'"', String.Double, 'string'),
(r'=', Punctuation),
(r':', Punctuation, 'label'),
(_ws, Text),
(r';.*', Comment.Single),
(r'(\$[-+])?0x-?[\da-fA-F]+%s' % _break, Number.Hex),
(r'(\$[-+]|\+)?-?\d+%s' % _break, Number.Integer),
(r'-?(\d+\.\d*|\.\d+)([eE][-+]?\d+)?[fFdD]?'
r'[\x00-\x08\x0b\x0c\x0e-\x1f]*%s' % _break, Number.Float),
(r'\$%s' % _name, Name.Variable),
# Directives
(r'\.annotation%s' % _break, Keyword.Reserved, 'annotation'),
(r'(\.attribute|\.bytecode|\.debug|\.deprecated|\.enclosing|'
r'\.interface|\.line|\.signature|\.source|\.stack|\.var|abstract|'
r'annotation|bridge|class|default|enum|field|final|fpstrict|'
r'interface|native|private|protected|public|signature|static|'
r'synchronized|synthetic|transient|varargs|volatile)%s' % _break,
Keyword.Reserved),
(r'\.catch%s' % _break, Keyword.Reserved, 'caught-exception'),
(r'(\.class|\.implements|\.inner|\.super|inner|invisible|'
r'invisibleparam|outer|visible|visibleparam)%s' % _break,
Keyword.Reserved, 'class/convert-dots'),
(r'\.field%s' % _break, Keyword.Reserved,
('descriptor/convert-dots', 'field')),
(r'(\.end|\.limit|use)%s' % _break, Keyword.Reserved,
'no-verification'),
(r'\.method%s' % _break, Keyword.Reserved, 'method'),
(r'\.set%s' % _break, Keyword.Reserved, 'var'),
(r'\.throws%s' % _break, Keyword.Reserved, 'exception'),
(r'(from|offset|to|using)%s' % _break, Keyword.Reserved, 'label'),
(r'is%s' % _break, Keyword.Reserved,
('descriptor/convert-dots', 'var')),
(r'(locals|stack)%s' % _break, Keyword.Reserved, 'verification'),
(r'method%s' % _break, Keyword.Reserved, 'enclosing-method'),
# Instructions
(words((
'aaload', 'aastore', 'aconst_null', 'aload', 'aload_0', 'aload_1', 'aload_2',
'aload_3', 'aload_w', 'areturn', 'arraylength', 'astore', 'astore_0', 'astore_1',
'astore_2', 'astore_3', 'astore_w', 'athrow', 'baload', 'bastore', 'bipush',
'breakpoint', 'caload', 'castore', 'd2f', 'd2i', 'd2l', 'dadd', 'daload', 'dastore',
'dcmpg', 'dcmpl', 'dconst_0', 'dconst_1', 'ddiv', 'dload', 'dload_0', 'dload_1',
'dload_2', 'dload_3', 'dload_w', 'dmul', 'dneg', 'drem', 'dreturn', 'dstore', 'dstore_0',
'dstore_1', 'dstore_2', 'dstore_3', 'dstore_w', 'dsub', 'dup', 'dup2', 'dup2_x1',
'dup2_x2', 'dup_x1', 'dup_x2', 'f2d', 'f2i', 'f2l', 'fadd', 'faload', 'fastore', 'fcmpg',
'fcmpl', 'fconst_0', 'fconst_1', 'fconst_2', 'fdiv', 'fload', 'fload_0', 'fload_1',
'fload_2', 'fload_3', 'fload_w', 'fmul', 'fneg', 'frem', 'freturn', 'fstore', 'fstore_0',
'fstore_1', 'fstore_2', 'fstore_3', 'fstore_w', 'fsub', 'i2b', 'i2c', 'i2d', 'i2f', 'i2l',
'i2s', 'iadd', 'iaload', 'iand', 'iastore', 'iconst_0', 'iconst_1', 'iconst_2',
'iconst_3', 'iconst_4', 'iconst_5', 'iconst_m1', 'idiv', 'iinc', 'iinc_w', 'iload',
'iload_0', 'iload_1', 'iload_2', 'iload_3', 'iload_w', 'imul', 'ineg', 'int2byte',
'int2char', 'int2short', 'ior', 'irem', 'ireturn', 'ishl', 'ishr', 'istore', 'istore_0',
'istore_1', 'istore_2', 'istore_3', 'istore_w', 'isub', 'iushr', 'ixor', 'l2d', 'l2f',
'l2i', 'ladd', 'laload', 'land', 'lastore', 'lcmp', 'lconst_0', 'lconst_1', 'ldc2_w',
'ldiv', 'lload', 'lload_0', 'lload_1', 'lload_2', 'lload_3', 'lload_w', 'lmul', 'lneg',
'lookupswitch', 'lor', 'lrem', 'lreturn', 'lshl', 'lshr', 'lstore', 'lstore_0',
'lstore_1', 'lstore_2', 'lstore_3', 'lstore_w', 'lsub', 'lushr', 'lxor',
'monitorenter', 'monitorexit', 'nop', 'pop', 'pop2', 'ret', 'ret_w', 'return', 'saload',
'sastore', 'sipush', 'swap'), suffix=_break), Keyword.Reserved),
(r'(anewarray|checkcast|instanceof|ldc|ldc_w|new)%s' % _break,
Keyword.Reserved, 'class/no-dots'),
(r'invoke(dynamic|interface|nonvirtual|special|'
r'static|virtual)%s' % _break, Keyword.Reserved,
'invocation'),
(r'(getfield|putfield)%s' % _break, Keyword.Reserved,
('descriptor/no-dots', 'field')),
(r'(getstatic|putstatic)%s' % _break, Keyword.Reserved,
('descriptor/no-dots', 'static')),
(words((
'goto', 'goto_w', 'if_acmpeq', 'if_acmpne', 'if_icmpeq',
'if_icmpge', 'if_icmpgt', 'if_icmple', 'if_icmplt', 'if_icmpne',
'ifeq', 'ifge', 'ifgt', 'ifle', 'iflt', 'ifne', 'ifnonnull',
'ifnull', 'jsr', 'jsr_w'), suffix=_break),
Keyword.Reserved, 'label'),
(r'(multianewarray|newarray)%s' % _break, Keyword.Reserved,
'descriptor/convert-dots'),
(r'tableswitch%s' % _break, Keyword.Reserved, 'table')
],
'quote': [
(r"'", String.Single, '#pop'),
(r'\\u[\da-fA-F]{4}', String.Escape),
(r"[^'\\]+", String.Single)
],
'string': [
(r'"', String.Double, '#pop'),
(r'\\([nrtfb"\'\\]|u[\da-fA-F]{4}|[0-3]?[0-7]{1,2})',
String.Escape),
(r'[^"\\]+', String.Double)
],
'root': [
(r'\n+', Text),
(r"'", String.Single, 'quote'),
include('default'),
(r'(%s)([ \t\r]*)(:)' % _name,
bygroups(Name.Label, Text, Punctuation)),
(_name, String.Other)
],
'annotation': [
(r'\n', Text, ('#pop', 'annotation-body')),
(r'default%s' % _break, Keyword.Reserved,
('#pop', 'annotation-default')),
include('default')
],
'annotation-body': [
(r'\n+', Text),
(r'\.end%s' % _break, Keyword.Reserved, '#pop'),
include('default'),
(_name, String.Other, ('annotation-items', 'descriptor/no-dots'))
],
'annotation-default': [
(r'\n+', Text),
(r'\.end%s' % _break, Keyword.Reserved, '#pop'),
include('default'),
default(('annotation-items', 'descriptor/no-dots'))
],
'annotation-items': [
(r"'", String.Single, 'quote'),
include('default'),
(_name, String.Other)
],
'caught-exception': [
(r'all%s' % _break, Keyword, '#pop'),
include('exception')
],
'class/convert-dots': [
include('default'),
(r'(L)((?:%s[/.])*)(%s)(;)' % (_unqualified_name, _name),
bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
'#pop'),
(r'((?:%s[/.])*)(%s)' % (_unqualified_name, _name),
bygroups(Name.Namespace, Name.Class), '#pop')
],
'class/no-dots': [
include('default'),
(r'\[+', Punctuation, ('#pop', 'descriptor/no-dots')),
(r'(L)((?:%s/)*)(%s)(;)' % (_unqualified_name, _name),
bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
'#pop'),
(r'((?:%s/)*)(%s)' % (_unqualified_name, _name),
bygroups(Name.Namespace, Name.Class), '#pop')
],
'descriptor/convert-dots': [
include('default'),
(r'\[+', Punctuation),
(r'(L)((?:%s[/.])*)(%s?)(;)' % (_unqualified_name, _name),
bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
'#pop'),
(r'[^%s\[)L]+' % _separator, Keyword.Type, '#pop'),
default('#pop')
],
'descriptor/no-dots': [
include('default'),
(r'\[+', Punctuation),
(r'(L)((?:%s/)*)(%s)(;)' % (_unqualified_name, _name),
bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
'#pop'),
(r'[^%s\[)L]+' % _separator, Keyword.Type, '#pop'),
default('#pop')
],
'descriptors/convert-dots': [
(r'\)', Punctuation, '#pop'),
default('descriptor/convert-dots')
],
'enclosing-method': [
(_ws, Text),
(r'(?=[^%s]*\()' % _separator, Text, ('#pop', 'invocation')),
default(('#pop', 'class/convert-dots'))
],
'exception': [
include('default'),
(r'((?:%s[/.])*)(%s)' % (_unqualified_name, _name),
bygroups(Name.Namespace, Name.Exception), '#pop')
],
'field': [
(r'static%s' % _break, Keyword.Reserved, ('#pop', 'static')),
include('default'),
(r'((?:%s[/.](?=[^%s]*[/.]))*)(%s[/.])?(%s)' %
(_unqualified_name, _separator, _unqualified_name, _name),
bygroups(Name.Namespace, Name.Class, Name.Variable.Instance),
'#pop')
],
'invocation': [
include('default'),
(r'((?:%s[/.](?=[^%s(]*[/.]))*)(%s[/.])?(%s)(\()' %
(_unqualified_name, _separator, _unqualified_name, _name),
bygroups(Name.Namespace, Name.Class, Name.Function, Punctuation),
('#pop', 'descriptor/convert-dots', 'descriptors/convert-dots',
'descriptor/convert-dots'))
],
'label': [
include('default'),
(_name, Name.Label, '#pop')
],
'method': [
include('default'),
(r'(%s)(\()' % _name, bygroups(Name.Function, Punctuation),
('#pop', 'descriptor/convert-dots', 'descriptors/convert-dots',
'descriptor/convert-dots'))
],
'no-verification': [
(r'(locals|method|stack)%s' % _break, Keyword.Reserved, '#pop'),
include('default')
],
'static': [
include('default'),
(r'((?:%s[/.](?=[^%s]*[/.]))*)(%s[/.])?(%s)' %
(_unqualified_name, _separator, _unqualified_name, _name),
bygroups(Name.Namespace, Name.Class, Name.Variable.Class), '#pop')
],
'table': [
(r'\n+', Text),
(r'default%s' % _break, Keyword.Reserved, '#pop'),
include('default'),
(_name, Name.Label)
],
'var': [
include('default'),
(_name, Name.Variable, '#pop')
],
'verification': [
include('default'),
(r'(Double|Float|Integer|Long|Null|Top|UninitializedThis)%s' %
_break, Keyword, '#pop'),
(r'Object%s' % _break, Keyword, ('#pop', 'class/no-dots')),
(r'Uninitialized%s' % _break, Keyword, ('#pop', 'label'))
]
}
    def analyse_text(text):
        """Return a heuristic score for how likely *text* is source for this
        lexer's assembler dialect.

        Weights are additive: a leading ``.class`` directive (+0.5),
        snake_case mnemonics at line start (+0.3), and other dialect-specific
        dot-directives (+0.6).
        """
        score = 0
        if re.search(r'^\s*\.class\s', text, re.MULTILINE):
            score += 0.5
        if re.search(r'^\s*[a-z]+_[a-z]+\b', text, re.MULTILINE):
            score += 0.3
        if re.search(r'^\s*\.(attribute|bytecode|debug|deprecated|enclosing|'
                     r'inner|interface|limit|set|signature|stack)\b', text,
                     re.MULTILINE):
            score += 0.6
        return score
| |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_utils import timeutils
# The set of attributes common between the RevokeEvent
# and the dictionaries created from the token data.
_NAMES = ['trust_id',
          'consumer_id',
          'access_token_id',
          'audit_id',
          'audit_chain_id',
          'expires_at',
          'domain_id',
          'project_id',
          'user_id',
          'role_id']
# Additional keyword arguments accepted when creating a RevokeEvent.
_EVENT_ARGS = ['issued_before', 'revoked_at']
# Names of attributes in the RevocationEvent, including "virtual" attributes.
# Virtual attributes are those derived from other values rather than supplied
# directly (domain_scope_id is set from domain_id for domain-scoped tokens).
_EVENT_NAMES = _NAMES + ['domain_scope_id']
# Values that will be in the token data but not in the event.
# These will be compared with event values that have different names.
# For example: both trustor_id and trustee_id are compared against user_id.
_TOKEN_KEYS = ['identity_domain_id',
               'assignment_domain_id',
               'issued_at',
               'trustor_id',
               'trustee_id']
# Every keyword RevokeEvent.__init__ accepts.
REVOKE_KEYS = _NAMES + _EVENT_ARGS
def blank_token_data(issued_at):
    """Return a token-data dict with every known key present and set to None.

    Having all keys from _NAMES and _TOKEN_KEYS present means later
    comparisons against revocation events never hit a missing key.
    """
    token_data = dict.fromkeys(_NAMES + _TOKEN_KEYS)
    # 'issued_at' is the one field that must always carry a real value.
    token_data['issued_at'] = issued_at
    return token_data
class RevokeEvent(object):
    """A single revocation rule.

    Every keyword in REVOKE_KEYS becomes an attribute; anything not supplied
    defaults to None, which acts as a wildcard when matching tokens.
    """
    def __init__(self, **kwargs):
        for k in REVOKE_KEYS:
            v = kwargs.get(k, None)
            setattr(self, k, v)

        if self.domain_id and self.expires_at:
            # This is revoking a domain-scoped token.
            self.domain_scope_id = self.domain_id
            self.domain_id = None
        else:
            # This is revoking all tokens for a domain.
            self.domain_scope_id = None

        if self.expires_at is not None:
            # Trim off the expiration time because MySQL timestamps are only
            # accurate to the second.
            self.expires_at = self.expires_at.replace(microsecond=0)

        if self.revoked_at is None:
            self.revoked_at = timeutils.utcnow()
        if self.issued_before is None:
            self.issued_before = self.revoked_at

    def to_dict(self):
        """Serialize the event with API-style key names, omitting None values."""
        keys = ['user_id',
                'role_id',
                'domain_id',
                'domain_scope_id',
                'project_id',
                'audit_id',
                'audit_chain_id',
                ]
        event = dict((key, self.__dict__[key]) for key in keys
                     if self.__dict__[key] is not None)
        if self.trust_id is not None:
            event['OS-TRUST:trust_id'] = self.trust_id
        if self.consumer_id is not None:
            event['OS-OAUTH1:consumer_id'] = self.consumer_id
        # Bug fix: gate the access token entry on access_token_id itself,
        # not on consumer_id (the old check could emit a None token id).
        if self.access_token_id is not None:
            event['OS-OAUTH1:access_token_id'] = self.access_token_id
        if self.expires_at is not None:
            event['expires_at'] = timeutils.isotime(self.expires_at)
        if self.issued_before is not None:
            event['issued_before'] = timeutils.isotime(self.issued_before,
                                                       subsecond=True)
        return event

    def key_for_name(self, name):
        """Return 'name=value' for the tree index; None becomes wildcard '*'."""
        return "%s=%s" % (name, getattr(self, name) or '*')
def attr_keys(event):
    """Return the ordered "name=value" index keys for *event*.

    One key per entry in _EVENT_NAMES, in tree-level order. NOTE: under
    Python 2 map() yields a list; on Python 3 it would be a one-shot iterator.
    """
    return map(event.key_for_name, _EVENT_NAMES)
class RevokeTree(object):
    """Fast revocation-checking tree structure.

    The tree is an index used to quickly match tokens against revocation
    events. Each node is a hashtable keyed by "name=value" strings derived
    from events; a root-to-leaf path encodes one event, and the leaf holds
    that event's 'issued_before' timestamp.
    """
    def __init__(self, revoke_events=None):
        self.revoke_map = dict()
        self.add_events(revoke_events)
    def add_event(self, event):
        """Update the tree based on a revocation event.

        Creates any necessary internal nodes in the tree corresponding to the
        fields of the revocation event. The leaf node will always be set to
        the latest 'issued_before' for events that are otherwise identical.

        :param: Event to add to the tree
        :returns: the event that was passed in.
        """
        revoke_map = self.revoke_map
        for key in attr_keys(event):
            revoke_map = revoke_map.setdefault(key, {})
        # Keep the newest issued_before for otherwise-identical events.
        revoke_map['issued_before'] = max(
            event.issued_before, revoke_map.get(
                'issued_before', event.issued_before))
        return event
    def remove_event(self, event):
        """Update the tree based on the removal of a Revocation Event.

        Removes empty nodes from the tree from the leaf back to the root.
        If multiple events trace the same path, but have different
        'issued_before' values, only the last is ever stored in the tree,
        so only an exact match on 'issued_before' ever triggers a removal.

        :param: Event to remove from the tree
        """
        stack = []
        revoke_map = self.revoke_map
        for name in _EVENT_NAMES:
            key = event.key_for_name(name)
            nxt = revoke_map.get(key)
            if nxt is None:
                break
            stack.append((revoke_map, key, nxt))
            revoke_map = nxt
        else:
            # The whole path matched; drop the leaf timestamp only on an
            # exact match (see docstring).
            if event.issued_before == revoke_map['issued_before']:
                revoke_map.pop('issued_before')
        # Prune nodes that became empty, from the leaf back toward the root.
        for parent, key, child in reversed(stack):
            if not any(child):
                del parent[key]
    def add_events(self, revoke_events):
        # Bulk add; map() returns a list under Python 2.
        return map(self.add_event, revoke_events or [])
    def is_revoked(self, token_data):
        """Check if a token matches any revocation event.

        Compare the values for each level of the tree with the values from
        the token, accounting for attributes that have alternative
        keys, and for wildcard matches.
        If there is a match, continue down the tree.
        If there is no match, exit early.

        token_data is a map based on a flattened view of the token.
        The required fields are:
        'expires_at','user_id', 'project_id', 'identity_domain_id',
        'assignment_domain_id', 'trust_id', 'trustor_id', 'trustee_id'
        'consumer_id', 'access_token_id'
        """
        # Alternative names to be checked in token for every field in
        # revoke tree.
        alternatives = {
            'user_id': ['user_id', 'trustor_id', 'trustee_id'],
            'domain_id': ['identity_domain_id', 'assignment_domain_id'],
            # For a domain-scoped token, the domain is in assignment_domain_id.
            'domain_scope_id': ['assignment_domain_id', ],
        }
        # Contains current forest (collection of trees) to be checked.
        partial_matches = [self.revoke_map]
        # We iterate over every layer of our revoke tree (except the last one).
        for name in _EVENT_NAMES:
            # bundle is the set of partial matches for the next level down
            # the tree
            bundle = []
            wildcard = '%s=*' % (name,)
            # For every tree in current forest.
            for tree in partial_matches:
                # If there is wildcard node on current level we take it.
                bundle.append(tree.get(wildcard))
                if name == 'role_id':
                    # Roles are very special since a token has a list of them.
                    # If the revocation event matches any one of them,
                    # revoke the token.
                    for role_id in token_data.get('roles', []):
                        bundle.append(tree.get('role_id=%s' % role_id))
                else:
                    # For other fields we try to get any branch that concur
                    # with any alternative field in the token.
                    for alt_name in alternatives.get(name, [name]):
                        bundle.append(
                            tree.get('%s=%s' % (name, token_data[alt_name])))
            # tree.get returns `None` if there is no match, so `bundle.append`
            # adds a 'None' entry. This call removes the `None` entries.
            partial_matches = [x for x in bundle if x is not None]
            if not partial_matches:
                # If we end up with no branches to follow means that the token
                # is definitely not in the revoke tree and all further
                # iterations will be for nothing.
                return False
        # The last (leaf) level is checked in a special way because we verify
        # issued_at field differently.
        for leaf in partial_matches:
            try:
                if leaf['issued_before'] > token_data['issued_at']:
                    return True
            except KeyError:
                pass
        # If we made it out of the loop then no element in revocation tree
        # corresponds to our token and it is good.
        return False
def build_token_values_v2(access, default_domain_id):
    """Flatten a v2 token document into the value map used by RevokeTree.

    :param access: the v2 'access' section of a token response
    :param default_domain_id: domain id assumed for every v2 token
    :returns: dict keyed by the names RevokeTree.is_revoked expects
    """
    token = access['token']
    # Second resolution only: revocation events drop microseconds as well.
    expires = timeutils.parse_isotime(token['expires']).replace(microsecond=0)
    audit_ids = token.get('audit_ids', [None])
    token_values = {
        'expires_at': timeutils.normalize_time(expires),
        'issued_at': timeutils.normalize_time(
            timeutils.parse_isotime(token['issued_at'])),
        'audit_id': audit_ids[0],
        'audit_chain_id': audit_ids[-1],
        'user_id': access.get('user', {}).get('id'),
        # v2 tokens always belong to the default domain.
        'identity_domain_id': default_domain_id,
        'assignment_domain_id': default_domain_id,
        # OAuth1 is not part of the v2 API.
        'consumer_id': None,
        'access_token_id': None,
    }
    tenant = token.get('tenant')
    token_values['project_id'] = tenant['id'] if tenant is not None else None
    trust = token.get('trust')
    if trust is None:
        token_values['trust_id'] = None
        token_values['trustor_id'] = None
        token_values['trustee_id'] = None
    else:
        token_values['trust_id'] = trust['id']
        token_values['trustor_id'] = trust['trustor_id']
        token_values['trustee_id'] = trust['trustee_id']
    # Roles appear by ID in the metadata section (by name in the user section).
    token_values['roles'] = list(access.get('metadata', {}).get('roles', []))
    return token_values
def build_token_values(token_data):
    """Flatten a v3 token document into the value map used by RevokeTree.

    :param token_data: the 'token' section of a v3 token response
    :returns: dict keyed by the names RevokeTree.is_revoked expects
    """
    token_expires_at = timeutils.parse_isotime(token_data['expires_at'])
    # Trim off the microseconds because the revocation event only has
    # expirations accurate to the second.
    token_expires_at = token_expires_at.replace(microsecond=0)
    token_values = {
        'expires_at': timeutils.normalize_time(token_expires_at),
        'issued_at': timeutils.normalize_time(
            timeutils.parse_isotime(token_data['issued_at'])),
        'audit_id': token_data.get('audit_ids', [None])[0],
        'audit_chain_id': token_data.get('audit_ids', [None])[-1],
    }
    user = token_data.get('user')
    if user is not None:
        token_values['user_id'] = user['id']
        # Federated users do not have a domain, be defensive and get the user
        # domain set to None in the federated user case.
        token_values['identity_domain_id'] = user.get('domain', {}).get('id')
    else:
        token_values['user_id'] = None
        token_values['identity_domain_id'] = None
    project = token_data.get('project', token_data.get('tenant'))
    if project is not None:
        token_values['project_id'] = project['id']
        token_values['assignment_domain_id'] = project['domain']['id']
    else:
        token_values['project_id'] = None
        # Bug fix: only consult the token's 'domain' scope when the token is
        # NOT project-scoped. Previously this block ran unconditionally and
        # clobbered assignment_domain_id with None for project-scoped tokens,
        # defeating revocation by the project's domain.
        domain = token_data.get('domain')
        if domain is not None:
            token_values['assignment_domain_id'] = domain['id']
        else:
            token_values['assignment_domain_id'] = None
    role_list = []
    roles = token_data.get('roles')
    if roles is not None:
        for role in roles:
            role_list.append(role['id'])
    token_values['roles'] = role_list
    trust = token_data.get('OS-TRUST:trust')
    if trust is None:
        token_values['trust_id'] = None
        token_values['trustor_id'] = None
        token_values['trustee_id'] = None
    else:
        token_values['trust_id'] = trust['id']
        token_values['trustor_id'] = trust['trustor_user']['id']
        token_values['trustee_id'] = trust['trustee_user']['id']
    oauth1 = token_data.get('OS-OAUTH1')
    if oauth1 is None:
        token_values['consumer_id'] = None
        token_values['access_token_id'] = None
    else:
        token_values['consumer_id'] = oauth1['consumer_id']
        token_values['access_token_id'] = oauth1['access_token_id']
    return token_values
| |
import svmc
from svmc import C_SVC, NU_SVC, ONE_CLASS, EPSILON_SVR, NU_SVR
from svmc import LINEAR, POLY, RBF, SIGMOID, PRECOMPUTED
from math import exp, fabs
def _int_array(seq):
    """Copy a Python sequence of ints into a freshly allocated C int array.

    The caller owns the returned array and must release it with
    _free_int_array.
    """
    c_array = svmc.new_int(len(seq))
    for index, item in enumerate(seq):
        svmc.int_setitem(c_array, index, item)
    return c_array
def _double_array(seq):
    """Copy a Python sequence of floats into a freshly allocated C double array.

    The caller owns the returned array and must release it with
    _free_double_array.
    """
    c_array = svmc.new_double(len(seq))
    for index, item in enumerate(seq):
        svmc.double_setitem(c_array, index, item)
    return c_array
def _free_int_array(x):
    """Release a C int array allocated via _int_array; no-op for sentinels."""
    # 'NULL' is the SWIG string sentinel for an unset pointer.
    # Idiom fix: compare against None with 'is not', not '!='.
    if x != 'NULL' and x is not None:
        svmc.delete_int(x)
def _free_double_array(x):
    """Release a C double array allocated via _double_array; no-op for sentinels."""
    # 'NULL' is the SWIG string sentinel for an unset pointer.
    # Idiom fix: compare against None with 'is not', not '!='.
    if x != 'NULL' and x is not None:
        svmc.delete_double(x)
def _int_array_to_list(x, n):
    """Read the first n entries of the C int array x into a Python list."""
    return [svmc.int_getitem(x, i) for i in range(n)]
def _double_array_to_list(x, n):
    """Read the first n entries of the C double array x into a Python list."""
    return [svmc.double_getitem(x, i) for i in range(n)]
class svm_parameter:
    """Wrapper around the C svm_parameter struct.

    Attribute reads and writes are proxied to the underlying C struct via
    __getattr__/__setattr__ (SWIG accessor functions), so instances behave
    like a plain attribute bag while keeping the C side in sync.
    """
    # default values
    default_parameters = {
    'svm_type' : C_SVC,
    'kernel_type' : RBF,
    'degree' : 3,
    'gamma' : 0,        # 1/num_features
    'coef0' : 0,
    'nu' : 0.5,
    'cache_size' : 100,
    'C' : 1,
    'eps' : 1e-3,
    'p' : 0.1,
    'shrinking' : 1,
    'nr_weight' : 0,
    'weight_label' : [],
    'weight' : [],
    'probability' : 0
    }
    def __init__(self,**kw):
        # Write through __dict__ directly so __setattr__ (which needs
        # self.param) is not triggered before 'param' exists.
        self.__dict__['param'] = svmc.new_svm_parameter()
        for attr,val in self.default_parameters.items():
            setattr(self,attr,val)
        for attr,val in kw.items():
            setattr(self,attr,val)
    def __getattr__(self,attr):
        # Proxy reads to the SWIG getter svm_parameter_<attr>_get.
        get_func = getattr(svmc,'svm_parameter_%s_get' % (attr))
        return get_func(self.param)
    def __setattr__(self,attr,val):
        # The weight arrays must be copied into C arrays; the previous C
        # array (if any) is freed to avoid leaking.
        if attr == 'weight_label':
            self.__dict__['weight_label_len'] = len(val)
            val = _int_array(val)
            _free_int_array(self.weight_label)
        elif attr == 'weight':
            self.__dict__['weight_len'] = len(val)
            val = _double_array(val)
            _free_double_array(self.weight)
        # Proxy writes to the SWIG setter svm_parameter_<attr>_set.
        set_func = getattr(svmc,'svm_parameter_%s_set' % (attr))
        set_func(self.param,val)
    def __repr__(self):
        # Enumerate parameters by scanning svmc for *_set accessors.
        ret = '<svm_parameter:'
        for name in dir(svmc):
            if name[:len('svm_parameter_')] == 'svm_parameter_' and name[-len('_set'):] == '_set':
                attr = name[len('svm_parameter_'):-len('_set')]
                if attr == 'weight_label':
                    ret = ret+' weight_label = %s,' % _int_array_to_list(self.weight_label,self.weight_label_len)
                elif attr == 'weight':
                    ret = ret+' weight = %s,' % _double_array_to_list(self.weight,self.weight_len)
                else:
                    ret = ret+' %s = %s,' % (attr,getattr(self,attr))
        return ret+'>'
    def __del__(self):
        # Free the C-side copies of the weight arrays and the struct itself.
        _free_int_array(self.weight_label)
        _free_double_array(self.weight)
        svmc.delete_svm_parameter(self.param)
def _convert_to_svm_node_array(x):
    """Convert a sequence or mapping to a C svm_node array.

    The caller owns the returned array and must release it with
    svmc.svm_node_array_destroy. (Python 2 only: uses iteritems and the
    old raise syntax.)
    """
    import operator
    # Find non zero elements
    iter_range = []
    if type(x) == dict:
        for k, v in x.iteritems():
            # all zeros kept due to the precomputed kernel; no good solution yet
            # if v != 0:
            iter_range.append( k )
    elif operator.isSequenceType(x):
        for j in range(len(x)):
            # if x[j] != 0:
            iter_range.append( j )
    else:
        raise TypeError,"data must be a mapping or a sequence"
    # libsvm requires feature indices in ascending order.
    iter_range.sort()
    data = svmc.svm_node_array(len(iter_range)+1)
    # Terminal node: index -1 marks end-of-array for the C code.
    svmc.svm_node_array_set(data,len(iter_range),-1,0)
    j = 0
    for k in iter_range:
        svmc.svm_node_array_set(data,j,k,x[k])
        j = j + 1
    return data
class svm_problem:
    """Wrapper around the C svm_problem struct.

    Owns the C-side copies of the labels (y) and the instance data (x) for
    one training set; everything is freed in __del__.
    """
    def __init__(self,y,x):
        assert len(y) == len(x)
        self.prob = prob = svmc.new_svm_problem()
        self.size = size = len(y)
        self.y_array = y_array = svmc.new_double(size)
        for i in range(size):
            svmc.double_setitem(y_array,i,y[i])
        self.x_matrix = x_matrix = svmc.svm_node_matrix(size)
        self.data = []
        # maxlen records the largest feature index seen; used elsewhere for
        # the default gamma (1/num_features).
        self.maxlen = 0;
        for i in range(size):
            data = _convert_to_svm_node_array(x[i])
            self.data.append(data);
            svmc.svm_node_matrix_set(x_matrix,i,data)
            if type(x[i]) == dict:
                if (len(x[i]) > 0):
                    self.maxlen = max(self.maxlen,max(x[i].keys()))
            else:
                self.maxlen = max(self.maxlen,len(x[i]))
        svmc.svm_problem_l_set(prob,size)
        svmc.svm_problem_y_set(prob,y_array)
        svmc.svm_problem_x_set(prob,x_matrix)
    def __repr__(self):
        return "<svm_problem: size = %s>" % (self.size)
    def __del__(self):
        # Free all C allocations made in __init__ (not tracked by the GC).
        svmc.delete_svm_problem(self.prob)
        svmc.delete_double(self.y_array)
        for i in range(self.size):
            svmc.svm_node_array_destroy(self.data[i])
        svmc.svm_node_matrix_destroy(self.x_matrix)
class svm_model:
    """Wrapper around a trained (or loaded from file) C svm_model."""
    def __init__(self,arg1,arg2=None):
        """Either svm_model(filename) or svm_model(problem, parameter)."""
        if arg2 == None:
            # create model from file
            filename = arg1
            self.model = svmc.svm_load_model(filename)
        else:
            # create model from problem and parameter
            prob,param = arg1,arg2
            self.prob = prob
            if param.gamma == 0:
                # Default gamma: 1 / number of features.
                param.gamma = 1.0/prob.maxlen
            msg = svmc.svm_check_parameter(prob.prob,param.param)
            if msg: raise ValueError, msg
            self.model = svmc.svm_train(prob.prob,param.param)
        #setup some classwide variables
        self.nr_class = svmc.svm_get_nr_class(self.model)
        self.svm_type = svmc.svm_get_svm_type(self.model)
        #create labels(classes)
        intarr = svmc.new_int(self.nr_class)
        svmc.svm_get_labels(self.model,intarr)
        self.labels = _int_array_to_list(intarr, self.nr_class)
        svmc.delete_int(intarr)
        #check if valid probability model
        self.probability = svmc.svm_check_probability_model(self.model)
    def predict(self,x):
        """Predict the label (or regression value) for one instance x."""
        data = _convert_to_svm_node_array(x)
        ret = svmc.svm_predict(self.model,data)
        svmc.svm_node_array_destroy(data)
        return ret
    def get_nr_class(self):
        return self.nr_class
    def get_labels(self):
        if self.svm_type == NU_SVR or self.svm_type == EPSILON_SVR or self.svm_type == ONE_CLASS:
            raise TypeError, "Unable to get label from a SVR/ONE_CLASS model"
        return self.labels
    def predict_values_raw(self,x):
        """Return the raw decision values (one per class pair) for x."""
        #convert x into svm_node, allocate a double array for return
        n = self.nr_class*(self.nr_class-1)//2
        data = _convert_to_svm_node_array(x)
        dblarr = svmc.new_double(n)
        svmc.svm_predict_values(self.model, data, dblarr)
        ret = _double_array_to_list(dblarr, n)
        svmc.delete_double(dblarr)
        svmc.svm_node_array_destroy(data)
        return ret
    def predict_values(self,x):
        """Return decision values keyed by (label_i, label_j) pairs.

        For regression/one-class models, return the single decision value.
        """
        v=self.predict_values_raw(x)
        if self.svm_type == NU_SVR or self.svm_type == EPSILON_SVR or self.svm_type == ONE_CLASS:
            return v[0]
        else: #self.svm_type == C_SVC or self.svm_type == NU_SVC
            count = 0
            d = {}
            for i in range(len(self.labels)):
                for j in range(i+1, len(self.labels)):
                    # Antisymmetric: d[(j,i)] is the negated value of d[(i,j)].
                    d[self.labels[i],self.labels[j]] = v[count]
                    d[self.labels[j],self.labels[i]] = -v[count]
                    count += 1
            return d
    def predict_probability(self,x):
        """Return (predicted_label, {label: probability}) for x."""
        #c code will do nothing on wrong type, so we have to check ourself
        if self.svm_type == NU_SVR or self.svm_type == EPSILON_SVR:
            raise TypeError, "call get_svr_probability or get_svr_pdf for probability output of regression"
        elif self.svm_type == ONE_CLASS:
            raise TypeError, "probability not supported yet for one-class problem"
        #only C_SVC,NU_SVC goes in
        # NOTE(review): the error message below misspells "probability";
        # preserved because it is a runtime string.
        if not self.probability:
            raise TypeError, "model does not support probabiliy estimates"
        #convert x into svm_node, alloc a double array to receive probabilities
        data = _convert_to_svm_node_array(x)
        dblarr = svmc.new_double(self.nr_class)
        pred = svmc.svm_predict_probability(self.model, data, dblarr)
        pv = _double_array_to_list(dblarr, self.nr_class)
        svmc.delete_double(dblarr)
        svmc.svm_node_array_destroy(data)
        p = {}
        for i in range(len(self.labels)):
            p[self.labels[i]] = pv[i]
        return pred, p
    def get_svr_probability(self):
        #leave the Error checking to svm.cpp code
        ret = svmc.svm_get_svr_probability(self.model)
        if ret == 0:
            raise TypeError, "not a regression model or probability information not available"
        return ret
    def get_svr_pdf(self):
        """Return the Laplace density callable implied by the SVR sigma."""
        #get_svr_probability will handle error checking
        sigma = self.get_svr_probability()
        return lambda z: exp(-fabs(z)/sigma)/(2*sigma)
    def save(self,filename):
        svmc.svm_save_model(filename,self.model)
    def __del__(self):
        svmc.svm_destroy_model(self.model)
def cross_validation(prob, param, fold):
    """Run fold-way cross validation; return one predicted value per instance."""
    if param.gamma == 0:
        # Default gamma: 1 / number of features.
        param.gamma = 1.0 / prob.maxlen
    target = svmc.new_double(prob.size)
    svmc.svm_cross_validation(prob.prob, param.param, fold, target)
    predictions = _double_array_to_list(target, prob.size)
    svmc.delete_double(target)
    return predictions
| |
# -*- coding: utf-8 -*-
"""
Script for paradigm descriptors' extraction on the Mental-Time-Travel protocol
for both models
author: Ana Luisa Pinho
e-mail: ana.pinho@inria.fr
Last update: November 2019
Compatibility: Python 3.5
"""
import os
import glob
import csv
import numpy as np
# %%
# ========================== GENERAL PARAMETERS ===============================
# Trial labels marking reference events in island story "we".
REFERENCES_WE = ['lermite_observe', 'debit_reduit',
                 'les_animaux_broutent', 'premiere_rencontre',
                 'seconde_rencontre']
# Trial labels marking reference events in island story "sn".
REFERENCES_SN = ['dolmens_sous_la_pluie', 'le_grand_pretre_observe',
                 'les_feux_follets_sallument', 'premier_rituel',
                 'second_rituel']
# Cue labels for spatial judgements (French: south-or-north, west-or-east).
CUES_SPACE = ['sud_ou_nord', 'sud_ou_nord', 'ouest_ou_est', 'ouest_ou_est']
# Cue labels for temporal judgements (French: before-or-after).
CUES_TIME = ['avant_ou_apres', 'avant_ou_apres']
# *****************************************************************************
# #######################################################
# # Island story
# island = 'we'
# # Participants' list
# participant_list = [1, 4, 5, 7, 8, 9, 12, 13, 14]
# # Which file to load? (numbering starts from 0)
# input_no = 0
# # Sessions' IDs (numbering starts from 0)
# first_sess = 0
# last_sess = 2
# #######################################################
'''
Exceptions for IBC participants of island "we":
Participant: input_no, first_sess, last_sess
sub-06: 0, 0, 0
sub-06: 1, 1, 2
sub-11: 0, 0, 1
sub-11: 1, 2, 2
sub-15: 0, 0, 0 (very incomplete)
sub-15: 1, 1, 2
'''
# # Island story
# island = 'we'
# # Participants' list
# participant_list = [06]
# # Which file to load? (numbering starts from 0)
# input_no = 0
# # Sessions' IDs (numbering starts from 0)
# first_sess = 0
# last_sess = 0
# #######################################################
# # Island story
# island = 'sn'
# # Participants' list
# participant_list = [1, 4, 5, 6, 7, 9, 11, 12, 13, 14]
# # Which file to load? (numbering starts from 0)
# input_no = 0
# # Sessions' IDs (numbering starts from 0)
# first_sess = 0
# last_sess = 2
'''
Exceptions for IBC participants of island "sn":
sub-15: no runs
'''
# #######################################################
# *****************************************************************************
# #### DEFINE PATHWAYS ####
# Parent directory
main_dir = '../../../../analysis_pipeline/ibc_main/neurospin_data/info'
# Subject folder prefix
# fname_prefix = 'pilot'
fname_prefix = 'sub'
# Name of the task protocol
protocol = 'mtt'
# Name of the folder holding the log files.
# NOTE(review): 'island' (and participant_list / input_no / first_sess /
# last_sess) are only defined in the commented-out configuration blocks
# above; one of them must be uncommented or the next line raises NameError.
raw_fname = 'log_' + island
# %%
# ============================== FUNCTIONS ====================================
def create_new_dir(dir_path):
    """
    Create the output directory (and any missing parents) if needed.

    Uses exist_ok=True instead of a check-then-create pair: this removes the
    race between the existence test and the creation, and raises a clear
    error if the path exists but is not a directory (the old version
    silently skipped that case).
    """
    os.makedirs(dir_path, exist_ok=True)
def load_log_file(input_dir, prefix, subject, task, logdir, no):
    """
    Load the no-th ``.xpd`` log file (sorted by file name) for one subject.

    Returns the file parsed as a list of comma-separated rows.
    """
    participant_id = '%s-%02d' % (prefix, subject)
    # Folder layout: <input_dir>/<prefix-NN>/<task>/<logdir>/*.xpd
    logs_path = os.path.join(input_dir, participant_id, task, logdir)
    log_files = sorted(glob.glob(os.path.join(logs_path, '*.xpd')))
    with open(log_files[no]) as log_file:
        return [line for line in csv.reader(log_file, delimiter=',')]
def stack_descriptors(onsets, durations, names):
    """
    Build a BIDS-style events table: one header row followed by one row per
    trial holding (onset, duration, trial_type).
    """
    # Headers of the paradigm descriptors' files according to BIDS.
    header = np.array([['onset', 'duration', 'trial_type']])
    rows = np.vstack((onsets, durations, names)).T
    return np.vstack((header, rows))
def save_output(file_path, liste):
    """
    Write the rows in *liste* to *file_path* as tab-separated values.

    Bug fix: ``newline=''`` is required when handing a text file to the csv
    module in Python 3 — without it the text layer may translate the csv
    row terminator and inject blank rows on some platforms.
    """
    with open(file_path, 'w', newline='') as fp:
        writer = csv.writer(fp, delimiter='\t')
        writer.writerows(liste)
# %%
# ============================== PARSER =======================================
# %%
# Create a file for each participant and ...
for participant in participant_list:
    # Clean or create output folders (one per model, per island story).
    path1 = os.path.join(main_dir, fname_prefix + '-' + '%02d' % participant,
                         protocol, 'absolute_model_' + island)
    path2 = os.path.join(main_dir, fname_prefix + '-' + '%02d' % participant,
                         protocol, 'relative_model_' + island)
    create_new_dir(path1)
    create_new_dir(path2)
    # Load input files
    input_list = load_log_file(main_dir, fname_prefix, participant, protocol,
                               raw_fname, input_no)
    # Skip the header rows: find the first row whose first column is the
    # participant id. NOTE(review): if no row matches, r ends on the last
    # index and the slice below keeps only that row — confirm inputs always
    # contain the participant id.
    for r, row in enumerate(input_list):
        if row[0] == str(participant):
            break
        else:
            continue
    input_list = input_list[r:]
    # Group rows into one list per session: column 1 holds the session id.
    data_list = []
    length = 0
    for b, block in enumerate(np.arange(first_sess, last_sess + 1)):
        data_block = []
        idx = b * length
        for dl, line in enumerate(input_list[idx:]):
            if line[1] == str(block):
                data_block.append(line)
            else:
                # First non-matching row ends the session; remember its
                # offset so the next session starts after it.
                length = dl
                break
        data_list.append(data_block)
        continue
    # ... for every block (session)
    for n, data in enumerate(data_list):
        # Read the table
        onset = []
        duration = []
        name_abs = []
        name_relat = []
        for datum in data:
            # Known-bad data: sub-15, island "we", run 0 is very incomplete
            # (see the exceptions note above); drop the rest of the run.
            if participant == 15 and datum[1] == '0' and datum[2] != '0' and \
                    island == 'we':
                print(datum[8])
                break
            # Drop the four bookkeeping columns; the rest is trial data.
            datum = datum[4:]
            # Onsets and durations of conditions (log times are in ms).
            onset.append(float(datum[5]) / 1000)
            duration.append(float(datum[6]) / 1000)
            # Names of conditions for both models
            # Beginning of a trial
            if datum[4] in REFERENCES_WE + REFERENCES_SN:
                # References of relative model
                name_relat.append(datum[0] + '_all_reference')
            elif datum[4] in CUES_SPACE:
                # References of absolute model for space
                name_abs.append(datum[0] + '_' + datum[1] + '_reference')
                # Space cues
                name_abs.append(datum[0] + '_all_reference_space_cue')
                name_relat.append(datum[0] + '_all_space_cue')
            elif datum[4] in CUES_TIME:
                # References of absolute model for time
                name_abs.append(datum[0] + '_' + datum[2] + '_reference')
                # Time cues
                name_abs.append(datum[0] + '_all_reference_time_cue')
                name_relat.append(datum[0] + '_all_time_cue')
            elif datum[4] == 'response':
                # Events of the relative model...
                # ... for time
                if datum[9] in ['before', 'after']:
                    name_abs.append(datum[0] + '_' + datum[2] + \
                                    '_reference_' + datum[3] + '_event')
                    name_relat.append(datum[0] + '_' + datum[9] + '_' + \
                                      datum[3] + '_event')
                # ... for space
                else:
                    name_abs.append(datum[0] + '_' + datum[1] + \
                                    '_reference_' + datum[3] + '_event')
                    name_relat.append(datum[0] + '_' + datum[9] + 'side_' + \
                                      datum[3] + '_event')
                # Responses for both models
                name_abs.append(datum[0] + '_all_reference_response')
                name_relat.append(datum[0] + '_all_event_response')
            # Events of the absolute model
            else:
                continue
        # Stack onset, duration and trial_type arrays
        abs_descriptors = stack_descriptors(onset, duration, name_abs)
        relat_descriptors = stack_descriptors(onset, duration, name_relat)
        # Output files
        abs_fname = 'paradigm_descriptors_mtt_absolute-model' + '_' + \
                    island + '_' + fname_prefix + '-' + \
                    '%02d' % participant + '_run' + \
                    '%01d' % (n + first_sess) + '.tsv'
        relat_fname = 'paradigm_descriptors_mtt_relative-model' + '_' + \
                      island + '_' + fname_prefix + '-' + \
                      '%02d' % participant + '_run' + \
                      '%01d' % (n + first_sess) + '.tsv'
        output1 = os.path.join(path1, abs_fname)
        output2 = os.path.join(path2, relat_fname)
        print(output1, output2)
        # Save files
        save_output(output1, abs_descriptors)
        save_output(output2, relat_descriptors)
| |
import logging
from logging import handlers
from xml.dom.minidom import Document
import traceback
import xml.dom
import xml.sax.saxutils
import sys
import re
import time
import os
import hashlib
import json
from urlparse import urlparse
from splunk.appserver.mrsparkle.lib.util import make_splunkhome_path
def setup_logger():
    """
    Build the dedicated rotating-file logger for the modular input.
    """
    log = logging.getLogger('python_modular_input')
    # Keep these messages out of the shared python.log file.
    log.propagate = False
    log.setLevel(logging.DEBUG)
    handler = handlers.RotatingFileHandler(
        make_splunkhome_path(
            ['var', 'log', 'splunk', 'python_modular_input.log']),
        maxBytes=25000000, backupCount=5)
    handler.setFormatter(
        logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
    log.addHandler(handler)
    return log
# Make a logger unless it already exists
try:
    logger
except NameError:
    # First load in this interpreter: create the shared module logger.
    logger = setup_logger()
class FieldValidationException(Exception):
    """Raised when a field value fails validation in ``to_python``."""
    pass
class Field(object):
    """
    This is the base class that should be used for field validators. Sub-class this and override to_python if you need custom validation.
    """
    # Data-type identifiers reported via get_data_type().
    DATA_TYPE_STRING = 'string'
    DATA_TYPE_NUMBER = 'number'
    DATA_TYPE_BOOLEAN = 'boolean'
    def get_data_type(self):
        """
        Get the type of the field.
        """
        return Field.DATA_TYPE_STRING
    def __init__(self, name, title, description, none_allowed=False, empty_allowed=True, required_on_create=None, required_on_edit=None):
        """
        Create the field.
        Arguments:
        name -- Set the name of the field (e.g. "database_server")
        title -- Set the human readable title (e.g. "Database server")
        description -- Set the human readable description of the field (e.g. "The IP or domain name of the database server")
        none_allowed -- Is a value of none allowed?
        empty_allowed -- Is an empty string allowed?
        required_on_create -- Is this field required when creating?
        required_on_edit -- Is this field required when editing?
        """
        # Try to set required_on_create and required_on_edit to sane defaults
        # if not defined: a field that may not be None must be required.
        if required_on_create is None and none_allowed:
            required_on_create = False
        elif required_on_create is None and not none_allowed:
            required_on_create = True
        if required_on_edit is None and required_on_create is not None:
            required_on_edit = required_on_create
        if name is None:
            raise ValueError("The name parameter cannot be none")
        if len(name.strip()) == 0:
            raise ValueError("The name parameter cannot be empty")
        if title is None:
            raise ValueError("The title parameter cannot be none")
        if len(title.strip()) == 0:
            raise ValueError("The title parameter cannot be empty")
        if description is None:
            raise ValueError("The description parameter cannot be none")
        if len(description.strip()) == 0:
            raise ValueError("The description parameter cannot be empty")
        self.name = name
        self.title = title
        self.description = description
        self.none_allowed = none_allowed
        self.empty_allowed = empty_allowed
        self.required_on_create = required_on_create
        self.required_on_edit = required_on_edit
    def to_python(self, value):
        """
        Convert the field to a Python object. Should throw a FieldValidationException if the data is invalid.
        Arguments:
        value -- The value to convert
        """
        if not self.none_allowed and value is None:
            raise FieldValidationException("The value for the '%s' parameter cannot be empty" % (self.name))
        if not self.empty_allowed and len(str(value).strip()) == 0:
            raise FieldValidationException("The value for the '%s' parameter cannot be empty" % (self.name))
        return value
    def to_string(self, value):
        """
        Convert the field to a string value that can be returned. Should throw a FieldValidationException if the data is invalid.
        Arguments:
        value -- The value to convert
        """
        return str(value)
class BooleanField(Field):
    """Validator for boolean parameters; accepts true/false and 1/0 strings."""
    def to_python(self, value):
        Field.to_python(self, value)
        # Real booleans pass straight through unchanged.
        if value in [True, False]:
            return value
        normalized = str(value).strip().lower()
        if normalized in ["true", "1"]:
            return True
        if normalized in ["false", "0"]:
            return False
        raise FieldValidationException("The value of '%s' for the '%s' parameter is not a valid boolean" % (str(value), self.name))
    def to_string(self, value):
        if value == True:
            return "1"
        if value == False:
            return "0"
        return str(value)
    def get_data_type(self):
        return Field.DATA_TYPE_BOOLEAN
class ListField(Field):
    """Validator for comma-separated list parameters."""
    def to_python(self, value):
        Field.to_python(self, value)
        if value is None:
            return []
        return value.split(",")
    def to_string(self, value):
        if value is None:
            return ""
        return ",".join(value)
class RegexField(Field):
    """Validator that compiles the value into a regular-expression object."""
    def to_python(self, value):
        Field.to_python(self, value)
        if value is None:
            return None
        try:
            return re.compile(value)
        except Exception as e:
            # Surface the compile error as a validation failure.
            raise FieldValidationException(str(e))
    def to_string(self, value):
        if value is None:
            return ""
        return value.pattern
class IntegerField(Field):
    """Validator that converts the value into an int."""
    def to_python(self, value):
        Field.to_python(self, value)
        if value is None:
            return None
        try:
            return int(value)
        except ValueError as e:
            raise FieldValidationException(str(e))
    def to_string(self, value):
        return "" if value is None else str(value)
    def get_data_type(self):
        return Field.DATA_TYPE_NUMBER
class FloatField(Field):
    """Validator that converts the value into a float."""
    def to_python(self, value):
        Field.to_python(self, value)
        if value is None:
            return None
        try:
            return float(value)
        except ValueError as e:
            raise FieldValidationException(str(e))
    def to_string(self, value):
        return "" if value is None else str(value)
    def get_data_type(self):
        return Field.DATA_TYPE_NUMBER
class RangeField(Field):
    """
    Validator for integer parameters that must lie within [low, high].

    NOTE(review): ``to_python`` returns a boolean saying whether the value
    is inside the range, not the parsed integer itself — preserved because
    callers may rely on it; confirm before changing.
    """
    def __init__(self, name, title, description, low, high, none_allowed=False, empty_allowed=True):
        # Bug fix: forward the caller's none_allowed/empty_allowed instead
        # of silently discarding them (the old code hard-coded
        # none_allowed=False, empty_allowed=True in the super() call).
        super(RangeField, self).__init__(name, title, description,
                                         none_allowed=none_allowed,
                                         empty_allowed=empty_allowed)
        self.low = low
        self.high = high
    def to_python(self, value):
        Field.to_python(self, value)
        if value is None:
            return None
        try:
            tmp = int(value)
        except ValueError as e:
            raise FieldValidationException(str(e))
        return self.low <= tmp <= self.high
    def to_string(self, value):
        if value is not None:
            return str(value)
        return ""
    def get_data_type(self):
        return Field.DATA_TYPE_NUMBER
class URLField(Field):
    """
    Represents a URL. The URL is converted to a Python object that was created via urlparse.
    """

    @classmethod
    def parse_url(cls, value, name):
        """Parse *value* with urlparse, requiring a host name and an http(s) scheme."""
        parsed_value = urlparse(value)
        # A URL without a host is useless to the caller; reject it outright.
        if not parsed_value.hostname:
            raise FieldValidationException("The value of '%s' for the '%s' parameter does not contain a host name" % (str(value), name))
        if parsed_value.scheme not in ["http", "https"]:
            raise FieldValidationException("The value of '%s' for the '%s' parameter does not contain a valid protocol (only http and https are supported)" % (str(value), name))
        return parsed_value

    def to_python(self, value):
        """Validate and return the parsed URL object."""
        Field.to_python(self, value)
        return URLField.parse_url(value, self.name)

    def to_string(self, value):
        """Reassemble the parsed URL back into its string form."""
        return value.geturl()
class DurationField(Field):
    """
    The duration field represents a duration as represented by a string such as 1d for a 24 hour period.
    The string is converted to an integer indicating the number of seconds.
    """

    # BUG FIX: use a raw string so that \s is not treated as an (invalid)
    # string escape sequence (a DeprecationWarning/SyntaxWarning on Python 3).
    DURATION_RE = re.compile(r"(?P<duration>[0-9]+)\s*(?P<units>[a-z]*)", re.IGNORECASE)

    MINUTE = 60
    HOUR = 60 * MINUTE
    DAY = 24 * HOUR
    WEEK = 7 * DAY

    # Accepted unit suffixes, mapped to their length in seconds.
    UNITS = {
        'w' : WEEK,
        'week' : WEEK,
        'd' : DAY,
        'day' : DAY,
        'h' : HOUR,
        'hour' : HOUR,
        'm' : MINUTE,
        'min' : MINUTE,
        'minute' : MINUTE,
        's' : 1
    }

    def to_python(self, value):
        """Convert a duration string such as "5m" or "1d" to a number of seconds."""
        Field.to_python(self, value)
        # Parse the duration
        m = DurationField.DURATION_RE.match(value)
        # Make sure the duration could be parsed
        if m is None:
            raise FieldValidationException("The value of '%s' for the '%s' parameter is not a valid duration" % (str(value), self.name))
        # Get the units and duration
        d = m.groupdict()
        units = d['units']
        # Parse the value provided
        try:
            duration = int(d['duration'])
        except ValueError:
            raise FieldValidationException("The duration '%s' for the '%s' parameter is not a valid number" % (d['duration'], self.name))
        # Make sure the units are valid
        if len(units) > 0 and units not in DurationField.UNITS:
            raise FieldValidationException("The unit '%s' for the '%s' parameter is not a valid unit of duration" % (units, self.name))
        # Convert the units to seconds; a bare number is already in seconds.
        if len(units) > 0:
            return duration * DurationField.UNITS[units]
        else:
            return duration

    def to_string(self, value):
        """Render the stored number of seconds as a string."""
        return str(value)
class ModularInputConfig():
    """Parsed form of the configuration XML that Splunk feeds a modular input."""

    def __init__(self, server_host, server_uri, session_key, checkpoint_dir, configuration):
        self.server_host = server_host
        self.server_uri = server_uri
        self.session_key = session_key
        self.checkpoint_dir = checkpoint_dir
        self.configuration = configuration

    def __str__(self):
        attrs = ['server_host', 'server_uri', 'session_key', 'checkpoint_dir', 'configuration']
        return str({attr: str(getattr(self, attr)) for attr in attrs})

    @staticmethod
    def get_text(node, default=None):
        """
        Get the value of the text in the first node under the given node.

        Arguments:
        node -- The node that should have a text node under it.
        default -- The default text that ought to be returned if no text node could be found (defaults to none).
        """
        first = node.firstChild if node else None
        if first is not None and first.nodeType == first.TEXT_NODE:
            return first.data
        return default

    @staticmethod
    def get_config_from_xml(config_str_xml):
        """
        Get the config from the given XML and return a ModularInputConfig instance.

        Arguments:
        config_str_xml -- A string of XML that represents the configuration provided by Splunk.
        """
        doc = xml.dom.minidom.parseString(config_str_xml)
        root = doc.documentElement
        get_text = ModularInputConfig.get_text

        # Splunk always provides exactly one of each of these elements.
        server_host = get_text(root.getElementsByTagName("server_host")[0])
        server_uri = get_text(root.getElementsByTagName("server_uri")[0])
        session_key = get_text(root.getElementsByTagName("session_key")[0])
        checkpoint_dir = get_text(root.getElementsByTagName("checkpoint_dir")[0])

        # Each <stanza> becomes a dict of its <param> values plus its name.
        configuration = {}
        conf_node = root.getElementsByTagName("configuration")[0]
        if conf_node:
            for stanza in conf_node.getElementsByTagName("stanza"):
                if not stanza:
                    continue
                stanza_name = stanza.getAttribute("name")
                if not stanza_name:
                    continue
                config = {"name": stanza_name}
                for param in stanza.getElementsByTagName("param"):
                    config[param.getAttribute("name")] = get_text(param)
                configuration[stanza_name] = config

        return ModularInputConfig(server_host, server_uri, session_key, checkpoint_dir, configuration)
class ModularInput():
    """Base class for Splunk modular inputs.

    Subclasses declare their arguments (Field instances), override run(),
    and call execute() from the script entry point.
    """
    # These arguments cover the standard fields that are always supplied
    # by Splunk for every stanza; they are accepted during validation in
    # addition to the input's own declared arguments.
    standard_args = [
        Field("name", "Stanza name", "The name of the stanza for this modular input", empty_allowed=True),
        Field("stanza", "Stanza name", "The name of the stanza for this modular input", empty_allowed=True),
        Field("source", "Source", "The source for events created by this modular input", empty_allowed=True),
        Field("sourcetype", "Stanza name", "The name of the stanza for this modular input", empty_allowed=True, none_allowed=True),
        Field("index", "Index", "The index that data should be sent to", empty_allowed=True, none_allowed=True),
        Field("host", "Host", "The host that is running the input", empty_allowed=True),
        BooleanField("disabled", "Disabled", "Whether the modular input is disabled or not", empty_allowed=True)
    ]
def _is_valid_param(self, name, val):
'''Raise an error if the parameter is None or empty.'''
if val is None:
raise ValueError("The {0} parameter cannot be none".format(name))
if len(val.strip()) == 0:
raise ValueError("The {0} parameter cannot be empty".format(name))
return val
def _create_formatter_textnode(self, xmldoc, nodename, value):
'''Shortcut for creating a formatter textnode.
Arguments:
xmldoc - A Document object.
nodename - A string name for the node.
'''
node = xmldoc.createElement(nodename)
text = xmldoc.createTextNode(str(value))
node.appendChild(text)
return node
def _create_document(self):
'''Create the document for sending XML streaming events.'''
doc = Document()
# Create the <stream> base element
stream = doc.createElement('stream')
doc.appendChild(stream)
return doc
def _create_event(self, doc, params, stanza, unbroken=False, close=True):
'''Create an event for XML streaming output.
Arguments:
doc - a Document object.
params - a dictionary of attributes for the event.
stanza_name - the stanza
'''
# Create the <event> base element
event = doc.createElement('event')
# Indicate if this event is to be unbroken (meaning a </done> tag will
# need to be added by a future event.
if unbroken:
event.setAttribute('unbroken', '1')
# Indicate if this script is single-instance mode or not.
if self.streaming_mode == 'true':
event.setAttribute('stanza', stanza)
# Define the possible elements
valid_elements = ['host', 'index', 'source', 'sourcetype', 'time', 'data']
# Append the valid child elements. Invalid elements will be dropped.
for element in filter(lambda x: x in valid_elements, params.keys()):
event.appendChild(self._create_formatter_textnode(doc, element, params[element]))
if close:
event.appendChild(doc.createElement('done'))
return event
def _print_event(self, doc, event):
'''Adds an event to XML streaming output.'''
# Get the stream from the document.
stream = doc.firstChild
# Append the event.
stream.appendChild(event)
# Return the content as a string WITHOUT the XML header; remove the
# child object so the next event can be returned and reuse the same
# Document object.
output = doc.documentElement.toxml()
stream.removeChild(event)
return output
def _add_events(self, doc, events):
'''Adds a set of events to XML streaming output.'''
# Get the stream from the document.
stream = doc.firstChild
# Add the <event> node.
for event in events:
stream.appendChild(event)
# Return the content as a string WITHOUT the XML header.
return doc.documentElement.toxml()
def escape_spaces(self, s):
    """Return str(s), wrapped in double quotes when it contains a space.

    Useful when writing key=value pairs for Splunk, where an unquoted space
    would split the value.

    Arguments:
    s -- the value to escape (converted with str(); None passes through unchanged).
    """
    if s is None:
        return None
    text = str(s)
    if " " in text:
        return '"' + text + '"'
    return text
def create_event_string(self, data_dict, stanza, sourcetype, source, index, host=None, unbroken=False, close=False ):
    """
    Create a string representing the event.

    Argument:
    data_dict -- A dictionary containing the fields
    stanza -- The stanza used for the input
    sourcetype -- The sourcetype
    source -- The source field value
    index -- The index to send the event to
    host -- The host field value
    unbroken -- Whether the event is part of a multi-part (unbroken) event
    close -- Whether to append the closing </done> marker
    """
    # Make the content of the event
    data_str = ''
    for k, v in data_dict.items():
        # If the value is a list, then write out each matching value with the same name (as mv).
        # BUG FIX: dropped the redundant "and not isinstance(v, basestring)"
        # clause -- a list is never a string, and basestring is Python 2 only
        # (it raised NameError under Python 3 whenever v was a list).
        if isinstance(v, list):
            values = v
        else:
            values = [v]
        k_escaped = self.escape_spaces(k)
        # Write out each value
        for v in values:
            v_escaped = self.escape_spaces(v)
            if len(data_str) > 0:
                data_str += ' '
            data_str += '%s=%s' % (k_escaped, v_escaped)
    # Make the event
    event_dict = {'stanza': stanza,
                  'data' : data_str}
    if index is not None:
        event_dict['index'] = index
    if sourcetype is not None:
        event_dict['sourcetype'] = sourcetype
    if source is not None:
        event_dict['source'] = source
    if host is not None:
        event_dict['host'] = host
    # BUG FIX: the unbroken/close parameters were previously ignored and
    # literal False values were passed instead; forward them so callers can
    # actually produce unbroken/closed events.
    event = self._create_event(self.document,
                               params=event_dict,
                               stanza=stanza,
                               unbroken=unbroken,
                               close=close)
    # If using unbroken events, the last event must have been
    # added with a "</done>" tag.
    return self._print_event(self.document, event)
def output_event(self, data_dict, stanza, index=None, sourcetype=None, source=None, host=None, unbroken=False, close=False, out=sys.stdout ):
    """
    Output the given event so that Splunk can see it.

    Arguments:
    data_dict -- A dictionary containing the fields
    stanza -- The stanza used for the input
    sourcetype -- The sourcetype
    source -- The source to use
    index -- The index to send the event to
    unbroken -- Whether the event is part of a multi-part (unbroken) event
    close -- Whether to emit the closing </done> marker
    out -- The stream to send the event to (defaults to standard output)
    host -- The host
    """
    # Build the serialized <event> XML, then flush immediately so Splunk
    # receives events as they are produced.
    output = self.create_event_string(data_dict, stanza, sourcetype, source, index, host, unbroken, close)
    out.write(output)
    out.flush()
def __init__(self, scheme_args, args=None, sleep_interval=5):
    """
    Set up the modular input.

    Arguments:
    scheme_args -- dict of scheme metadata; must include 'title' (e.g.
                   "Database Connector") and 'description' (e.g. "Get data
                   from a database"); each key becomes an attribute
    args -- A list of Field instances for validating the arguments
    sleep_interval -- How often to sleep between runs, in seconds
    """
    # Setup defaults
    default_scheme_args = {
        "use_external_validation" : "true",
        "streaming_mode" : "xml",
        "use_single_instance" : "true"
    }
    # NOTE(review): concatenating items() results is Python 2 only; under
    # Python 3 dict_items does not support "+" and this raises TypeError.
    scheme_args = dict(default_scheme_args.items() + scheme_args.items())
    # Set the scheme arguments. Values must be non-empty strings.
    for arg in scheme_args:
        setattr(self, arg, self._is_valid_param(arg, scheme_args.get(arg)))
    if args is None:
        self.args = []
    else:
        # Copy so later addArg() calls do not mutate the caller's list.
        self.args = args[:]
    # Non-positive intervals fall back to the 5-second default.
    if sleep_interval > 0:
        self.sleep_interval = sleep_interval
    else:
        self.sleep_interval = 5
    # Create the document used for sending events to Splunk through
    self.document = self._create_document()
def addArg(self, arg):
    """Append *arg* (a Field instance) to this input's list of arguments."""
    if self.args is None:
        self.args = [arg]
    else:
        self.args.append(arg)
def usage(self, out=sys.stdout):
    """
    Print a usage statement.

    Arguments:
    out -- The stream to write the message to (defaults to standard output)
    """
    # BUG FIX: the "%s" placeholder was never substituted; fill in the
    # script name so the message renders correctly.
    out.write("usage: %s [--scheme|--validate-arguments]" % sys.argv[0])
def do_scheme(self, out=sys.stdout):
    """
    Get the scheme and write it out to the given stream. Always returns True.

    Arguments:
    out -- The stream to write the message to (defaults to standard output)
    """
    logger.info("Modular input: scheme requested")
    out.write(self.get_scheme())
    return True
def get_scheme(self):
    """
    Render the scheme describing this input's parameters as an XML string.
    """
    doc = Document()
    scheme = doc.createElement("scheme")
    doc.appendChild(scheme)
    # Simple (tag, text) children of <scheme>, in the order Splunk expects.
    for tag, text in (("title", self.title),
                      ("description", self.description),
                      ("use_external_validation", self.use_external_validation),
                      ("streaming_mode", self.streaming_mode),
                      ("use_single_instance", self.use_single_instance)):
        child = doc.createElement(tag)
        child.appendChild(doc.createTextNode(text))
        scheme.appendChild(child)
    # The argument descriptions live under <endpoint><args>.
    endpoint = doc.createElement("endpoint")
    scheme.appendChild(endpoint)
    args_element = doc.createElement("args")
    endpoint.appendChild(args_element)
    self.add_xml_args(doc, args_element)
    return doc.toxml()
def add_xml_args(self, doc, element_args):
    """
    Append one <arg> element per declared argument under *element_args*.

    Arguments:
    doc -- The XML document (used as the element factory)
    element_args -- The element that should be the parent of the arg elements that will be added.
    """
    for arg in self.args:
        element_arg = doc.createElement("arg")
        element_arg.setAttribute("name", arg.name)
        element_args.appendChild(element_arg)
        # Children describing this argument, in the order Splunk expects.
        for tag, text in (("title", arg.title),
                          ("description", arg.description),
                          ("data_type", arg.get_data_type()),
                          ("required_on_create", "true" if arg.required_on_create else "false"),
                          ("required_on_edit", "true" if arg.required_on_edit else "false")):
            child = doc.createElement(tag)
            child.appendChild(doc.createTextNode(text))
            element_arg.appendChild(child)
def do_validation(self, in_stream=sys.stdin):
    """
    Get the validation data from standard input and attempt to validate it. Returns true if the arguments validated, false otherwise.

    Arguments:
    in_stream -- The stream to get the input from (defaults to standard input)
    """
    # NOTE(review): in_stream is accepted but not forwarded --
    # get_validation_data() falls back to its own default (sys.stdin).
    data = self.get_validation_data()
    try:
        # The "stanza" key in the data is accepted because "stanza" is one
        # of the standard_args.
        self.validate_parameters(None, data)
        return True
    except FieldValidationException as e:
        self.print_error(str(e))
        return False
def validate(self, arguments):
    """
    Validate a {stanza: parameters} dictionary; returns True if all pass.

    Arguments:
    arguments -- mapping of stanza name to a dict of that stanza's values.
    """
    # validate_parameters() raises on the first invalid stanza.
    for stanza_name, params in arguments.items():
        self.validate_parameters(stanza_name, params)
    return True
def validate_parameters(self, stanza, parameters):
    """
    Convert and validate the parameter set for a stanza.

    Returns a dictionary of cleaned (converted-to-Python) values; raises
    FieldValidationException when a parameter has no matching argument.

    Arguments:
    stanza -- The stanza name
    parameters -- mapping of parameter name to raw value
    """
    # Known arguments: the Splunk-supplied standard fields plus this
    # input's declared arguments (declared arguments win on name clashes).
    all_args = {}
    for field in self.standard_args:
        all_args[field.name] = field
    for field in self.args:
        all_args[field.name] = field
    # Convert each provided parameter, rejecting unknown names.
    cleaned_params = {}
    for name, value in parameters.items():
        if name not in all_args:
            raise FieldValidationException("The parameter '%s' is not a valid argument" % (name))
        cleaned_params[name] = all_args[name].to_python(value)
    return cleaned_params
def print_error(self, error, out=sys.stdout):
    """
    Write a Splunk-readable <error> XML message to the given stream.

    Arguments:
    error -- The message to be printed
    out -- The stream to write the message to (defaults to standard output)
    """
    message = "<error><message>%s</message></error>" % error
    out.write(message)
def read_config(self, in_stream=sys.stdin):
    """
    Read the configuration XML from the given stream and return a
    ModularInputConfig instance.

    Arguments:
    in_stream -- The stream to get the input from (defaults to standard input)
    """
    config_str_xml = in_stream.read()
    return ModularInputConfig.get_config_from_xml(config_str_xml)
def run(self, stanza, cleaned_params):
    """
    Run the input using the arguments provided.

    Subclasses must override this; the base implementation always raises.
    NOTE(review): do_run() invokes this with a third input_config argument,
    so overrides should accept (stanza, cleaned_params, input_config).

    Arguments:
    stanza -- The name of the stanza
    cleaned_params -- The arguments following validation and conversion to Python objects.
    """
    raise Exception("Run function was not implemented")
@classmethod
def is_expired( cls, last_run, interval, cur_time=None ):
    """
    Indicates if the last run time is expired based on the value of the last_run parameter.

    Arguments:
    last_run -- The time that the analysis was last done (Unix epoch), or None
    interval -- The interval that the analysis ought to be done (as an integer)
    cur_time -- The current time (will be automatically determined if not provided)
    """
    if cur_time is None:
        cur_time = time.time()
    # Expired when the input never ran, or the next scheduled run time
    # is already in the past.
    return last_run is None or (last_run + interval) < cur_time
@classmethod
def last_ran( cls, checkpoint_dir, stanza ):
    """
    Determines the date that the analysis was last performed for the given input (denoted by the stanza name).

    Returns None when no checkpoint (or no last_run entry) exists.

    Arguments:
    checkpoint_dir -- The directory where checkpoints ought to be saved
    stanza -- The stanza of the input being used
    """
    checkpoint_dict = cls.get_checkpoint_data(checkpoint_dir, stanza)
    if checkpoint_dict is None or 'last_run' not in checkpoint_dict:
        # BUG FIX: the format string previously used a literal "$r" and had
        # only one placeholder for two arguments, which makes the lazy
        # %-formatting inside logging fail; use %s/%r conversions instead.
        logger.info('Last run time could not be loaded for stanza=%s, checkpoint_dir=%r', stanza, checkpoint_dir)
        return None
    return checkpoint_dict['last_run']
@classmethod
def needs_another_run(cls, checkpoint_dir, stanza, interval, cur_time=None):
    """
    Determines if the given input (denoted by the stanza name) ought to be executed.

    Arguments:
    checkpoint_dir -- The directory where checkpoints ought to be saved
    stanza -- The stanza of the input being used
    interval -- The frequency that the analysis ought to be performed
    cur_time -- The current time (will be automatically determined if not provided)
    """
    try:
        last_ran = cls.last_ran(checkpoint_dir, stanza)
        return cls.is_expired(last_ran, interval, cur_time)
    except (IOError, ValueError):
        # Missing or corrupt checkpoint file: err on the side of running.
        # (The trailing unconditional "return True" after this try/except
        # was unreachable and has been removed.)
        return True
@classmethod
def get_file_path(cls, checkpoint_dir, stanza):
    """
    Get the path to the checkpoint file.

    The file name is the SHA-224 digest of the stanza so that arbitrary
    stanza names map to safe, unique file names.

    Arguments:
    checkpoint_dir -- The directory where checkpoints ought to be saved
    stanza -- The stanza of the input being used
    """
    # BUG FIX: hashlib requires bytes under Python 3; encode the stanza
    # first (a no-op for ASCII str under Python 2).
    digest = hashlib.sha224(stanza.encode('utf-8')).hexdigest()
    return os.path.join( checkpoint_dir, digest + ".json" )
@classmethod
def get_checkpoint_data(cls, checkpoint_dir, stanza):
    """
    Gets the checkpoint for this input (if it exists).

    Raises IOError/OSError when the checkpoint file is missing and
    ValueError when it contains invalid JSON; callers handle both.

    Arguments:
    checkpoint_dir -- The directory where checkpoints ought to be saved
    stanza -- The stanza of the input being used
    """
    # A context manager replaces the manual try/finally close.
    with open( cls.get_file_path(checkpoint_dir, stanza) ) as fp:
        return json.load(fp)
@classmethod
def get_non_deviated_last_run(cls, last_ran, interval):
    """
    Return the next scheduled run time without accumulating processing drift.

    Scheduling from "now" would silently add the processing time to every
    interval (a 60s interval that takes 5s to process would really run every
    65s). Instead, assume the run happened exactly on schedule and use
    last_ran + interval. If that moment is more than one interval in the
    past (the input was down or badly delayed), skip the missed executions
    and catch up to the current time rather than replaying them.

    Arguments:
    last_ran -- When the input last ran (Unix epoch).
    interval -- The execution interval in seconds.
    """
    last_ran_derived = last_ran + interval
    if last_ran_derived < (time.time() - interval):
        # Too far behind: jump to the present instead of replaying old runs.
        last_ran_derived = time.time()
        logger.info("Previous run was too far in the past (gap=%r) and thus some executions of the input have been missed", last_ran_derived-last_ran)
    return last_ran_derived
@classmethod
def save_checkpoint_data(cls, checkpoint_dir, stanza, data):
    """
    Save the checkpoint state. Failures are logged rather than raised
    (checkpointing is best-effort).

    Arguments:
    checkpoint_dir -- The directory where checkpoints ought to be saved
    stanza -- The stanza of the input being used
    data -- A dictionary with the data to save
    """
    try:
        # A context manager replaces the manual try/finally close.
        with open( cls.get_file_path(checkpoint_dir, stanza), 'w' ) as fp:
            json.dump(data, fp)
    except Exception:
        logger.exception("Failed to save checkpoint directory")
def do_shutdown(self):
    """
    Hook called when the modular input should shut down (e.g. when Splunk
    is no longer the parent process); the default implementation is a no-op.
    """
    pass
def do_run(self, in_stream=sys.stdin, log_exception_and_continue=False):
    """
    Read the config from the given stream and run the input in a loop.

    Each pass validates and runs every configured stanza, then sleeps for
    self.sleep_interval seconds; the loop only terminates when Splunk is no
    longer the parent process.

    Arguments:
    in_stream -- The stream to get the input from (defaults to standard input)
    log_exception_and_continue -- If true, exceptions will not be thrown for invalid configurations and instead the stanza will be skipped.
    """
    # Run the modular import
    input_config = self.read_config(in_stream)
    while True:
        # If Splunk is no longer the parent process, then it has shut down and this input needs to terminate.
        # (Re-parenting to PID 1 means the original parent died; POSIX only.)
        if hasattr(os, 'getppid') and os.getppid() == 1:
            logging.warn("Modular input is no longer running under Splunk; script will now exit")
            self.do_shutdown()
            sys.exit(2)
        # Initialize the document that will be used to output the results
        self.document = self._create_document()
        for stanza, conf in input_config.configuration.items():
            try:
                cleaned_params = self.validate_parameters(stanza, conf)
                # NOTE(review): run() is invoked with three arguments here but
                # the base-class run() declares two -- subclasses are expected
                # to accept (stanza, cleaned_params, input_config).
                self.run(stanza,
                         cleaned_params,
                         input_config)
            except FieldValidationException as e:
                if log_exception_and_continue:
                    logger.error("The input stanza '%s' is invalid: %s" % (stanza, str(e)))
                else:
                    raise e
        # Sleep for a bit
        try:
            time.sleep(self.sleep_interval)
        except IOError:
            pass #Exceptions such as KeyboardInterrupt and IOError can be thrown in order to interrupt sleep calls
def get_validation_data(self, in_stream=sys.stdin):
    """
    Parse the validation XML from the given stream into a flat dictionary.

    The result maps each <param> name to its text value and stores the
    <item> element's name attribute under the key "stanza".

    Arguments:
    in_stream -- The stream to get the input from (defaults to standard input)
    """
    # Read everything from the stream and parse it as XML.
    doc = xml.dom.minidom.parseString(in_stream.read())
    root = doc.documentElement
    val_data = {}
    item_node = root.getElementsByTagName("item")[0]
    if item_node:
        val_data["stanza"] = item_node.getAttribute("name")
        for param in item_node.getElementsByTagName("param"):
            name = param.getAttribute("name")
            first = param.firstChild
            # Only keep named params that actually carry a text value.
            if name and first and first.nodeType == first.TEXT_NODE:
                val_data[name] = first.data
    return val_data
def validate_parameters_from_cli(self, argument_array=None):
    """
    Build a parameter dict from positional command-line values and validate it.

    Arguments:
    argument_array -- positional argument values (defaults to sys.argv[1:])
    """
    if argument_array is None:
        argument_array = sys.argv[1:]
    # Pair each declared argument with the positional value at the same
    # index; arguments beyond the provided values become None.
    parameters = {}
    for position, arg in enumerate(self.args):
        if position < len(argument_array):
            parameters[arg.name] = argument_array[position]
        else:
            parameters[arg.name] = None
    # Now that we have simulated the parameters, go ahead and test them.
    self.validate_parameters("unnamed", parameters)
def execute(self, in_stream=sys.stdin, out_stream=sys.stdout):
    """
    Get the arguments that were provided from the command-line and execute the script.

    Dispatches on sys.argv: --scheme prints the introspection scheme,
    --validate-arguments validates the data on stdin (exiting -1 on
    failure), and no flag runs the input loop.

    Arguments:
    in_stream -- The stream to get the input from (defaults to standard input)
    out_stream -- The stream to write the output to (defaults to standard output)
    """
    try:
        logger.info("Execute called")
        if len(sys.argv) > 1:
            if sys.argv[1] == "--scheme":
                self.do_scheme(out_stream)
            elif sys.argv[1] == "--validate-arguments":
                logger.info("Modular input: validate arguments called")
                # Exit with a code of -1 if validation failed
                if self.do_validation() == False:
                    sys.exit(-1)
            else:
                self.usage(out_stream)
        else:
            # Run the modular input
            self.do_run(in_stream, log_exception_and_continue=True)
            logger.info("Execution completed successfully")
    except Exception as e:
        logger.error("Execution failed: %s", ( traceback.format_exc() ))
        # Make sure to grab any exceptions so that we can print a valid error message
        self.print_error(str(e), out_stream)
| |
#!/usr/bin/env python
import numpy as np
import scipy.special
from multiprocessing import Pool
_POISSON = .25
_N_PROCS = 4
def get_flexure_parameter(h, E, n_dim, gamma_mantle=33000., poisson=0.25):
    """
    Calculate the flexure parameter based on some physical constants. *h* is
    the Effective elastic thickness of Earth's crust (m), *E* is Young's
    Modulus, and *n_dim* is the number of spatial dimensions for which the
    flexure parameter is used. The number of dimension must be either 1, or
    2. *gamma_mantle* is the specific weight of the mantle and *poisson* is
    Poisson's ratio of the crust (generalized from the previously hard-coded
    module constant _POISSON = .25; the default preserves the old behavior).

    Examples
    --------
    >>> from __future__ import print_function
    >>> from landlab.components.flexure import get_flexure_parameter
    >>> eet = 65000.
    >>> youngs = 7e10
    >>> alpha = get_flexure_parameter(eet, youngs, 1)
    >>> print('%.3f' % round(alpha, 3))
    119965.926
    >>> alpha = get_flexure_parameter(eet, youngs, 2)
    >>> print('%.2f' % alpha)
    84828.72
    """
    # Flexural rigidity of an elastic plate of thickness h.
    D = E * pow(h, 3) / 12. / (1. - pow(poisson, 2))
    assert(n_dim == 1 or n_dim == 2)
    if n_dim == 2:
        alpha = pow(D / gamma_mantle, .25)
    else:
        # The 1-D (line load) solution carries an extra factor of 4.
        alpha = pow(4. * D / gamma_mantle, .25)
    return alpha
def _calculate_distances(locs, coords):
if isinstance(locs[0], (float, int)):
return np.sqrt(pow(coords[0] - locs[0], 2) +
pow(coords[1] - locs[1], 2))
else:
r = pow(coords[0][:, np.newaxis] - locs[0], 2)
r += pow(coords[1][:, np.newaxis] - locs[1], 2)
return np.sqrt(r, out=r)
def _calculate_deflections(load, locs, coords, alpha, out=None,
                           gamma_mantle=33000.):
    """Deflections at *coords* due to load(s) at *locs*, via the Kelvin
    function kei of the scaled distance."""
    scale = - load / (2. * np.pi * gamma_mantle * pow(alpha, 2.))
    r = _calculate_distances(locs, coords) / alpha
    # Scalar load: a single kei evaluation scaled in one step.
    if isinstance(scale, (float, int)):
        return np.multiply(scipy.special.kei(r), scale, out=out)
    # Multiple loads: evaluate in place, scale per load, sum contributions.
    scipy.special.kei(r, out=r)
    np.multiply(r, scale[np.newaxis, :], out=r)
    return np.sum(r, axis=1, out=out)
def subside_point_load(load, loc, coords, params=None, out=None):
    """Calculate deflection at points due a point load.

    Calculate deflections on a grid, defined by the points in the *coords*
    tuple, due to a point load of magnitude *load* applied at *loc*.
    *x* and *y* are the x and y coordinates of each node of the solution
    grid (in meters). The scalars *eet* and *youngs* define the crustal
    properties.

    Parameters
    ----------
    load : float
        Magnitude of the point load.
    loc : float or tuple
        Location of the load as either a scalar or as (*x*, *y*)
    coords : ndarray
        Array of points to calculate deflections at
    params : dict-like
        Physical parameters used for deflection calculation. Valid keys are
        - *eet*: Effective elastic thickness
        - *youngs*: Young's modulus
        - *gamma_mantle*: Specific weight of the mantle
    out : ndarray, optional
        Array to put deflections into.

    Returns
    -------
    out : ndarray
        Array of deflections.

    Examples
    --------
    >>> from landlab.components.flexure import subside_point_load
    >>> params = dict(eet=65000., youngs=7e10)
    >>> load = 1e9
    >>> x = np.arange(0, 10000, 1000.)
    >>> y = np.arange(0, 5000, 1000.)
    >>> (x, y) = np.meshgrid(x, y)
    >>> x.shape = (x.size, )
    >>> y.shape = (y.size, )
    >>> dz = subside_point_load(load, (5000., 2500.), (x, y), params=params)
    >>> print('%.5g' % round(dz.sum(), 9))
    2.6267e-05
    """
    # NOTE(review): the default eet=6500. disagrees with the docstring
    # example value (65000.); preserved to avoid changing results.
    params = params or dict(eet=6500., youngs=7.e10)
    eet, youngs = params['eet'], params['youngs']
    gamma_mantle = params.get('gamma_mantle', 33000.)
    assert(len(loc) in [1, 2])
    assert(len(coords) == len(loc))
    assert(len(coords[0].shape) == 1)
    if not isinstance(load, (int, float, np.ndarray)):
        load = np.array(load)
    if out is None:
        # BUG FIX: np.float was deprecated in NumPy 1.20 and removed in
        # 1.24; the builtin float is the documented replacement.
        out = np.empty(coords[0].size, dtype=float)
    alpha = get_flexure_parameter(eet, youngs, len(loc),
                                  gamma_mantle=gamma_mantle)
    if len(loc) == 2:
        # 2-D: superposition of Kelvin-function point-load responses.
        _calculate_deflections(load, loc, coords, alpha, out=out,
                               gamma_mantle=gamma_mantle)
    else:
        # 1-D (line load) analytic solution.
        c = load / (2. * alpha * gamma_mantle)
        r = abs(coords[0] - loc[0]) / alpha
        out[:] = c * np.exp(-r) * (np.cos(r) + np.sin(r))
    return out
def subside_point_loads(loads, locs, coords, params=None, deflection=None,
                        n_procs=1):
    """Calculate deflection at points due to multiple point loads.

    Calculate lithospheric deflections due to *loads* at coordinates
    specified by the *locs* tuple. *coords* is a tuple that gives the
    coordinates of each point where deflections are calculated; *locs* is
    positions of the applied loads. Since this function calculates the 1D
    or 2D flexure equation, *coords* and *locs* must have either one or two
    elements.

    Parameters
    ----------
    loads : ndarray
        Magnitude of the point loads.
    locs : tuple of ndarray
        Load locations as ``(loc_x,)`` or ``(loc_x, loc_y)``.
    coords : tuple of ndarray
        Points to calculate deflections at.
    params : dict-like, optional
        Physical parameters used for deflection calculation. Valid keys are
        - *eet*: Effective elastic thickness
        - *youngs*: Young's modulus
        - *gamma_mantle*: Specific weight of the mantle
    deflection : ndarray, optional
        Array to accumulate deflections into.
    n_procs : int, optional
        Number of processes to spread the calculation over.

    Returns
    -------
    deflection : ndarray
        Array of deflections.
    """
    params = params or dict(eet=6500., youngs=7.e10)
    eet, youngs = params['eet'], params['youngs']
    gamma_mantle = params.get('gamma_mantle', 33000.)

    if deflection is None:
        # Deflections are *accumulated* below, so the buffer must start at
        # zero (np.empty left it with garbage).  np.float was also removed
        # from modern numpy; the builtin float is equivalent.
        deflection = np.zeros(coords[0].size, dtype=float)

    assert(len(coords) in [1, 2])
    assert(len(locs) == len(coords))
    assert(loads.size == locs[0].size)

    if n_procs > 1:
        _subside_in_parallel(deflection, loads, locs, coords, eet, youngs,
                             gamma_mantle, n_procs=n_procs)
    else:
        # subside_point_load's signature is (load, loc, coords, params=None,
        # out=None); passing eet/youngs/gamma_mantle positionally bound them
        # to params/out and raised TypeError.  Package them as params.
        load_params = dict(eet=eet, youngs=youngs, gamma_mantle=gamma_mantle)
        for index in loads.nonzero()[0]:
            loc = [dim.flat[index] for dim in locs]
            deflection += subside_point_load(loads.flat[index], loc,
                                             coords, params=load_params)
    return deflection
def _subside_point_load_helper(args):
    """Unpack *args* and forward them to :func:`subside_point_load`.

    Needed because ``multiprocessing.Pool.map`` passes a single argument
    to its worker function.
    """
    return subside_point_load(*args)
def _subside_in_parallel(dz, loads, locs, coords, eet, youngs, gamma_mantle,
                         n_procs=4):
    """Accumulate point-load deflections into *dz* using a process pool.

    Parameters
    ----------
    dz : ndarray
        Output array that deflections are added to (modified in place).
    loads : ndarray
        Magnitude of the point loads.
    locs : tuple of ndarray
        (x, y) positions of the applied loads.
    coords : tuple of ndarray
        Points to calculate deflections at.
    eet, youngs, gamma_mantle : float
        Physical parameters forwarded to :func:`subside_point_load`.
    n_procs : int, optional
        Number of worker processes.
    """
    # subside_point_load's signature is (load, loc, coords, params=None,
    # out=None); the old 6-tuple bound eet/youngs to params/out and raised
    # TypeError in the workers.  Package the physics into a params dict.
    params = dict(eet=eet, youngs=youngs, gamma_mantle=gamma_mantle)

    args = []
    for index in loads.nonzero()[0]:
        loc = (locs[0].flat[index], locs[1].flat[index])
        args.append((loads.flat[index], loc, coords, params))

    pool = Pool(processes=n_procs)
    try:
        results = pool.map(_subside_point_load_helper, args)
    finally:
        # Release worker processes (previously leaked).
        pool.close()
        pool.join()

    for result in results:
        try:
            dz += result
        except ValueError:
            # Worker may return a flat array; reshape to match dz.
            result.shape = dz.shape
            dz += result
if __name__ == '__main__':
    # Run this module's doctests when executed as a script.
    import doctest
    doctest.testmod()
| |
#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class lbmonbindings_servicegroup_binding(base_resource) :
    """ Binding class showing the servicegroup that can be bound to lbmonbindings.

    Generated NITRO API binding (Python 2 style: note the ``ur`` string
    prefixes).  Instances are populated by the service's payload formatter
    from JSON responses.
    """
    def __init__(self) :
        # Backing fields for the read-only/read-write properties below.
        self._servicegroupname = ""
        self._servicetype = ""
        self._boundservicegroupsvrstate = ""
        self._monstate = ""
        self._monitorname = ""
        # NOTE(review): '___count' is name-mangled here, but count() and
        # count_filtered() read the *unmangled* '___count' key that the
        # payload formatter sets on response objects from outside the
        # class — presumably intentional in the generated SDK; confirm.
        self.___count = 0

    @property
    def monitorname(self) :
        ur"""The name of the monitor.<br/>Minimum length = 1.
        """
        try :
            return self._monitorname
        except Exception as e:
            raise e

    @monitorname.setter
    def monitorname(self, monitorname) :
        ur"""The name of the monitor.<br/>Minimum length = 1
        """
        try :
            self._monitorname = monitorname
        except Exception as e:
            raise e

    @property
    def servicegroupname(self) :
        ur"""The name of the service group.
        """
        try :
            return self._servicegroupname
        except Exception as e:
            raise e

    @servicegroupname.setter
    def servicegroupname(self, servicegroupname) :
        ur"""The name of the service group.
        """
        try :
            self._servicegroupname = servicegroupname
        except Exception as e:
            raise e

    @property
    def boundservicegroupsvrstate(self) :
        ur"""The state of the servicegroup.<br/>Possible values = ENABLED, DISABLED.
        """
        try :
            return self._boundservicegroupsvrstate
        except Exception as e:
            raise e

    @property
    def monstate(self) :
        ur"""The configured state (enable/disable) of Monitor on this service.<br/>Possible values = ENABLED, DISABLED.
        """
        try :
            return self._monstate
        except Exception as e:
            raise e

    @property
    def servicetype(self) :
        ur"""The type of service.<br/>Possible values = HTTP, FTP, TCP, UDP, SSL, SSL_BRIDGE, SSL_TCP, DTLS, NNTP, RPCSVR, DNS, ADNS, SNMP, RTSP, DHCPRA, ANY, SIP_UDP, DNS_TCP, ADNS_TCP, MYSQL, MSSQL, ORACLE, RADIUS, RDP, DIAMETER, SSL_DIAMETER, TFTP.
        """
        try :
            return self._servicetype
        except Exception as e:
            raise e

    def _get_nitro_response(self, service, response) :
        ur""" converts nitro response into object and returns the object array in case of get request.
        """
        try :
            result = service.payload_formatter.string_to_resource(lbmonbindings_servicegroup_binding_response, response, self.__class__.__name__)
            if(result.errorcode != 0) :
                # errorcode 444 indicates the session has expired.
                if (result.errorcode == 444) :
                    service.clear_session(self)
                if result.severity :
                    if (result.severity == "ERROR") :
                        raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
                else :
                    raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
            return result.lbmonbindings_servicegroup_binding
        except Exception as e :
            raise e

    def _get_object_name(self) :
        ur""" Returns the value of object identifier argument
        """
        try :
            if self.monitorname is not None :
                return str(self.monitorname)
            return None
        except Exception as e :
            raise e

    @classmethod
    def get(cls, service, monitorname) :
        ur""" Use this API to fetch lbmonbindings_servicegroup_binding resources.
        """
        try :
            obj = lbmonbindings_servicegroup_binding()
            obj.monitorname = monitorname
            response = obj.get_resources(service)
            return response
        except Exception as e:
            raise e

    @classmethod
    def get_filtered(cls, service, monitorname, filter_) :
        ur""" Use this API to fetch filtered set of lbmonbindings_servicegroup_binding resources.
        Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
        """
        try :
            obj = lbmonbindings_servicegroup_binding()
            obj.monitorname = monitorname
            option_ = options()
            option_.filter = filter_
            response = obj.getfiltered(service, option_)
            return response
        except Exception as e:
            raise e

    @classmethod
    def count(cls, service, monitorname) :
        ur""" Use this API to count lbmonbindings_servicegroup_binding resources configured on NetScaler.
        """
        try :
            obj = lbmonbindings_servicegroup_binding()
            obj.monitorname = monitorname
            option_ = options()
            option_.count = True
            response = obj.get_resources(service, option_)
            if response :
                return response[0].__dict__['___count']
            return 0
        except Exception as e:
            raise e

    @classmethod
    def count_filtered(cls, service, monitorname, filter_) :
        ur""" Use this API to count the filtered set of lbmonbindings_servicegroup_binding resources.
        Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
        """
        try :
            obj = lbmonbindings_servicegroup_binding()
            obj.monitorname = monitorname
            option_ = options()
            option_.count = True
            option_.filter = filter_
            response = obj.getfiltered(service, option_)
            if response :
                return response[0].__dict__['___count']
            return 0
        except Exception as e:
            raise e

    class Boundservicegroupsvrstate:
        # Allowed values for boundservicegroupsvrstate.
        ENABLED = "ENABLED"
        DISABLED = "DISABLED"

    class Servicetype:
        # Allowed values for servicetype.
        HTTP = "HTTP"
        FTP = "FTP"
        TCP = "TCP"
        UDP = "UDP"
        SSL = "SSL"
        SSL_BRIDGE = "SSL_BRIDGE"
        SSL_TCP = "SSL_TCP"
        DTLS = "DTLS"
        NNTP = "NNTP"
        RPCSVR = "RPCSVR"
        DNS = "DNS"
        ADNS = "ADNS"
        SNMP = "SNMP"
        RTSP = "RTSP"
        DHCPRA = "DHCPRA"
        ANY = "ANY"
        SIP_UDP = "SIP_UDP"
        DNS_TCP = "DNS_TCP"
        ADNS_TCP = "ADNS_TCP"
        MYSQL = "MYSQL"
        MSSQL = "MSSQL"
        ORACLE = "ORACLE"
        RADIUS = "RADIUS"
        RDP = "RDP"
        DIAMETER = "DIAMETER"
        SSL_DIAMETER = "SSL_DIAMETER"
        TFTP = "TFTP"

    class Monstate:
        # Allowed values for monstate.
        ENABLED = "ENABLED"
        DISABLED = "DISABLED"
class lbmonbindings_servicegroup_binding_response(base_response) :
    """Response envelope for lbmonbindings_servicegroup_binding requests.

    Parameters
    ----------
    length : int, optional
        Number of empty resource objects to pre-allocate.
    """
    def __init__(self, length=1) :
        # NITRO response metadata.
        self.errorcode = 0
        self.message = ""
        self.severity = ""
        self.sessionid = ""
        # Pre-size the resource list.  (The generated code first assigned an
        # empty list that was immediately overwritten; the dead assignment
        # has been removed.)
        self.lbmonbindings_servicegroup_binding = [lbmonbindings_servicegroup_binding() for _ in range(length)]
| |
import Priors
class Parameter(object):
    """A model parameter with bounds, a step size (delta) and a prior.

    Recognized keyword arguments: unit, normalization, fixed, nuisance,
    dataset.  Unrecognized keywords are silently ignored.  Normalization
    (scale) parameters get a log-uniform prior by default, all others a
    uniform prior.  NOTE(review): this module is Python 2 only
    (``dict.iteritems``).
    """
    def __init__(self,name,initValue,minValue,maxValue,delta,**kwargs):
        self.name = str(name)
        self.value = initValue
        self.minValue = minValue
        self.maxValue = maxValue
        self.delta = delta
        self.unit = ''
        self.fixed = False
        self.nuisance = False
        self.dataset = None
        self.callback = []
        self.normalization = False
        # Apply optional keyword settings (case-insensitive keys).
        for k,v in kwargs.iteritems():
            if(k.lower()=='unit'):
                self.unit = str(v)
            elif(k.lower()=='normalization'):
                self.normalization = bool(v)
            elif(k.lower()=='fixed'):
                self.fixed = bool(v)
            elif(k.lower()=='nuisance'):
                self.nuisance = bool(v)
            elif(k.lower()=='dataset'):
                self.dataset = v
            pass
        pass

        #Default prior is a uniform prior
        if(self.normalization):
            #This is a scale parameter
            self.setPrior(Priors.LogUniformPrior(self.minValue,self.maxValue))
        else:
            self.setPrior(Priors.UniformPrior(self.minValue,self.maxValue))
        pass

    def setCallback(self,callback):
        #The callback functions will be executed on any parameter value change
        self.callback.append(callback)
        pass

    def __eq__(self,value):
        # NOTE(review): '==' is (ab)used as an assignment operator here:
        # "p == 3" *sets* the value and returns None, so Parameter
        # instances cannot be compared for equality the usual way.
        self.setValue(value)

    def __repr__(self):
        if(self.fixed):
            ff = "fixed"
        else:
            ff = "free"
        pass
        return "%20s: %10g %10g %10g %10g %s %s" %(self.name,self.value,self.minValue,self.maxValue,self.delta,ff,self.unit)
    pass

    def getValue(self):
        return self.value

    def setValue(self,value):
        """Set the value, clamp delta to 20% of it, and fire callbacks."""
        self.value = float(value)
        if(abs(self.delta) > 0.2*abs(self.value)):
            #Keep the delta below 20% of the value
            #(the old comment said 50%, but the code uses 0.2)
            self.delta = 0.2 * self.value
        for c in self.callback:
            c()
        pass

    def setBounds(self,minValue,maxValue):
        # Keep the prior's bounds in sync with the parameter bounds.
        self.minValue = minValue
        self.maxValue = maxValue
        self.prior.setBounds(minValue,maxValue)
        pass

    def setDelta(self,delta):
        self.delta = delta
        pass

    def setPrior(self,prior):
        self.prior = prior
        pass

    def setDataset(self,dataset):
        self.dataset = dataset

    def fix(self):
        self.fixed = True
        pass

    def free(self):
        self.fixed = False
        pass

    def isNuisance(self):
        return self.nuisance

    def isNormalization(self):
        return self.normalization

    def isFixed(self):
        return self.fixed

    def isFree(self):
        return (not self.fixed)
    pass
class SpatialParameter(object):
    #this class provides a place holder for spatial parameters that vary
    #with energy; for the moment it works exactly as the regular parameter,
    #with a value independent from energy
    def __init__(self,name,initValue,minValue,maxValue,delta,**kwargs):
        """Same construction contract as Parameter (unit, normalization,
        fixed, nuisance, dataset keywords; unknown keys ignored)."""
        self.name = str(name)
        self.value = initValue
        self.minValue = minValue
        self.maxValue = maxValue
        self.delta = delta
        self.unit = ''
        self.fixed = False
        self.nuisance = False
        self.dataset = None
        self.callback = []
        self.normalization = False
        # Apply optional keyword settings (case-insensitive keys).
        for k,v in kwargs.iteritems():
            if(k.lower()=='unit'):
                self.unit = str(v)
            elif(k.lower()=='normalization'):
                self.normalization = bool(v)
            elif(k.lower()=='fixed'):
                self.fixed = bool(v)
            elif(k.lower()=='nuisance'):
                self.nuisance = bool(v)
            elif(k.lower()=='dataset'):
                self.dataset = v
            pass
        pass

        #Default prior is a uniform prior
        if(self.normalization):
            #This is a scale parameter
            self.setPrior(Priors.LogUniformPrior(self.minValue,self.maxValue))
        else:
            self.setPrior(Priors.UniformPrior(self.minValue,self.maxValue))
        pass

    def setCallback(self,callback):
        #The callback functions will be executed on any parameter value change
        self.callback.append(callback)
        pass

    def __eq__(self,value):
        # NOTE(review): '==' sets the value (returns None); see Parameter.
        self.setValue(value)

    def __repr__(self):
        if(self.fixed):
            ff = "fixed"
        else:
            ff = "free"
        pass
        return "%20s: %10g %10g %10g %10g %s %s" %(self.name,self.value,self.minValue,self.maxValue,self.delta,ff,self.unit)
    pass

    def setValue(self,value):
        """Set the value, clamp delta to 20% of it, and fire callbacks."""
        self.value = float(value)
        if(abs(self.delta) > 0.2*abs(self.value)):
            #Keep the delta below 20% of the value
            #(the old comment said 50%, but the code uses 0.2)
            self.delta = 0.2 * self.value
        for c in self.callback:
            c()
        pass

    def getValue(self,energy):
        # The energy argument is currently ignored: the value does not yet
        # actually vary with energy (see class comment).
        return self.value
    pass

    def setBounds(self,minValue,maxValue):
        # Keep the prior's bounds in sync with the parameter bounds.
        self.minValue = minValue
        self.maxValue = maxValue
        self.prior.setBounds(minValue,maxValue)
        pass

    def setDelta(self,delta):
        self.delta = delta
        pass

    def setPrior(self,prior):
        self.prior = prior
        pass

    def setDataset(self,dataset):
        self.dataset = dataset

    def fix(self):
        self.fixed = True
        pass

    def free(self):
        self.fixed = False
        pass

    def isNuisance(self):
        return self.nuisance

    def isNormalization(self):
        return self.normalization

    def isFixed(self):
        return self.fixed

    def isFree(self):
        return (not self.fixed)
    pass
| |
"""Custom widgets composed from standard tkinter widgets"""
from tkinter import *
class EntryFrame(Frame):
    """
    A tkinter Frame that holds a labeled entry widget with added behavior.

    EntryFrame will call the function (provided as 'model' in the arguments)
    when a change in the entry's value is committed.

    EntryFrame is intended as a new base class that will be inherited from.
    For example, the initialize() method needs to be overwritten to change
    the default initial entry of 0.00.

    Arguments (in addition to standard Frame options):
        name-- for widget label and introspection
        model-- a function that will request a calculation from the Model
    """

    def __init__(self, parent=None, name='', color='white',
                 model=None,
                 **options):
        """
        __init__ is broken into multiple method references, to allow
        subclasses to modify as needed.
        """
        Frame.__init__(self, parent, relief=RIDGE, borderwidth=0,
                       background=color, **options)
        self.name = name
        self.color = color
        self.model = model

        self.initialize()
        self.add_label()
        self.add_entry()
        self.bind_entry()
        self.validate_entry()

    def initialize(self):
        """
        Create a StringVar object; initialize self.value with the initial
        number, and initialize StringVar with that same value.

        Subclasses of EntryFrame should overwrite this function to
        accommodate however initial values are passed into them.
        """
        self.value_var = StringVar()
        self.value = 0.0
        self.value_var.set(self.value)

    def add_label(self):
        """Add the widget's name as a label above the entry."""
        Label(self, text=self.name, bg=self.color, bd=0).pack(side=TOP)

    def add_entry(self):
        """
        Subclasses of EntryBox that use a different entry widget (e.g. SpinBox)
        should overwrite this function.
        """
        self.entry = Entry(self, width=7,
                           validate='key')  # check for number on keypress
        self.entry.pack(side=TOP, fill=X)
        self.entry.config(textvariable=self.value_var)

    def bind_entry(self):
        """
        EntryFrame assumes action should only be taken when a change in the
        Entry widget is "committed" by hitting Return, Tab, or clicking
        outside the widget.
        Subclasses may overwrite/extend bind_entry to tailor behavior
        """
        self.entry.bind('<Return>', lambda event: self.on_return(event))
        self.entry.bind('<Tab>', lambda event: self.on_tab(event))
        self.entry.bind('<FocusOut>', lambda event: self.refresh())

    def on_return(self, event):
        """Commit the entry and move focus to the next entry-like widget."""
        self.refresh()
        self.find_next_entry(self.entry).focus()

    def refresh(self):
        """Save the entry and ask the model to recalculate, but only if the
        entry actually changed."""
        if self.entry_is_changed():
            self.save_entry()
            self.model()

    def entry_is_changed(self):
        """
        Return True if the current entry differs from the stored value.

        A blank entry is treated as 0.00, mirroring save_entry().  (The
        key validator accepts an empty string, so the previous
        float(self.value_var.get()) raised ValueError whenever the widget
        lost focus while blank.)
        """
        return self.value != float(self.value_var.get() or 0)

    def find_next_entry(self, current_widget):
        """
        Looks at the next entry in tkinter's widget traversal. If it is not of
        type Entry or Spinbox, it keeps looking until it finds one.
        Subclasses can modify this behavior if other widget types are to be
        acknowledged.
        :param current_widget: the widget that needs focus changed to the
        next entry-like widget
        :return: the next entry-like widget
        """
        next_entry = current_widget.tk_focusNext()
        if next_entry.widgetName in ['entry', 'spinbox']:
            return next_entry
        else:
            return self.find_next_entry(next_entry)

    def validate_entry(self):
        """
        The base EntryFrame class assumes the entry contents should be numerical
        """
        # check on each keypress if new result will be a number
        self.entry['validatecommand'] = (self.register(self.is_number), '%P')
        # sound 'bell' if bad keypress
        self.entry['invalidcommand'] = 'bell'

    @staticmethod
    def is_number(entry):
        """
        tests to see if entry is acceptable (either empty, or able to be
        converted to a float.)
        """
        if not entry:
            return True  # Empty string: OK if entire entry deleted
        try:
            float(entry)
            return True
        except ValueError:
            return False

    def on_tab(self, event):
        """Treat Tab like Return, then suppress default traversal."""
        self.on_return(event)
        return 'break'  # override default tkinter tab behavior

    def save_entry(self):
        """
        Saves widget's entry as self.stored_value , filling the entry with
        0.00 if it was empty.
        Subclasses should overwrite save_entry to suit needs of their data
        type and call to model
        """
        if not self.value_var.get():  # if entry left blank,
            self.value_var.set(0.00)  # fill it with zero
        value = float(self.value_var.get())
        self.value = value
class ArrayBox(EntryFrame):
    """
    An EntryFrame specialized for numpy 2D arrays: it reads its initial
    value from, and writes committed values back to, one cell of the array
    identified by a (row, col) coordinate.
    """

    def __init__(self, parent=None,
                 array=None, coord=(0, 0),
                 **options):
        self.array = array
        self.row, self.col = coord
        EntryFrame.__init__(self, parent,
                            # name, color,
                            **options)

    def initialize(self):
        """Seed the entry's StringVar from the linked array cell."""
        self.value_var = StringVar()
        self.value = self.array[self.row, self.col]
        self.value_var.set(self.value)

    def save_entry(self):
        """
        Write the committed entry back into the array, storing blank
        entries as zero.  A multi-row array is assumed to be symmetric,
        so the cross-diagonal element is kept in sync as well.
        """
        if not self.value_var.get():
            # Blank entry: record it as zero.
            self.value_var.set(0.00)
        committed = float(self.value_var.get())
        self.value = committed
        self.array[self.row, self.col] = committed
        if self.array.shape[0] > 1:
            # More than one row: assume a symmetric (J) matrix and mirror
            # the value across the diagonal.
            self.array[self.col, self.row] = committed
class ArraySpinBox(ArrayBox):
    """
    An ArrayBox that uses a Spinbox widget instead of an Entry widget.

    Arguments (in addition to standard ArrayBox options):
        from_, to, increment: Spinbox arguments (minimum and maximum values,
        and incremental change on each arrow click)
        realtime: True if data/model should be refreshed as the SpinBox arrow
        button is held down.
    """

    def __init__(self, parent=None, from_=0.00, to=100.00, increment=1,
                 realtime=False,
                 **options):
        self.realtime = realtime
        self.spinbox_kwargs = {'from_': from_,
                               'to': to,
                               'increment': increment}
        ArrayBox.__init__(self, parent, **options)

    def add_entry(self):
        """Install a Spinbox in place of the default Entry widget."""
        self.add_spinbox(**self.spinbox_kwargs)

    def add_spinbox(self, **kwargs):
        self.entry = Spinbox(self, width=7,
                             validate='key',  # check for number on keypress
                             **kwargs)
        self.entry.pack(side=TOP, fill=X)
        self.entry.config(textvariable=self.value_var)

    def bind_entry(self):
        """Bind commit events plus press/release on the spinbox arrows."""
        handlers = (
            ('<Return>', lambda event: self.on_return(event)),
            ('<Tab>', lambda event: self.on_tab(event)),
            ('<FocusOut>', lambda event: self.refresh()),
            ('<ButtonPress-1>', lambda event: self.on_press()),
            ('<ButtonRelease-1>', lambda event: self.on_release()),
        )
        for sequence, handler in handlers:
            self.entry.bind(sequence, handler)

    def on_press(self):
        """Start continuous refreshing while an arrow button is held."""
        if self.realtime:
            self.loop_refresh()

    def loop_refresh(self):
        """Refresh now, then reschedule another refresh 50 ms from now."""
        self.refresh()
        self.button_held_job = self._root().after(50, self.loop_refresh)

    def on_release(self):
        """Stop continuous refreshing and do one final, delayed refresh."""
        if self.realtime:
            self._root().after_cancel(self.button_held_job)
        # A 1-ms delay allows the StringVar to be updated prior to the
        # entry_is_changed check. See related StackOverflow question:
        # https://stackoverflow.com/questions/46504930/
        self.after(1, self.refresh)
if __name__ == '__main__':
    # Manual smoke test: show one of each widget wired to a dummy "model".
    import numpy as np

    dummy_array = np.array([[1, 42, 99]])

    root = Tk()
    root.title('test widgets')

    class TestFrame(Frame):
        def __init__(self, parent, **options):
            Frame.__init__(self, parent, **options)

        def call_model(self):
            # Stand-in for the real model: just report state to stdout.
            for child in self.winfo_children():
                print('I have child: ', child.name)
            print('requesting calculation from the model')
            print(dummy_array)

    mainwindow = TestFrame(root)
    mainwindow.pack()

    baseclass = EntryFrame(mainwindow, name='baseclass',
                           model=mainwindow.call_model)
    baseclass.pack(side=LEFT)
    newarray = ArrayBox(mainwindow, array=dummy_array, coord=(0, 1),
                        name='V42', model=mainwindow.call_model)
    newarray.pack(side=LEFT)
    newspinbox = ArraySpinBox(mainwindow, array=dummy_array, coord=(0, 2),
                              name='V99', model=mainwindow.call_model)
    newspinbox.pack(side=LEFT)

    # Add space to right to debug spinbox arrows
    # Label(mainwindow, text='spacer', bg='white', bd=0).pack(side=LEFT)

    # workaround fix for Tk problems and mac mouse/trackpad:
    while True:
        try:
            root.mainloop()
            break
        except UnicodeDecodeError:
            pass
| |
"""Test the Volumio config flow."""
from unittest.mock import patch
from homeassistant import config_entries
from homeassistant.components.volumio.config_flow import CannotConnectError
from homeassistant.components.volumio.const import DOMAIN
from tests.common import MockConfigEntry
# System info as returned by Volumio.get_system_info for a known device.
TEST_SYSTEM_INFO = {"id": "1111-1111-1111-1111", "name": "TestVolumio"}

# User-supplied connection details entered in the config-flow form.
TEST_CONNECTION = {
    "host": "1.1.1.1",
    "port": 3000,
}

# Raw zeroconf discovery payload for a Volumio device.
TEST_DISCOVERY = {
    "host": "1.1.1.1",
    "port": 3000,
    "properties": {"volumioName": "discovered", "UUID": "2222-2222-2222-2222"},
}

# Config-entry data the discovery flow is expected to produce.
TEST_DISCOVERY_RESULT = {
    "host": TEST_DISCOVERY["host"],
    "port": TEST_DISCOVERY["port"],
    "id": TEST_DISCOVERY["properties"]["UUID"],
    "name": TEST_DISCOVERY["properties"]["volumioName"],
}
async def test_form(hass):
    """Test we get the form."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert result["type"] == "form"
    assert result["errors"] == {}

    with patch(
        "homeassistant.components.volumio.config_flow.Volumio.get_system_info",
        return_value=TEST_SYSTEM_INFO,
    ), patch(
        "homeassistant.components.volumio.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.volumio.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            TEST_CONNECTION,
        )
        await hass.async_block_till_done()

    # Entry data combines the device's system info with the user's
    # connection details, and creating it must trigger setup exactly once.
    assert result2["type"] == "create_entry"
    assert result2["title"] == "TestVolumio"
    assert result2["data"] == {**TEST_SYSTEM_INFO, **TEST_CONNECTION}

    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1
async def test_form_updates_unique_id(hass):
    """Test a duplicate id aborts and updates existing entry."""
    # Pre-register an entry with the same unique id but stale data.
    entry = MockConfigEntry(
        domain=DOMAIN,
        unique_id=TEST_SYSTEM_INFO["id"],
        data={
            "host": "dummy",
            "port": 11,
            "name": "dummy",
            "id": TEST_SYSTEM_INFO["id"],
        },
    )

    entry.add_to_hass(hass)
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )

    with patch(
        "homeassistant.components.volumio.config_flow.Volumio.get_system_info",
        return_value=TEST_SYSTEM_INFO,
    ), patch("homeassistant.components.volumio.async_setup", return_value=True), patch(
        "homeassistant.components.volumio.async_setup_entry",
        return_value=True,
    ):
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            TEST_CONNECTION,
        )
        await hass.async_block_till_done()

    # The flow aborts, but the existing entry's data is refreshed in place.
    assert result2["type"] == "abort"
    assert result2["reason"] == "already_configured"
    assert entry.data == {**TEST_SYSTEM_INFO, **TEST_CONNECTION}
async def test_empty_system_info(hass):
    """Test old volumio versions with empty system info."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert result["type"] == "form"
    assert result["errors"] == {}

    with patch(
        "homeassistant.components.volumio.config_flow.Volumio.get_system_info",
        return_value={},
    ), patch(
        "homeassistant.components.volumio.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.volumio.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            TEST_CONNECTION,
        )
        await hass.async_block_till_done()

    # With no system info the host doubles as title/name and the id is
    # left unset.
    assert result2["type"] == "create_entry"
    assert result2["title"] == TEST_CONNECTION["host"]
    assert result2["data"] == {
        "host": TEST_CONNECTION["host"],
        "port": TEST_CONNECTION["port"],
        "name": TEST_CONNECTION["host"],
        "id": None,
    }

    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1
async def test_form_cannot_connect(hass):
    """Test we handle cannot connect error."""
    flow = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )

    target = "homeassistant.components.volumio.config_flow.Volumio.get_system_info"
    with patch(target, side_effect=CannotConnectError):
        outcome = await hass.config_entries.flow.async_configure(
            flow["flow_id"],
            TEST_CONNECTION,
        )

    # The form is re-shown with a connection error.
    assert outcome["type"] == "form"
    assert outcome["errors"] == {"base": "cannot_connect"}
async def test_form_exception(hass):
    """Test we handle generic error."""
    flow = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )

    target = "homeassistant.components.volumio.config_flow.Volumio.get_system_info"
    with patch(target, side_effect=Exception):
        outcome = await hass.config_entries.flow.async_configure(
            flow["flow_id"],
            TEST_CONNECTION,
        )

    # An unexpected exception surfaces as the generic "unknown" error.
    assert outcome["type"] == "form"
    assert outcome["errors"] == {"base": "unknown"}
async def test_discovery(hass):
    """Test discovery flow works."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
    )

    with patch(
        "homeassistant.components.volumio.config_flow.Volumio.get_system_info",
        return_value=TEST_SYSTEM_INFO,
    ), patch(
        "homeassistant.components.volumio.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.volumio.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            user_input={},
        )
        await hass.async_block_till_done()

    # The entry is built from the zeroconf payload (TEST_DISCOVERY_RESULT),
    # not from get_system_info.
    assert result2["type"] == "create_entry"
    assert result2["title"] == TEST_DISCOVERY_RESULT["name"]
    assert result2["data"] == TEST_DISCOVERY_RESULT

    assert result2["result"]
    assert result2["result"].unique_id == TEST_DISCOVERY_RESULT["id"]

    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1
async def test_discovery_cannot_connect(hass):
    """Test discovery aborts if cannot connect."""
    flow = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
    )

    target = "homeassistant.components.volumio.config_flow.Volumio.get_system_info"
    with patch(target, side_effect=CannotConnectError):
        outcome = await hass.config_entries.flow.async_configure(
            flow["flow_id"],
            user_input={},
        )

    # Discovery flows abort outright instead of re-showing a form.
    assert outcome["type"] == "abort"
    assert outcome["reason"] == "cannot_connect"
async def test_discovery_duplicate_data(hass):
    """Test discovery aborts if same mDNS packet arrives."""
    first = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
    )
    # First packet starts a confirmation flow...
    assert first["type"] == "form"
    assert first["step_id"] == "discovery_confirm"

    second = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
    )
    # ...while an identical second packet is rejected as in-progress.
    assert second["type"] == "abort"
    assert second["reason"] == "already_in_progress"
async def test_discovery_updates_unique_id(hass):
    """Test a duplicate discovery id aborts and updates existing entry."""
    entry = MockConfigEntry(
        domain=DOMAIN,
        unique_id=TEST_DISCOVERY_RESULT["id"],
        data={
            "host": "dummy",
            "port": 11,
            "name": "dummy",
            "id": TEST_DISCOVERY_RESULT["id"],
        },
        # NOTE(review): setup-retry state appears to be what makes the
        # update trigger a fresh setup below — confirm against the flow.
        state=config_entries.ENTRY_STATE_SETUP_RETRY,
    )

    entry.add_to_hass(hass)

    with patch(
        "homeassistant.components.volumio.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.volumio.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
        )
        await hass.async_block_till_done()

    # The flow aborts but refreshes the stale entry with discovery data.
    assert result["type"] == "abort"
    assert result["reason"] == "already_configured"
    assert entry.data == TEST_DISCOVERY_RESULT

    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1
| |
from time import time
import argparse
import os
from pprint import pprint
import numpy as np
from threadpoolctl import threadpool_limits
import sklearn
from sklearn.model_selection import train_test_split
from sklearn.ensemble import HistGradientBoostingRegressor
from sklearn.ensemble import HistGradientBoostingClassifier
from sklearn.datasets import make_classification
from sklearn.datasets import make_regression
from sklearn.ensemble._hist_gradient_boosting.utils import (
get_equivalent_estimator)
# Command-line interface for the benchmark script.
parser = argparse.ArgumentParser()
# Model-size / learning hyper-parameters.
parser.add_argument('--n-leaf-nodes', type=int, default=31)
parser.add_argument('--n-trees', type=int, default=10)
# Optional third-party libraries to benchmark against.
parser.add_argument('--lightgbm', action="store_true", default=False,
                    help='also benchmark lightgbm')
parser.add_argument('--xgboost', action="store_true", default=False,
                    help='also benchmark xgboost')
parser.add_argument('--catboost', action="store_true", default=False,
                    help='also benchmark catboost')
parser.add_argument('--learning-rate', type=float, default=.1)
parser.add_argument('--problem', type=str, default='classification',
                    choices=['classification', 'regression'])
parser.add_argument('--loss', type=str, default='default')
parser.add_argument('--missing-fraction', type=float, default=0)
parser.add_argument('--n-classes', type=int, default=2)
parser.add_argument('--n-samples', type=int, default=int(1e6))
parser.add_argument('--n-features', type=int, default=100)
parser.add_argument('--max-bins', type=int, default=255)
parser.add_argument('--print-params', action="store_true", default=False)
parser.add_argument('--random-sample-weights', action="store_true",
                    default=False,
                    help="generate and use random sample weights")
parser.add_argument('--plot', action="store_true", default=False,
                    help='show a plot results')
parser.add_argument('--plot-filename', default=None,
                    help='filename to save the figure to disk')
args = parser.parse_args()

# Convenience aliases used throughout the script.
n_samples = args.n_samples
n_leaf_nodes = args.n_leaf_nodes
n_trees = args.n_trees
lr = args.learning_rate
max_bins = args.max_bins

print("Data size: %d samples train, %d samples test."
      % (n_samples, n_samples))
print(f"n_features: {args.n_features}")
def get_estimator_and_data():
    """Build the synthetic dataset and pick the matching estimator class.

    Returns
    -------
    X, y : ndarray
        ``2 * args.n_samples`` samples (half train, half test after the
        later split).
    Estimator : type
        HistGradientBoostingClassifier or HistGradientBoostingRegressor,
        depending on ``args.problem``.
    """
    if args.problem == 'classification':
        X, y = make_classification(args.n_samples * 2,
                                   n_features=args.n_features,
                                   n_classes=args.n_classes,
                                   n_clusters_per_class=1,
                                   n_informative=args.n_features // 2,
                                   random_state=0)
        return X, y, HistGradientBoostingClassifier
    elif args.problem == 'regression':
        # Was args.n_samples_max, which does not exist (--n-samples parses
        # to args.n_samples) and made the regression path raise
        # AttributeError.
        X, y = make_regression(args.n_samples * 2,
                               n_features=args.n_features, random_state=0)
        return X, y, HistGradientBoostingRegressor
X, y, Estimator = get_estimator_and_data()

# Optionally knock out a random fraction of entries to exercise
# missing-value support.
if args.missing_fraction:
    mask = np.random.binomial(1, args.missing_fraction, size=X.shape).astype(
        bool)
    X[mask] = np.nan

if args.random_sample_weights:
    sample_weight = np.random.rand(len(X)) * 10
else:
    sample_weight = None

# Split half/half into train and test (carrying the weights along when
# they exist).
if sample_weight is not None:
    (X_train_, X_test_, y_train_, y_test_,
     sample_weight_train_, _) = train_test_split(
        X, y, sample_weight, test_size=0.5, random_state=0)
else:
    X_train_, X_test_, y_train_, y_test_ = train_test_split(
        X, y, test_size=0.5, random_state=0)
    sample_weight_train_ = None

# Reference scikit-learn estimator.  Early stopping is disabled so every
# library builds the same number of trees.
sklearn_est = Estimator(
    learning_rate=lr,
    max_iter=n_trees,
    max_bins=max_bins,
    max_leaf_nodes=n_leaf_nodes,
    early_stopping=False,
    random_state=0,
    verbose=0,
)
loss = args.loss
if args.problem == 'classification':
    if loss == 'default':
        # loss='auto' does not work with get_equivalent_estimator()
        loss = 'binary_crossentropy' if args.n_classes == 2 else \
            'categorical_crossentropy'
else:
    # regression
    if loss == 'default':
        loss = 'squared_error'
sklearn_est.set_params(loss=loss)

if args.print_params:
    # Dump the resolved parameters for sklearn and each requested library.
    print("scikit-learn")
    pprint(sklearn_est.get_params())

    for libname in ["lightgbm", "xgboost", "catboost"]:
        if getattr(args, libname):
            print(libname)
            est = get_equivalent_estimator(sklearn_est, lib=libname)
            pprint(est.get_params())
def one_run(n_threads, n_samples):
    """Fit and score every requested library with `n_threads` threads.

    Slices the first `n_samples` rows off the module-level train/test
    splits, then fits/scores scikit-learn (OpenMP thread count limited via
    threadpoolctl) and, when requested on the command line, LightGBM,
    XGBoost and CatBoost equivalents.

    Returns a 12-tuple of (score, fit_duration, score_duration) for
    sklearn, lightgbm, xgboost, catboost; entries for libraries that were
    not requested are None.
    """
    X_train = X_train_[:n_samples]
    X_test = X_test_[:n_samples]
    y_train = y_train_[:n_samples]
    y_test = y_test_[:n_samples]
    if sample_weight is not None:
        sample_weight_train = sample_weight_train_[:n_samples]
    else:
        sample_weight_train = None
    assert X_train.shape[0] == n_samples
    assert X_test.shape[0] == n_samples
    print("Fitting a sklearn model...")
    tic = time()
    # Clone so each run starts from an unfitted estimator.
    est = sklearn.base.clone(sklearn_est)
    with threadpool_limits(n_threads, user_api="openmp"):
        est.fit(X_train, y_train, sample_weight=sample_weight_train)
        sklearn_fit_duration = time() - tic
        tic = time()
        sklearn_score = est.score(X_test, y_test)
        sklearn_score_duration = time() - tic
        print("score: {:.4f}".format(sklearn_score))
        print("fit duration: {:.3f}s,".format(sklearn_fit_duration))
        print("score duration: {:.3f}s,".format(sklearn_score_duration))
    lightgbm_score = None
    lightgbm_fit_duration = None
    lightgbm_score_duration = None
    if args.lightgbm:
        print("Fitting a LightGBM model...")
        lightgbm_est = get_equivalent_estimator(est, lib='lightgbm')
        lightgbm_est.set_params(num_threads=n_threads)
        tic = time()
        lightgbm_est.fit(X_train, y_train, sample_weight=sample_weight_train)
        lightgbm_fit_duration = time() - tic
        tic = time()
        lightgbm_score = lightgbm_est.score(X_test, y_test)
        lightgbm_score_duration = time() - tic
        print("score: {:.4f}".format(lightgbm_score))
        print("fit duration: {:.3f}s,".format(lightgbm_fit_duration))
        print("score duration: {:.3f}s,".format(lightgbm_score_duration))
    xgb_score = None
    xgb_fit_duration = None
    xgb_score_duration = None
    if args.xgboost:
        print("Fitting an XGBoost model...")
        xgb_est = get_equivalent_estimator(est, lib='xgboost')
        xgb_est.set_params(nthread=n_threads)
        tic = time()
        xgb_est.fit(X_train, y_train, sample_weight=sample_weight_train)
        xgb_fit_duration = time() - tic
        tic = time()
        xgb_score = xgb_est.score(X_test, y_test)
        xgb_score_duration = time() - tic
        print("score: {:.4f}".format(xgb_score))
        print("fit duration: {:.3f}s,".format(xgb_fit_duration))
        print("score duration: {:.3f}s,".format(xgb_score_duration))
    cat_score = None
    cat_fit_duration = None
    cat_score_duration = None
    if args.catboost:
        print("Fitting a CatBoost model...")
        cat_est = get_equivalent_estimator(est, lib='catboost')
        cat_est.set_params(thread_count=n_threads)
        tic = time()
        cat_est.fit(X_train, y_train, sample_weight=sample_weight_train)
        cat_fit_duration = time() - tic
        tic = time()
        cat_score = cat_est.score(X_test, y_test)
        cat_score_duration = time() - tic
        print("score: {:.4f}".format(cat_score))
        print("fit duration: {:.3f}s,".format(cat_fit_duration))
        print("score duration: {:.3f}s,".format(cat_score_duration))
    return (sklearn_score, sklearn_fit_duration, sklearn_score_duration,
            lightgbm_score, lightgbm_fit_duration, lightgbm_score_duration,
            xgb_score, xgb_fit_duration, xgb_score_duration,
            cat_score, cat_fit_duration, cat_score_duration)
max_threads = os.cpu_count()
# Sweep powers of two below the core count, then the core count itself.
n_threads_list = [2 ** i for i in range(8) if (2 ** i) < max_threads]
n_threads_list.append(max_threads)
sklearn_scores = []
sklearn_fit_durations = []
sklearn_score_durations = []
lightgbm_scores = []
lightgbm_fit_durations = []
lightgbm_score_durations = []
xgb_scores = []
xgb_fit_durations = []
xgb_score_durations = []
cat_scores = []
cat_fit_durations = []
cat_score_durations = []
for n_threads in n_threads_list:
    print(f"n_threads: {n_threads}")
    (
        sklearn_score,
        sklearn_fit_duration,
        sklearn_score_duration,
        lightgbm_score,
        lightgbm_fit_duration,
        lightgbm_score_duration,
        xgb_score,
        xgb_fit_duration,
        xgb_score_duration,
        cat_score,
        cat_fit_duration,
        cat_score_duration
    ) = one_run(n_threads, n_samples)
    # Accumulate each metric into its per-library series for plotting.
    for scores, score in (
            (sklearn_scores, sklearn_score),
            (sklearn_fit_durations, sklearn_fit_duration),
            (sklearn_score_durations, sklearn_score_duration),
            (lightgbm_scores, lightgbm_score),
            (lightgbm_fit_durations, lightgbm_fit_duration),
            (lightgbm_score_durations, lightgbm_score_duration),
            (xgb_scores, xgb_score),
            (xgb_fit_durations, xgb_fit_duration),
            (xgb_score_durations, xgb_score_duration),
            (cat_scores, cat_score),
            (cat_fit_durations, cat_fit_duration),
            (cat_score_durations, cat_score_duration)):
        scores.append(score)
if args.plot or args.plot_filename:
    # Import lazily so matplotlib is only required when plotting.
    import matplotlib.pyplot as plt
    import matplotlib
    fig, axs = plt.subplots(2, figsize=(12, 12))
    label = f"sklearn {sklearn.__version__}"
    axs[0].plot(n_threads_list, sklearn_fit_durations, label=label)
    axs[1].plot(n_threads_list, sklearn_score_durations, label=label)
    if args.lightgbm:
        import lightgbm
        label = f'LightGBM {lightgbm.__version__}'
        axs[0].plot(n_threads_list, lightgbm_fit_durations, label=label)
        axs[1].plot(n_threads_list, lightgbm_score_durations, label=label)
    if args.xgboost:
        import xgboost
        label = f'XGBoost {xgboost.__version__}'
        axs[0].plot(n_threads_list, xgb_fit_durations, label=label)
        axs[1].plot(n_threads_list, xgb_score_durations, label=label)
    if args.catboost:
        import catboost
        label = f'CatBoost {catboost.__version__}'
        axs[0].plot(n_threads_list, cat_fit_durations, label=label)
        axs[1].plot(n_threads_list, cat_score_durations, label=label)
    for ax in axs:
        # Log x-axis with plain (non-exponent) tick labels per thread count.
        ax.set_xscale('log')
        ax.set_xlabel('n_threads')
        ax.set_ylabel('duration (s)')
        ax.set_ylim(0, None)
        ax.set_xticks(n_threads_list)
        ax.get_xaxis().set_major_formatter(matplotlib.ticker.ScalarFormatter())
        ax.legend(loc='best')
    axs[0].set_title('fit duration (s)')
    axs[1].set_title('score duration (s)')
    title = args.problem
    if args.problem == 'classification':
        title += ' n_classes = {}'.format(args.n_classes)
    fig.suptitle(title)
    plt.tight_layout()
    if args.plot_filename:
        plt.savefig(args.plot_filename)
    if args.plot:
        plt.show()
| |
"""
Core functions and classes for FS Nav
"""
import getpass
import os
from os.path import expanduser
from os.path import join
import re
import sys
__all__ = ['Aliases', 'CONFIGFILE', 'DEFAULT_ALIASES']
class Aliases(dict):
    def __init__(self, *args, **kwargs):
        """
        Reference specific directories on the filesystem via user-defined aliases
        stored in a dictionary-like object.
        One could just store the aliases and directories in a dictionary but they
        should be validated when added.  Furthermore directory locations and names
        are not completely standardized across operating systems.  The default
        aliases found in `fsnav.DEFAULT_ALIASES` are almost identical when loaded
        on every platform but point to slightly different directories.
        In general, treat `Aliases()` as though it was a dictionary.  In order
        to add an alias, set a key equal to a directory.  Aliases must not
        have spaces or punctuation and directories must exist and be executable.
        An instance of `Aliases()` can be created the following ways:
        >>> aliases = Aliases()
        >>> aliases['home'] = '~/'
        >>> aliases['desk'] = '~/Desktop'
        >>> aliases = Aliases(home='~/', desk='~/Desktop')
        >>> aliases = Aliases({'home': '~/', 'desk': '~/Desktop'})
        >>> aliases = Aliases((('home', '~/'), ('desk', '~/Desktop')))
        >>> print(aliases)
        Aliases({'home': '/Users/wursterk/', 'desk': '/Users/wursterk/Desktop'})
        Aliases can then be used for navigation, most notably via the included
        commandline utility ``nav``.  See ``nav --help`` for more information
        >>> os.chdir(aliases['home'])
        """
        dict.__init__(self)
        # Call update to load items - it already handles the syntax for the following:
        #   Aliases(alias='path')
        #   Aliases({'alias': 'path'}) and Aliases([('alias', 'path')])
        self.update(*args, **kwargs)
    def __repr__(self):
        # Render like a normal dict but prefixed with the class name.
        return "%s(%s)" % (self.__class__.__name__, dict((a, p) for a, p in self.items()))
    __str__ = __repr__
    def __enter__(self):
        """
        Included to enable contextmanager syntax - doesn't do any setup
        """
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        """
        Included to enable contextmanager syntax - doesn't do any teardown
        """
        pass
    def __setitem__(self, alias, path):
        """
        Enable ``Aliases[alias] = path`` syntax with the necessary validations.
        A valid `alias` does not contain spaces or punctuation and must match
        the regex defined in `fsnav.ALIAS_REGEX`.  A valid `path` must
        exist and be executable.  Note that `~/` is expanded but `*` wildcards
        are not supported.
        Raises
        ------
        KeyError
            Invalid alias.
        ValueError
            Invalid path (or a `None` path).
        Returns
        -------
        None
        """
        # Have to check for None before expanduser() otherwise an AttributeError is raised
        if path is None:
            raise ValueError("Path cannot be NoneType")
        else:
            path = os.path.expanduser(path)
        # Validate the alias
        if re.match(ALIAS_REGEX, alias) is None:
            raise KeyError(
                "Aliases can only contain alphanumeric characters and '-' or '_': '%s'"
                % alias)
        # Validate the path
        # NOTE(review): this only rejects paths that are BOTH not a directory
        # AND not executable - an executable non-directory slips through.
        # Confirm whether `or` was intended here before changing behavior.
        elif not os.path.isdir(path) and not os.access(path, os.X_OK):
            raise ValueError("Can't access path: '%s'" % path)
        # Alias and path passed validate - add
        else:
            # Forces all non-overridden methods that normally call `dict.__setitem__` to call
            # `Aliases.__setitem__()` in order to take advantage of the alias and path
            # validation
            super(Aliases, self).__setitem__(alias, path)
    def as_dict(self):
        """
        Return the dictionary containing aliases and paths as an actual dictionary
        Returns
        -------
        dict
        """
        return dict(self)
    def setdefault(self, alias, path=None):
        """
        Overrides dict.setdefault() to force usage of new self.__setitem__() method
        Returns
        -------
        str
            Path assigned to alias
        """
        # EAFP: only assign when the alias is not already present.
        try:
            self[alias]
        except KeyError:
            self[alias] = path
        return self[alias]
    def update(self, alias_iterable=None, **alias_path):
        """
        Overrides dict.update() to force usage of new self.__setitem__()
        Returns
        -------
        None
        """
        # Mapping input: iterate keys; sequence input: iterate (alias, path) pairs.
        if alias_iterable and hasattr(alias_iterable, 'keys'):
            for alias in alias_iterable:
                self[alias] = alias_iterable[alias]
        elif alias_iterable and not hasattr(alias_iterable, 'keys'):
            for (alias, path) in alias_iterable:
                self[alias] = path
        for alias, path in alias_path.items():
            self[alias] = path
    def copy(self):
        """
        Creates a copy of `Aliases()` and all contained aliases and paths
        Returns
        -------
        Aliases
        """
        return Aliases(**self.as_dict())
    def user_defined(self):
        """
        Extract user-defined aliases from an `Aliases()` instance
        Returns
        -------
        Aliases
            All user-defined aliases
        """
        return Aliases({a: p for a, p in self.items() if a not in DEFAULT_ALIASES or
                        p != DEFAULT_ALIASES[a]})
    def default(self):
        """
        Extract aliases defined by FS Nav on import
        Returns
        -------
        Aliases
            Default aliases
        """
        return Aliases({a: p for a, p in self.items() if a in DEFAULT_ALIASES and
                        p == DEFAULT_ALIASES[a]})
# Aliases may only contain word characters and dashes.  Raw string avoids
# the invalid '\w' escape, which is a SyntaxWarning on modern Python.
ALIAS_REGEX = r"^[\w-]+$"
NAV_UTIL = 'nav'
# Normalize sys.platform to a small, predictable set of names.
# NOTE: 'darwin' and 'cygwin' are tested before 'win' on purpose - both
# contain the substring 'win'.
if 'darwin' in sys.platform.lower().strip():  # pragma no cover
    NORMALIZED_PLATFORM = 'mac'
elif 'cygwin' in sys.platform.lower().strip():  # pragma no cover
    NORMALIZED_PLATFORM = 'cygwin'
elif 'linux' in sys.platform.lower().strip():  # pragma no cover
    NORMALIZED_PLATFORM = 'linux'
elif 'win' in sys.platform.lower().strip():  # pragma no cover
    NORMALIZED_PLATFORM = 'windows'
else:  # pragma no cover
    NORMALIZED_PLATFORM = 'UNKNOWN'
# Per-user configuration file holding additional aliases.
CONFIGFILE = join(expanduser('~'), '.fsnav')
CONFIGFILE_ALIAS_SECTION = 'aliases'
_homedir = expanduser('~')
_username = getpass.getuser()
# Platform-specific default alias tables.  Keys are alias names, values the
# directories they point to; entries whose directory does not exist are
# filtered out below before being exposed as DEFAULT_ALIASES.
_MAC_ALIASES = {
    'applications': join(os.sep, 'Applications'),
    'desk': join(_homedir, 'Desktop'),
    'desktop': join(_homedir, 'Desktop'),
    'documents': join(_homedir, 'Documents'),
    'docs': join(_homedir, 'Documents'),
    'downloads': join(_homedir, 'Downloads'),
    'dl': join(_homedir, 'Downloads'),
    'dropbox': join(_homedir, 'Dropbox'),
    'ghub': join(_homedir, 'github'),
    'google_drive': join(_homedir, 'Google Drive'),
    'gdrive': join(_homedir, 'Google Drive'),
    'hard_drive': os.sep,
    'hd': os.sep,
    'home': _homedir,
    'homedir': _homedir,
    'images': join(_homedir, 'Pictures'),
    'movies': join(_homedir, 'Movies'),
    'music': join(_homedir, 'Music'),
    'pictures': join(_homedir, 'Pictures'),
    'public': join(_homedir, 'Public'),
    'user_applications': join(_homedir, 'Applications'),
    'user_apps': join(_homedir, 'Applications'),
    'userapps': join(_homedir, 'Applications')
}
_DARWIN_ALIASES = _MAC_ALIASES.copy()
# The Linux defaults were a line-for-line duplicate of the Mac table;
# derive them from it instead of repeating the literal.
_LINUX_ALIASES = _MAC_ALIASES.copy()
_CYGWIN_ALIASES = {
    'applications': join(os.sep, 'cygdrive', 'c', 'Program Files'),
    'desk': join(os.sep, 'cygdrive', 'c', 'Users', _username, 'Desktop'),
    'desktop': join(os.sep, 'cygdrive', 'c', 'Users', _username, 'Desktop'),
    'documents': join(os.sep, 'cygdrive', 'c', 'Users', _username, 'Documents'),
    'docs': join(os.sep, 'cygdrive', 'c', 'Users', _username, 'Documents'),
    'downloads': join(os.sep, 'cygdrive', 'c', 'Users', _username, 'Downloads'),
    'dl': join(os.sep, 'cygdrive', 'c', 'Users', _username, 'Downloads'),
    'dropbox': join(os.sep, 'cygdrive', 'c', 'Users', _username, 'Dropbox'),
    'ghub': join(os.sep, 'cygdrive', 'c', 'Users', _username, 'github'),
    'google_drive': join(os.sep, 'cygdrive', 'c', 'Users', _username, 'Google Drive'),
    'gdrive': join(os.sep, 'cygdrive', 'c', 'Users', _username, 'Google Drive'),
    'hard_drive': join(os.sep, 'cygdrive', 'c'),
    'hd': join(os.sep, 'cygdrive', 'c'),
    'home': _homedir,
    'homedir': _homedir,
    'images': join(os.sep, 'cygdrive', 'c', 'Users', _username, 'Pictures'),
    'movies': join(os.sep, 'cygdrive', 'c', 'Users', _username, 'Videos'),
    'music': join(os.sep, 'cygdrive', 'c', 'Users', _username, 'Music'),
    'pictures': join(os.sep, 'cygdrive', 'c', 'Users', _username, 'Pictures'),
    'public': join(os.sep, 'cygdrive', 'c', 'Users', 'Public'),
    'winhome': join(os.sep, 'cygdrive', 'c', 'Users', _username),
    'windowshome': join(os.sep, 'cygdrive', 'c', 'Users', _username)
}
_WINDOWS_ALIASES = {
    'cyghome': join('C:', 'cygwin', 'home', _username),
    'cygwinhome': join('C:', 'cygwin', 'home', _username),
    'cygwin_home': join('C:', 'cygwin', 'home', _username),
    'desk': join(_homedir, 'Desktop'),
    'desktop': join(_homedir, 'Desktop'),
    'documents': join(_homedir, 'My Documents'),
    'downloads': join(_homedir, 'Downloads'),
    'dropbox': join(_homedir, 'Dropbox'),
    'github': join(_homedir, 'github'),
    'google_drive': join(_homedir, 'Google Drive'),
    'hard_drive': 'C:',
    'hd': 'C:',
    'home': _homedir,
    'homedir': _homedir,
    'images': join(_homedir, 'My Pictures'),
    'top_level': join('C:'),
    'movies': join(_homedir, 'My Videos'),
    'music': join(_homedir, 'My Music'),
    'pictures': join(_homedir, 'My Pictures'),
    'public': join(_homedir, 'Public'),
    'system_apps': join('C:', 'Program Files'),
    'user_apps': join(_homedir, 'Program Files')
}
# Fallback table for unrecognized platforms - like the Mac table but
# without the 'images' alias.
_UNKNOWN_ALIASES = {
    'applications': join(os.sep, 'Applications'),
    'desk': join(_homedir, 'Desktop'),
    'desktop': join(_homedir, 'Desktop'),
    'documents': join(_homedir, 'Documents'),
    'docs': join(_homedir, 'Documents'),
    'downloads': join(_homedir, 'Downloads'),
    'dl': join(_homedir, 'Downloads'),
    'dropbox': join(_homedir, 'Dropbox'),
    'ghub': join(_homedir, 'github'),
    'google_drive': join(_homedir, 'Google Drive'),
    'gdrive': join(_homedir, 'Google Drive'),
    'hard_drive': os.sep,
    'hd': os.sep,
    'home': _homedir,
    'homedir': _homedir,
    'movies': join(_homedir, 'Movies'),
    'music': join(_homedir, 'Music'),
    'pictures': join(_homedir, 'Pictures'),
    'public': join(_homedir, 'Public'),
    'user_applications': join(_homedir, 'Applications'),
    'user_apps': join(_homedir, 'Applications'),
    'userapps': join(_homedir, 'Applications')
}
# Select the default alias table for the detected platform.
# Bug fix: NORMALIZED_PLATFORM is set to the string 'windows' above (never
# 'win'), so comparing against 'win' silently routed Windows users to the
# UNKNOWN alias table.
if NORMALIZED_PLATFORM == 'mac':  # pragma no cover
    _DEFAULT_ALIASES = _MAC_ALIASES.copy()
elif NORMALIZED_PLATFORM == 'linux':  # pragma no cover
    _DEFAULT_ALIASES = _LINUX_ALIASES.copy()
elif NORMALIZED_PLATFORM == 'cygwin':  # pragma no cover
    _DEFAULT_ALIASES = _CYGWIN_ALIASES.copy()
elif NORMALIZED_PLATFORM == 'windows':  # pragma no cover
    _DEFAULT_ALIASES = _WINDOWS_ALIASES.copy()
else:  # pragma no cover
    _DEFAULT_ALIASES = _UNKNOWN_ALIASES.copy()
# Remove aliases pointing towards non-existent directories
# Python 2.6 does not support direct dictionary comprehension
_DEFAULT_ALIASES = dict(
    (a, p) for a, p in _DEFAULT_ALIASES.copy().items()
    if os.path.isdir(p) and os.access(p, os.X_OK)
)
DEFAULT_ALIASES = _DEFAULT_ALIASES.copy()
| |
from braces.views import LoginRequiredMixin, GroupRequiredMixin
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.db.models import Q
from django.http import Http404
from django.shortcuts import get_object_or_404, redirect
from django.views.generic import ListView, DetailView, CreateView, UpdateView, TemplateView, View
from .forms import JobForm
from .models import Job, JobType, JobCategory
class JobBoardAdminRequiredMixin(GroupRequiredMixin):
    """Restrict a view to members of the 'Job Board Admin' group."""
    group_required = "Job Board Admin"
class JobMixin:
    """Add shared job-board context (counts and active filters) to a view."""

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        # One row per distinct location that currently has a visible job.
        active_locations = Job.objects.visible().distinct(
            'location_slug'
        ).order_by(
            'location_slug',
        )
        context.update({
            'jobs_count': Job.objects.visible().count(),
            'active_types': JobType.objects.with_active_jobs(),
            'active_categories': JobCategory.objects.with_active_jobs(),
            'active_locations': active_locations,
        })
        return context
class JobList(JobMixin, ListView):
    """Paginated listing of all publicly visible jobs."""
    model = Job
    paginate_by = 25
    job_list_view = True

    def get_queryset(self):
        return super().get_queryset().visible().select_related()
class JobListMine(JobMixin, ListView):
    """Paginated listing of the jobs created by the logged-in user."""
    model = Job
    paginate_by = 25

    def get_queryset(self):
        base_qs = super().get_queryset()
        # Anonymous visitors have no "my jobs" listing at all.
        if not self.request.user.is_authenticated():
            raise Http404
        return base_qs.filter(Q(creator=self.request.user))
class JobTypeMenu:
    """Template flag: mark the current page as a job-type view."""
    def job_type_view(self):
        return True
class JobCategoryMenu:
    """Template flag: mark the current page as a job-category view."""
    def job_category_view(self):
        return True
class JobLocationMenu:
    """Template flag: mark the current page as a job-location view."""
    def job_location_view(self):
        return True
class JobListType(JobTypeMenu, JobList):
    """Job listing filtered to a single job type (by slug)."""
    template_name = 'jobs/job_type_list.html'

    def get_queryset(self):
        return super().get_queryset().filter(job_types__slug=self.kwargs['slug'])

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context['current_type'] = JobType.objects.get(slug=self.kwargs['slug'])
        return context
class JobListCategory(JobCategoryMenu, JobList):
    """Job listing filtered to a single category (by slug)."""
    template_name = 'jobs/job_category_list.html'

    def get_queryset(self):
        return super().get_queryset().filter(category__slug=self.kwargs['slug'])

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context['current_category'] = JobCategory.objects.get(slug=self.kwargs['slug'])
        return context
class JobListLocation(JobLocationMenu, JobList):
    """Job listing filtered to a single location (by slug)."""
    template_name = 'jobs/job_location_list.html'

    def get_queryset(self):
        return super().get_queryset().filter(location_slug=self.kwargs['slug'])
class JobTypes(JobTypeMenu, JobMixin, ListView):
    """ View to simply list JobType instances that have current jobs """
    template_name = "jobs/job_types.html"
    queryset = JobType.objects.with_active_jobs().order_by('name')
    context_object_name = 'types'
class JobCategories(JobCategoryMenu, JobMixin, ListView):
    """ View to simply list JobCategory instances that have current jobs """
    template_name = "jobs/job_categories.html"
    queryset = JobCategory.objects.with_active_jobs().order_by('name')
    context_object_name = 'categories'
class JobLocations(JobLocationMenu, JobMixin, TemplateView):
    """ View to simply list distinct Countries that have current jobs """
    template_name = "jobs/job_locations.html"

    def get_context_data(self, *args, **kwargs):
        context = super().get_context_data(*args, **kwargs)
        # One job per distinct (country, city) pair.
        context['jobs'] = Job.objects.visible().distinct(
            'country', 'city'
        ).order_by(
            'country', 'city'
        )
        return context
class JobReview(LoginRequiredMixin, JobBoardAdminRequiredMixin, JobMixin, ListView):
    """Moderation queue: list jobs awaiting review and process actions.

    POST accepts 'approve', 'reject', 'remove' and 'archive' actions;
    malformed or unknown requests just bounce back to the review listing.
    """
    template_name = 'jobs/job_review.html'
    paginate_by = 20

    def get_queryset(self):
        return Job.objects.review()

    def post(self, request):
        try:
            job = Job.objects.get(id=request.POST['job_id'])
            action = request.POST['action']
        except (KeyError, Job.DoesNotExist):
            return redirect('jobs:job_review')
        if action == 'approve':
            job.approve(request.user)
            messages.add_message(self.request, messages.SUCCESS, "'%s' approved." % job)
        elif action == 'reject':
            job.reject(request.user)
            messages.add_message(self.request, messages.SUCCESS, "'%s' rejected." % job)
        elif action == 'remove':
            job.status = Job.STATUS_REMOVED
            job.save()
            messages.add_message(self.request, messages.SUCCESS, "'%s' removed." % job)
        elif action == 'archive':
            job.status = Job.STATUS_ARCHIVED
            job.save()
            # Bug fix: this message previously read "removed" even though the
            # action archives the job.
            messages.add_message(self.request, messages.SUCCESS, "'%s' archived." % job)
        return redirect('jobs:job_review')
class JobDetail(JobMixin, DetailView):
    """Public job detail page; staff can also see non-visible jobs."""
    model = Job

    def get_queryset(self):
        """ Show only approved jobs to the public, staff can see all jobs """
        qs = Job.objects.select_related()
        if self.request.user.is_staff:
            return qs
        else:
            return qs.visible()

    def get_context_data(self, **kwargs):
        # NOTE(review): select_related('company__name') traverses into a
        # non-relational field; newer Django versions raise FieldError for
        # this - confirm against the installed Django version.
        ctx = super().get_context_data(
            category_jobs=self.object.category.jobs.select_related('company__name')[:5],
            user_can_edit=(self.object.creator == self.request.user)
        )
        ctx.update(kwargs)
        return ctx
class JobDetailReview(LoginRequiredMixin, JobBoardAdminRequiredMixin, JobDetail):
    """Job detail as seen from the moderation queue (staff only)."""

    def get_queryset(self):
        """ Only staff and creator can review """
        if self.request.user.is_staff:
            return Job.objects.select_related()
        else:
            raise Http404()

    def get_context_data(self, **kwargs):
        ctx = super().get_context_data(
            user_can_edit=(
                self.object.creator == self.request.user
                or self.request.user.is_staff
            ),
            under_review=True,
        )
        ctx.update(kwargs)
        return ctx
class JobCreate(JobMixin, CreateView):
    """Job submission form; associates the job with the submitter if logged in."""
    model = Job
    form_class = JobForm

    def get_success_url(self):
        return reverse('jobs:job_thanks')

    def get_form_kwargs(self):
        kwargs = super().get_form_kwargs()
        kwargs['request'] = self.request
        # Pre-fill the contact email for logged-in users.
        # (is_authenticated is called as a method here - pre-1.10 Django.)
        if self.request.user.is_authenticated():
            kwargs['initial'] = {'email': self.request.user.email}
        return kwargs

    def form_valid(self, form):
        """ set the creator to the current user """
        # Associate Job to user if they are logged in
        if self.request.user.is_authenticated():
            form.instance.creator = self.request.user
        return super().form_valid(form)
class JobEdit(JobMixin, UpdateView):
    """Edit form; staff may edit any job, others only their own."""
    model = Job
    form_class = JobForm

    def get_queryset(self):
        if not self.request.user.is_authenticated():
            raise Http404
        if self.request.user.is_staff:
            return super().get_queryset()
        # Non-staff are limited to jobs they created.
        return self.request.user.jobs_job_creator.all()

    def form_valid(self, form):
        """ set last_modified_by to the current user """
        form.instance.last_modified_by = self.request.user
        return super().form_valid(form)

    def get_context_data(self, **kwargs):
        ctx = super().get_context_data(
            form_action='update',
        )
        ctx.update(kwargs)
        return ctx
class JobChangeStatus(LoginRequiredMixin, JobMixin, View):
    """
    Abstract class to change a job's status; see the concrete implementations below.

    Subclasses set `new_status` and `success_message`.
    """

    def post(self, request, pk):
        # Only jobs created by the requesting user can be re-statused.
        job = get_object_or_404(self.request.user.jobs_job_creator, pk=pk)
        job.status = self.new_status
        job.save()
        messages.add_message(self.request, messages.SUCCESS, self.success_message)
        # Bug fix: use the namespaced URL name, consistent with
        # 'jobs:job_review' and 'jobs:job_thanks' elsewhere in this module;
        # the bare 'job_detail' name does not resolve under the 'jobs'
        # application namespace.
        return redirect('jobs:job_detail', job.id)
class JobPublish(JobChangeStatus):
    """Mark one of the user's jobs as approved/published."""
    new_status = Job.STATUS_APPROVED
    success_message = 'Your job listing has been published.'
class JobArchive(JobChangeStatus):
    """Archive one of the user's jobs, removing it from public view."""
    new_status = Job.STATUS_ARCHIVED
    success_message = 'Your job listing has been archived and is no longer public.'
| |
# Copyright (c) 2015 Pixomondo
#
# CONFIDENTIAL AND PROPRIETARY
#
# This work is provided "AS IS" and subject to the MIT License included in this
# distribution package. See LICENSE.
# By accessing, using, copying or modifying this work you indicate your
# agreement to the MIT License. All rights
# not expressly granted therein are reserved by Pixomondo.
import base64
import os
import pickle
import sys
import zlib
import hou
import sgtk
class ToolkitGeometryNodeHandler(object):
    """Manage sgtk geometry nodes in Houdini (path computation, conversion)."""
    # Houdini node type name of the Toolkit geometry node.
    SG_NODE_CLASS = 'sgtk_geometry'
    # Name of the output-path parm on the node.
    PARM_OUTPUT_PATH = 'sopoutput'
    # Name of the configuration-profile menu parm.
    PARM_CONFIG = 'geometry_config'
    def __init__(self, app):
        """
        :param app: the Toolkit app this handler belongs to; used for
            settings, templates, context and logging.
        """
        self._app = app
        self._work_file_template = self._app.get_template("work_file_template")
############################################################################
# Public methods
    def compute_path(self, node):
        """
        Compute the sgtk output path for the given geometry node.

        Combines fields parsed from the current hip file with node-specific
        fields (render pass = node name, frame SEQ token and, when the
        cache template requires them, the camera resolution) and applies
        them to the app's "work_cache_template".

        :param node: the sgtk geometry node to compute a path for
        :returns: output path using forward slashes
        :raises sgtk.TankError: if the hip file is not a Toolkit work file
            or the referenced camera node cannot be found
        """
        # Get relevant fields from the scene filename and contents
        work_file_fields = self.__get_hipfile_fields()
        if not work_file_fields:
            msg = "This Houdini file is not a Shotgun Toolkit work file!"
            raise sgtk.TankError(msg)
        # Get the templates from the app
        template = self._app.get_template("work_cache_template")
        # create fields dict with all the metadata
        fields = {}
        fields["name"] = work_file_fields.get("name")
        fields["version"] = work_file_fields["version"]
        fields["renderpass"] = node.name()
        fields["SEQ"] = "FORMAT: $F"
        # Get the camera width and height if necessary
        # (template.keys is the template's key mapping - membership test,
        # not a method call)
        if "width" in template.keys or "height" in template.keys:
            # Get the camera
            cam_path = node.parm("geometry1_camera").eval()
            cam_node = hou.node(cam_path)
            if not cam_node:
                raise sgtk.TankError("Camera %s not found." % cam_path)
            fields["width"] = cam_node.parm("resx").eval()
            fields["height"] = cam_node.parm("resy").eval()
        fields.update(self._app.context.as_template_fields(template))
        path = template.apply_fields(fields)
        path = path.replace(os.path.sep, "/")
        return path
def get_nodes(self, class_=None):
"""
Returns a list of sgtk nodes
"""
node_class = ToolkitGeometryNodeHandler.SG_NODE_CLASS
sop = True if not class_ or class_ == 'sop' else False
rop = True if not class_ or class_ == 'rop' else False
nodes = []
if sop:
nodes += hou.nodeType(hou.sopNodeTypeCategory(),
node_class).instances()
if rop:
nodes += hou.nodeType(hou.ropNodeTypeCategory(),
node_class).instances()
return nodes
    def get_node_profile_name(self, node):
        """
        Return the name of the profile the specified node is using
        """
        # The config parm is a menu; eval() gives the selected index.
        config_parm = node.parm(self.PARM_CONFIG)
        return config_parm.menuLabels()[config_parm.eval()]
    def get_files_on_disk(self, node):
        """
        Called from render publisher & UI (via exists_on_disk)
        Returns the files on disk associated with this node
        """
        # Thin public wrapper over the private implementation.
        return self.__get_files_on_disk(node)
    def create_file_node(self):
        """
        Used by geometry_filein_button callback.
        Creates a file node.
        Sets the path to the current output path of this node.
        Sets the node name to the current nodes names.
        """
        node = hou.pwd()
        parm = node.parm(self.PARM_OUTPUT_PATH)
        name = 'file_' + node.name()
        file_sop = node.parent().createNode("file")
        # The output parm is a menu; the label holds the resolved path.
        file_sop.parm("file").set(parm.menuLabels()[parm.eval()])
        file_sop.setName(name, unique_name=True)
        # Move it away from the origin
        file_sop.moveToGoodPosition()
    def set_default_node_name(self, node):
        """Rename the node to the app's configured default (made unique)."""
        name = self._app.get_setting('default_node_name')
        return node.setName(name, unique_name=True)
def create_output_path_menu(self):
"""
Creates the output path menu.
"""
node = hou.pwd()
# Build the menu
menu = []
menu.append("sgtk")
try:
menu.append(self.compute_path(node))
except sgtk.TankError, err:
warn_err = '{0}: {1}'.format(node.name(), err)
self._app.log_warning(warn_err)
menu.append("ERROR: %s" % err)
return menu
    def convert_sg_to_geometry_nodes(self):
        """
        Utility function to convert all Shotgun Geometry nodes to regular
        Geometry nodes.

        Toolkit-specific state (profile name, SOP outputs) is stashed in the
        new node's user data so convert_geometry_to_sg_nodes() can reverse
        the process.  Failures on individual nodes are logged and skipped.

        # Example use:
        import sgtk
        eng = sgtk.platform.current_engine()
        app = eng.apps["tk-houdini-geometrynode"]
        # Convert Shotgun Geometry nodes to Geometry nodes:
        app.convert_to_geometry_nodes()
        """
        # get sgtk geometry nodes:
        sg_nodes = self.get_nodes()
        for sg_n in sg_nodes:
            try:
                sop_types = hou.sopNodeTypeCategory().nodeTypes()
                sop_type = sop_types[ToolkitGeometryNodeHandler.SG_NODE_CLASS]
                rop_types = hou.ropNodeTypeCategory().nodeTypes()
                rop_type = rop_types[ToolkitGeometryNodeHandler.SG_NODE_CLASS]
                is_sop = sg_n.type() == sop_type
                is_rop = sg_n.type() == rop_type
                # set as selected:
                node_name = sg_n.name()
                node_pos = sg_n.position()
                self._app.log_debug('Converting node: {0}'.format(sg_n.name()))
                self._app.log_debug('path: {0}'.format(sg_n.path()))
                # create new regular Geometry node:
                # (a SOP-level sgtk node maps to 'rop_geometry', a ROP-level
                # one to 'geometry')
                if is_sop:
                    geometry_operator = 'rop_geometry'
                elif is_rop:
                    geometry_operator = 'geometry'
                else:
                    continue
                new_n = sg_n.parent().createNode(geometry_operator)
                # copy across file parms:
                filename = self.__get_menu_label(sg_n.parm('sopoutput'))
                new_n.parm('sopoutput').set(filename)
                # copy across any knob values from the internal geometry node.
                # parmTuples
                exclude = ['sopoutput']
                self.__copy_parm_values(sg_n, new_n, exclude)
                # Store Toolkit specific information on geometry node
                # so that we can reverse this process later
                # Profile Name
                new_n.setUserData('tk_profile_name',
                                  self.get_node_profile_name(sg_n))
                # Copy inputs and move outputs
                self.__copy_inputs_to_node(sg_n, new_n)
                if is_rop:
                    self.__move_outputs_to_node(sg_n, new_n)
                elif is_sop:
                    self.__move_outputs_from_node_to_user_data(sg_n, new_n)
                self.__copy_color(sg_n, new_n)
                # delete original node:
                sg_n.destroy()
                # rename new node:
                new_n.setName(node_name)
                new_n.setPosition(node_pos)
            except Exception as err:
                # Best-effort conversion: log and continue with the next node.
                self._app.log_warning(err)
                msg = 'Problems converting node: {0}'.format(sg_n.path())
                self._app.log_warning(msg)
    def convert_geometry_to_sg_nodes(self):
        """
        Utility function to convert all Geometry nodes to Shotgun
        Geometry nodes (only converts Geometry nodes that were previously
        Shotgun Geometry nodes)

        Nodes are recognised by the 'tk_profile_name' user-data entry
        written by convert_sg_to_geometry_nodes(); nodes without it are
        skipped.  Failures on individual nodes are logged and skipped.

        # Example use:
        import sgtk
        eng = sgtk.platform.current_engine()
        app = eng.apps["tk-houdini-geometrynode"]
        # Convert previously converted Geometry nodes back to
        # Shotgun Geometry nodes:
        app.convert_from_geometry_nodes()
        """
        # get geometry nodes:
        sop_nodes = hou.nodeType(hou.sopNodeTypeCategory(),
                                 'rop_geometry').instances()
        rop_nodes = hou.nodeType(hou.ropNodeTypeCategory(),
                                 'geometry').instances()
        nodes = sop_nodes + rop_nodes
        for n in nodes:
            try:
                user_dict = n.userDataDict()
                profile = user_dict.get('tk_profile_name')
                if not profile:
                    # can't convert to a Shotgun Geometry Node
                    # as we have missing parameters!
                    continue
                # set as selected:
                # wn.setSelected(True)
                node_name = n.name()
                node_pos = n.position()
                self._app.log_debug('Converting node: {0}'.format(n.name()))
                self._app.log_debug('path: {0}'.format(n.path()))
                # create new Shotgun Geometry node:
                node_class = ToolkitGeometryNodeHandler.SG_NODE_CLASS
                new_sg_n = n.parent().createNode(node_class)
                # set the profile
                try:
                    parm = new_sg_n.parm(ToolkitGeometryNodeHandler.PARM_CONFIG)
                    index = parm.menuLabels().index(profile)
                    parm.set(index)
                except ValueError:
                    # Stored profile no longer exists - keep the default.
                    pass
                # copy across and knob values from the internal geometry node.
                exclude = ['sopoutput']
                self.__copy_parm_values(n, new_sg_n, exclude)
                # Copy inputs and move outputs
                self.__copy_inputs_to_node(n, new_sg_n)
                self.__move_outputs_to_node(n, new_sg_n)
                self.__move_outputs_from_user_data_to_node(n, new_sg_n)
                self.__copy_color(n, new_sg_n)
                # delete original node:
                n.destroy()
                # rename new node:
                new_sg_n.setName(node_name)
                new_sg_n.setPosition(node_pos)
            except Exception as err:
                # Best-effort conversion: log and continue with the next node.
                self._app.log_warning(err)
                msg = 'Problems converting node: {0}'.format(n.path())
                self._app.log_warning(msg)
############################################################################
# Public methods called from OTL - although these are public, they should
# be considered as private and not used directly!
    def on_copy_path_to_clipboard_button_callback(self):
        """
        Callback from the gizmo whenever the 'Copy path to clipboard' button
        is pressed.
        """
        node = hou.pwd()
        # get the path depending if in full or proxy mode:
        render_path = self.__get_render_path(node)
        # use Qt to copy the path to the clipboard:
        from sgtk.platform.qt import QtGui
        QtGui.QApplication.clipboard().setText(render_path)
def on_show_in_fs_button_callback(self):
    """
    Shows the location of the node in the file system.
    This is a callback which is executed when the show in fs
    button is pressed on the houdini output node.

    Tries the cached render path first; if its directory does not exist,
    falls back to the directory of already rendered frames. Displays a
    Houdini message dialog when nothing can be shown.
    """
    node = hou.pwd()
    if not node:
        return
    render_dir = None
    # first, try to just use the current cached path:
    render_path = self.__get_render_path(node)
    if render_path:
        # the above method returns houdini style slashes, so ensure these
        # are pointing correctly
        render_path = render_path.replace("/", os.path.sep)
        dir_name = os.path.dirname(render_path)
        if os.path.exists(dir_name):
            render_dir = dir_name
    if not render_dir:
        # render directory doesn't exist so try using location
        # of rendered frames instead:
        try:
            files = self.get_files_on_disk(node)
            if len(files) == 0:
                msg = ("There are no renders for this node yet!\n"
                       "When you render, the files will be written to "
                       "the following location:\n\n%s" % render_path)
                hou.ui.displayMessage(msg)
            else:
                render_dir = os.path.dirname(files[0])
        # BUGFIX: use the `except ... as ...` syntax (valid on Python 2.6+
        # and required on Python 3) instead of the legacy
        # `except Exception, e` form.
        except Exception as e:
            msg = ("Unable to jump to file system:\n\n%s" % e)
            hou.ui.displayMessage(msg)
    # if we have a valid render path then show it:
    if render_dir:
        system = sys.platform
        # run the platform-specific file browser command
        if system == "linux2":
            cmd = "xdg-open \"%s\"" % render_dir
        elif system == "darwin":
            cmd = "open '%s'" % render_dir
        elif system == "win32":
            cmd = "cmd.exe /C start \"Folder\" \"%s\"" % render_dir
        else:
            raise Exception("Platform '%s' is not supported." % system)
        self._app.log_debug("Executing command '%s'" % cmd)
        exit_code = os.system(cmd)
        if exit_code != 0:
            msg = ("Failed to launch '%s'!" % cmd)
            hou.ui.displayMessage(msg)
############################################################################
# Private methods
def __copy_color(self, node_a, node_b):
    """Apply node_a's display colour to node_b."""
    node_b.setColor(node_a.color())
def __get_menu_label(self, parm, check_for_sgtk=True):
    """Return the value to display for a menu parm.

    When check_for_sgtk is set and the selected menu item token is
    'sgtk', the human readable label is returned instead of the token.
    """
    index = parm.eval()
    if not check_for_sgtk:
        return parm.menuLabels()[index]
    item = parm.menuItems()[index]
    if item == 'sgtk':
        return parm.menuLabels()[index]
    return item
def __get_hipfile_fields(self):
    """
    Extract fields from the current Houdini file using the template.

    Returns an empty dict when no work-file template is configured or the
    current file path does not match it.
    """
    scene_path = hou.hipFile.path()
    template = self._work_file_template
    if template and template.validate(scene_path):
        return template.get_fields(scene_path)
    return {}
def __get_render_path(self, node):
    """Return the render path currently selected on the node's output parm."""
    parm = node.parm(self.PARM_OUTPUT_PATH)
    return parm.menuLabels()[parm.eval()]
def __get_render_template(self, node):
    """
    Get a specific render template for the current profile.
    """
    template_name = "work_render_template"
    return self.__get_template(node, template_name)
def __get_template(self, node, name):
    """
    Get the named template for the specified node.

    :param node: Node the template is requested for. NOTE(review):
        currently unused -- the template is resolved purely from the
        app settings.
    :param name: Name of the template setting to look up.
    """
    return self._app.get_template(name)
def __get_files_on_disk(self, node):
    """
    Called from render publisher & UI (via exists_on_disk)
    Returns the files on disk associated with this node
    """
    render_path = self.__get_render_path(node)
    render_template = self.__get_render_template(node)
    if not render_template.validate(render_path):
        raise Exception(
            "Could not resolve the files on disk for node %s."
            "The path '%s' is not recognized by Shotgun!"
            % (node.name(), render_path))
    fields = render_template.get_fields(render_path)
    # make sure we don't look for any eye - %V or SEQ - %04d stuff
    return self._app.tank.paths_from_template(
        render_template, fields, ["SEQ", "eye"])
def __copy_parm_values(self, source_node, target_node, exclude=None):
    """
    Copy parameter values of the source node to those of the target node
    if a parameter with the same name exists.
    """
    excluded_names = exclude or []
    for src_parm in source_node.parms():
        if src_parm.name() in excluded_names:
            continue
        parm_template = src_parm.parmTemplate()
        # Folder parms carry no value of their own -- nothing to copy.
        if isinstance(parm_template, hou.FolderSetParmTemplate):
            continue
        dst_parm = target_node.parm(src_parm.name())
        # No matching parm on the target node -- skip it.
        if dst_parm is None:
            continue
        keys = src_parm.keyframes()
        if keys:
            # Animated: copy every hou.Keyframe object across.
            for key in keys:
                dst_parm.setKeyframe(key)
        elif isinstance(parm_template, hou.StringParmTemplate):
            # Strings: copy the raw string to preserve expressions.
            dst_parm.set(src_parm.unexpandedString())
        else:
            # Otherwise copy the evaluated value.
            dst_parm.set(src_parm.eval())
def __copy_inputs_to_node(self, node, target, ignore_missing=False):
    """ Copy all the input connections from this node to the
    target node.

    :param node: Node whose input connections are copied.
    :param target: Node that receives the connections.
    :param ignore_missing: If the target node does not have enough
        inputs then skip this connection.
    :raises hou.OperationFailed: if the target has no inputs at all.
    :raises hou.InvalidInput: if a connection index is out of range and
        ignore_missing is False.
    """
    input_connections = node.inputConnections()
    num_target_inputs = len(target.inputConnectors())
    # BUGFIX: compare with `==` instead of `is`. Identity comparison
    # against an int literal only works by accident (CPython small-int
    # caching) and raises a SyntaxWarning on modern Python.
    if num_target_inputs == 0:
        raise hou.OperationFailed("Target node has no inputs.")
    for connection in input_connections:
        index = connection.inputIndex()
        if index >= num_target_inputs:
            if ignore_missing:
                continue
            raise hou.InvalidInput("Target node has too few inputs.")
        target.setInput(index, connection.inputNode())
def __move_outputs_to_node(self, node, target):
    """ Move all the output connections from this node to the
    target node.
    """
    # Re-wire every downstream node so its input points at `target`.
    # (Use a distinct local name instead of rebinding the `node` param.)
    for connection in node.outputConnections():
        downstream = connection.outputNode()
        downstream.setInput(connection.inputIndex(), target)
def __move_outputs_from_node_to_user_data(self, node, target):
    """Saves output connections into user data of target node.
    Needed when target node doesn't have outputs.
    """
    connections = node.outputConnections()
    if not connections:
        return
    # Record each downstream node path and which of its inputs we feed.
    outputs = [
        {'node': conn.outputNode().path(), 'input': conn.inputIndex()}
        for conn in connections
    ]
    self._set_compressed_json(target, 'tk_output_connections', outputs)
def __move_outputs_from_user_data_to_node(self, node, target):
    """ Move all the output connections stored in `node`'s user data onto
    the target node.
    """
    stored = self._get_compressed_json(node, 'tk_output_connections')
    if not stored:
        return
    # Re-create each recorded connection, pointing at `target`.
    for entry in stored:
        downstream = hou.node(entry['node'])
        downstream.setInput(entry['input'], target)
def _set_compressed_json(self, node, key, data):
    """Save python structures (like list or dictionary) as json string in
    user data of a node.

    NOTE(review): despite the name, the payload is actually pickled (not
    JSON serialized), then zlib-compressed and base64-encoded, with a
    'sgtk-01:' version prefix that _get_compressed_json strips off.
    """
    self._app.log_debug(node)
    data = pickle.dumps(data)
    data_string = 'sgtk-01:' + base64.b64encode(zlib.compress(data))
    node.setUserData(key, data_string)
def _get_compressed_json(self, node, key):
    """Returns the python structure from a decompressed json string.

    Counterpart of _set_compressed_json: strips the 'sgtk-01:' version
    prefix, base64-decodes, decompresses and unpickles the payload.
    Returns None when no data is stored under `key`.

    NOTE(review): the payload is pickled, not JSON. Unpickling is only
    safe because the data is written by _set_compressed_json; never feed
    untrusted user data through this path.
    """
    self._app.log_debug(node)
    str_data = node.userData(key)
    if str_data is None:
        return None
    # BUGFIX: the version prefix 'sgtk-01:' is 8 characters, not 7.
    # Previously `str_data[7:]` left the trailing ':' in place and only
    # decoded correctly because b64decode silently discards characters
    # outside the base64 alphabet.
    payload = str_data[len('sgtk-01:'):]
    return pickle.loads(zlib.decompress(base64.b64decode(payload)))
| |
from django import template
from django.contrib.auth.models import User, AnonymousUser, Group
from django.core.urlresolvers import reverse
from django.test import TestCase
from follow import signals, utils
from .models import Follow
from .utils import register
register(User)
register(Group)
class FollowTest(TestCase):
    """Exercises the follow app: the model manager helpers, the HTTP
    views, the template tags and the followed/unfollowed signals."""

    urls = 'follow.urls'

    def setUp(self):
        # lennon gets a usable password so the HTTP tests can log in.
        self.lennon = User.objects.create(username='lennon')
        self.lennon.set_password('test')
        self.lennon.save()
        self.hendrix = User.objects.create(username='hendrix')
        self.musicians = Group.objects.create()
        self.lennon.groups.add(self.musicians)

    def test_follow(self):
        """Manager helpers: create / get_or_create / is_following /
        get_follows, plus the injected get_follows() and utils.toggle."""
        follow = Follow.objects.create(self.lennon, self.hendrix)
        _, result = Follow.objects.get_or_create(self.lennon, self.hendrix)
        self.assertEqual(False, result)
        result = Follow.objects.is_following(self.lennon, self.hendrix)
        self.assertEqual(True, result)
        result = Follow.objects.is_following(self.hendrix, self.lennon)
        self.assertEqual(False, result)
        result = Follow.objects.get_follows(User)
        self.assertEqual(1, len(result))
        self.assertEqual(self.lennon, result[0].user)
        result = Follow.objects.get_follows(self.hendrix)
        self.assertEqual(1, len(result))
        self.assertEqual(self.lennon, result[0].user)
        result = self.hendrix.get_follows()
        self.assertEqual(1, len(result))
        self.assertEqual(self.lennon, result[0].user)
        result = self.lennon.get_follows()
        self.assertEqual(0, len(result), result)
        utils.toggle(self.lennon, self.hendrix)
        self.assertEqual(0, len(self.hendrix.get_follows()))
        utils.toggle(self.lennon, self.hendrix)
        self.assertEqual(1, len(self.hendrix.get_follows()))

    def test_get_follows_for_queryset(self):
        """get_follows also accepts a queryset of targets."""
        utils.follow(self.hendrix, self.lennon)
        utils.follow(self.lennon, self.hendrix)
        result = Follow.objects.get_follows(User.objects.all())
        self.assertEqual(2, result.count())

    def test_follow_http(self):
        """POSTs to the follow/unfollow/toggle views redirect (302)."""
        self.client.login(username='lennon', password='test')
        follow_url = reverse('follow', args=['auth', 'user', self.hendrix.id])
        # NOTE(review): this reverses 'follow', not 'unfollow' -- looks
        # like a copy/paste slip; confirm against follow.urls.
        unfollow_url = reverse('follow', args=['auth', 'user', self.hendrix.id])
        toggle_url = reverse('toggle', args=['auth', 'user', self.hendrix.id])
        response = self.client.post(follow_url)
        self.assertEqual(302, response.status_code)
        response = self.client.post(follow_url)
        self.assertEqual(302, response.status_code)
        response = self.client.post(unfollow_url)
        self.assertEqual(302, response.status_code)
        response = self.client.post(toggle_url)
        self.assertEqual(302, response.status_code)

    def test_get_fail(self):
        """GET is not allowed on the follow views (400)."""
        self.client.login(username='lennon', password='test')
        follow_url = reverse('follow', args=['auth', 'user', self.hendrix.id])
        # NOTE(review): reverses 'follow', not 'unfollow' -- see above.
        unfollow_url = reverse('follow', args=['auth', 'user', self.hendrix.id])
        response = self.client.get(follow_url)
        self.assertEqual(400, response.status_code)
        response = self.client.get(unfollow_url)
        self.assertEqual(400, response.status_code)

    def test_no_absolute_url(self):
        """A target model without get_absolute_url makes the follow view
        fail with a server error (500)."""
        self.client.login(username='lennon', password='test')
        get_absolute_url = User.get_absolute_url
        User.get_absolute_url = None
        try:
            follow_url = utils.follow_link(self.hendrix)
            response = self.client.post(follow_url)
            self.assertEqual(500, response.status_code)
        finally:
            # BUGFIX: restore the monkeypatched attribute so that later
            # tests in this process see the real User.get_absolute_url
            # again (previously it was saved but never put back).
            User.get_absolute_url = get_absolute_url

    def test_template_tags(self):
        """follow_url / is_following / follow_form template tags."""
        follow_url = reverse('follow', args=['auth', 'user', self.hendrix.id])
        unfollow_url = reverse('unfollow', args=['auth', 'user', self.hendrix.id])
        request = type('Request', (object,), {'user': self.lennon})()
        self.assertEqual(follow_url, utils.follow_link(self.hendrix))
        self.assertEqual(unfollow_url, utils.unfollow_link(self.hendrix))
        tpl = template.Template("""{% load follow_tags %}{% follow_url obj %}""")
        ctx = template.Context({
            'obj': self.hendrix,
            'request': request
        })
        # The tag flips between follow and unfollow URLs with state.
        self.assertEqual(follow_url, tpl.render(ctx))
        utils.follow(self.lennon, self.hendrix)
        self.assertEqual(unfollow_url, tpl.render(ctx))
        utils.unfollow(self.lennon, self.hendrix)
        self.assertEqual(follow_url, tpl.render(ctx))
        tpl = template.Template("""{% load follow_tags %}{% follow_url obj user %}""")
        ctx2 = template.Context({
            'obj': self.lennon,
            'user': self.hendrix,
            'request': request
        })
        self.assertEqual(utils.follow_url(self.hendrix, self.lennon), tpl.render(ctx2))
        tpl = template.Template("""{% load follow_tags %}{% if request.user|is_following:obj %}True{% else %}False{% endif %}""")
        self.assertEqual("False", tpl.render(ctx))
        utils.follow(self.lennon, self.hendrix)
        self.assertEqual("True", tpl.render(ctx))
        tpl = template.Template("""{% load follow_tags %}{% follow_form obj %}""")
        self.assertEqual(True, isinstance(tpl.render(ctx), unicode))
        tpl = template.Template("""{% load follow_tags %}{% follow_form obj "follow/form.html" %}""")
        self.assertEqual(True, isinstance(tpl.render(ctx), unicode))

    def test_signals(self):
        """followed/unfollowed signals fire per sender model with the
        expected user/target/instance arguments."""
        Handler = type('Handler', (object,), {
            'inc': lambda self: setattr(self, 'i', getattr(self, 'i') + 1),
            'i': 0
        })
        user_handler = Handler()
        group_handler = Handler()

        def follow_handler(sender, user, target, instance, **kwargs):
            self.assertEqual(sender, User)
            self.assertEqual(self.lennon, user)
            self.assertEqual(self.hendrix, target)
            self.assertEqual(True, isinstance(instance, Follow))
            user_handler.inc()

        def unfollow_handler(sender, user, target, instance, **kwargs):
            self.assertEqual(sender, User)
            self.assertEqual(self.lennon, user)
            self.assertEqual(self.hendrix, target)
            self.assertEqual(True, isinstance(instance, Follow))
            user_handler.inc()

        def group_follow_handler(sender, **kwargs):
            self.assertEqual(sender, Group)
            group_handler.inc()

        def group_unfollow_handler(sender, **kwargs):
            self.assertEqual(sender, Group)
            group_handler.inc()

        signals.followed.connect(follow_handler, sender=User, dispatch_uid='userfollow')
        signals.unfollowed.connect(unfollow_handler, sender=User, dispatch_uid='userunfollow')
        signals.followed.connect(group_follow_handler, sender=Group, dispatch_uid='groupfollow')
        signals.unfollowed.connect(group_unfollow_handler, sender=Group, dispatch_uid='groupunfollow')
        utils.follow(self.lennon, self.hendrix)
        utils.unfollow(self.lennon, self.hendrix)
        self.assertEqual(2, user_handler.i)
        # Group follow/unfollow must not re-trigger the User handlers.
        utils.follow(self.lennon, self.musicians)
        utils.unfollow(self.lennon, self.musicians)
        self.assertEqual(2, user_handler.i)
        self.assertEqual(2, group_handler.i)

    def test_anonymous_is_following(self):
        """Anonymous users never follow anything."""
        self.assertEqual(False, Follow.objects.is_following(AnonymousUser(), self.lennon))
| |
# pyOCD debugger
# Copyright (c) 2017-2020 Arm Limited
# Copyright (c) 2021 Chris Reed
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import (NamedTuple, Optional)
class BoardInfo(NamedTuple):
    """Static description of a development board, keyed by its board ID
    in BOARD_ID_TO_INFO below."""
    # Human readable board name.
    name: str
    # pyOCD target type name.
    target: str
    # File name of the functional test binary for this board, if any.
    binary: Optional[str] = None
    # Board vendor name. NOTE(review): not populated by any entry in the
    # visible table; presumably filled in elsewhere -- confirm.
    vendor: Optional[str] = None
# Map from 4-character board ID to its BoardInfo description.
BOARD_ID_TO_INFO = {
    # Note: please keep board list sorted by ID!
    #
    # Board ID      Board Name              Target                  Test Binary
    "0200": BoardInfo(  "FRDM-KL25Z",           "kl25z",            "l1_kl25z.bin"          ),
    "0201": BoardInfo(  "FRDM-KW41Z",           "kw41z4",           "l1_kw41z4.bin"         ),
    "0202": BoardInfo(  "USB-KW41Z",            "kw41z4",           "l1_kw41z4.bin"         ),
    "0203": BoardInfo(  "TWR-KL28Z72M",         "kl28z",            "l1_kl28z.bin",         ),
    "0204": BoardInfo(  "FRDM-KL02Z",           "kl02z",            "l1_kl02z.bin",         ),
    "0205": BoardInfo(  "FRDM-KL28Z",           "kl28z",            "l1_kl28z.bin",         ),
    "0206": BoardInfo(  "TWR-KE18F",            "ke18f16",          "l1_ke18f16.bin",       ),
    "0210": BoardInfo(  "FRDM-KL05Z",           "kl05z",            "l1_kl05z.bin",         ),
    "0213": BoardInfo(  "FRDM-KE15Z",           "ke15z7",           "l1_ke15z7.bin",        ),
    "0214": BoardInfo(  "Hexiwear",             "k64f",             "l1_k64f.bin",          ),
    "0215": BoardInfo(  "FRDM-KL28ZEM",         "kl28z",            "l1_kl28z.bin",         ),
    "0216": BoardInfo(  "HVP-KE18F",            "ke18f16",          "l1_ke18f16.bin",       ),
    "0217": BoardInfo(  "FRDM-K82F",            "k82f25615",        "l1_k82f.bin",          ),
    "0218": BoardInfo(  "FRDM-KL82Z",           "kl82z7",           "l1_kl82z.bin",         ),
    "0219": BoardInfo(  "TWR-KV46F150M",        "mkv46f256vll16",   None,                   ),
    "0220": BoardInfo(  "FRDM-KL46Z",           "kl46z",            "l1_kl46z.bin",         ),
    "0221": BoardInfo(  "TWR-KV11Z75M",         "kv11z7",           None,                   ),
    "0222": BoardInfo(  "FRDM-KEA128Z",         "skeaz128xxx4",     None,                   ),
    "0223": BoardInfo(  "FRDM-KE02Z",           "mke02z64vlh4",     None,                   ),
    "0224": BoardInfo(  "FRDM-K28F",            "k28f15",           "l1_k28f.bin",          ),
    "0225": BoardInfo(  "FRDM-K32W042",         "k32w042s",         "l1_k32w042s.bin",      ),
    "0226": BoardInfo(  "MIMXRT1020-EVK",       "mimxrt1020",       "l1_mimxrt1020-evk.bin",),
    "0227": BoardInfo(  "MIMXRT1050-EVKB",      "mimxrt1050_hyperflash", "l1_mimxrt1050-evkb_hyperflash.bin",),
    "0228": BoardInfo(  "Rapid-IoT-K64F",       "k64f",             None,                   ),
    "0229": BoardInfo(  "MIMXRT1060-EVK",       "mimxrt1060",       'evkmimxrt1060.bin',    ),
    "0230": BoardInfo(  "FRDM-K20D50M",         "k20d50m",          "l1_k20d50m.bin",       ),
    "0231": BoardInfo(  "FRDM-K22F",            "k22f",             "l1_k22f.bin",          ),
    "0232": BoardInfo(  "MIMXRT1064-EVK",       "mimxrt1064",       'evkmimxrt1064.bin',    ),
    "0233": BoardInfo(  "FRDM-KE16Z",           "mke16z64vlf4",     None,                   ),
    "0234": BoardInfo(  "Rapid-IoT-KW41Z",      "kw41z4",           "l1_kw41z4.bin",        ),
    "0235": BoardInfo(  "LPC54018IoTModule",    "lpc54018jet180",   None,                   ),
    "0236": BoardInfo(  "LPCXpresso55S69",      "lpc55s69",         "lpcxpresso55s69.bin",  ),
    "0237": BoardInfo(  "FRDM-K32L3A6",         "k32l3a60vpj1a",    None,                   ),
    "0238": BoardInfo(  "MIMXRT1024-EVK",       "mimxrt1024",       "evkmimxrt1024.bin",    ),
    "0239": BoardInfo(  "FRDM-K32L2B3",         "k32l2b3",          "l1_frdm_k32l2b3.bin",  ),
    "0240": BoardInfo(  "FRDM-K64F",            "k64f",             "l1_k64f.bin",          ),
    "0241": BoardInfo(  "TWR-KM35Z75M",         "mkm35z512vll7",    None,                   ),
    "0242": BoardInfo(  "MIMXRT1010-EVK",       "mimxrt1010",       "l1_mimxrt1010-evk.bin",),
    "0243": BoardInfo(  "MIMXRT1015-EVK",       "mimxrt1015",       "l1_mimxrt1015-evk.bin",),
    "0244": BoardInfo(  "MIMXRT1170-EVK",       "mimxrt1170_cm7",   "l1_rt1170.bin",        ),
    "0245": BoardInfo(  "IBMEthernetKit",       "k64f",             "l1_k64f.bin"           ),
    "0246": BoardInfo(  "MIMXRT1160-EVK",       "mimxrt1160_cm7",   None,                   ),
    "0250": BoardInfo(  "FRDM-KW24D512",        "kw24d5",           "l1_kw24d5.bin"         ),
    "0251": BoardInfo(  "FRDM-KW36",            "kw36z4",           "l1_kw36z.bin",         ),
    "0252": BoardInfo(  "FRDM-KW38",            "kw38z4",           None,                   ),
    "0253": BoardInfo(  "USB-KW38",             "kw38z4",           None,                   ),
    "0254": BoardInfo(  "KW38-ER-RD",           "kw38z4",           None,                   ),
    "0260": BoardInfo(  "FRDM-KL26Z",           "kl26z",            "l1_kl26z.bin",         ),
    "0261": BoardInfo(  "FRDM-KL27Z",           "kl27z4",           "l1_kl27z.bin",         ),
    "0262": BoardInfo(  "FRDM-KL43Z",           "kl43z4",           "l1_kl26z.bin",         ),
    "0270": BoardInfo(  "FRDM-KE02Z40M",        "mke02z64vlh4",     None,                   ),
    "0280": BoardInfo(  "TWR-K24F120M",         "mk24fn256vdc12",   None,                   ),
    "0290": BoardInfo(  "FRDM-KW40Z",           "kw40z4",           "l1_kw40z.bin",         ),
    "0291": BoardInfo(  "TWR-KL82Z72M",         "kl82z7",           "l1_kl82z.bin",         ),
    "0298": BoardInfo(  "FRDM-KV10Z",           "kv10z7",           "l1_kl25z.bin"          ),
    "0300": BoardInfo(  "TWR-KV11Z75M",         "kv11z7",           "l1_kl25z.bin"          ),
    "0305": BoardInfo(  "MTS_MDOT_F405RG",      "stm32f405rgtx",    None                    ),
    "0310": BoardInfo(  "MTS_DRAGONFLY_F411RE", "stm32f411retx",    None                    ),
    "0311": BoardInfo(  "FRDM-K66F",            "k66f18",           "l1_k66f.bin",          ),
    "0312": BoardInfo(  "MTS_DRAGONFLY_L471QG", "stm32l471qgix",    None                    ),
    "0315": BoardInfo(  "MTS_MDOT_F411RE",      "stm32f411retx",    None                    ),
    "0320": BoardInfo(  "FRDM-KW01Z9032",       "kw01z4",           "l1_kl26z.bin"          ),
    "0321": BoardInfo(  "USB-KW01Z",            "kw01z4",           "l1_kl25z.bin"          ),
    "0324": BoardInfo(  "USB-KW40Z",            "kw40z4",           "l1_kl25z.bin"          ),
    "0330": BoardInfo(  "TWR-KV58F220M",        "mkv58f512vll24",   None,                   ),
    "0340": BoardInfo(  "TWR-K80F150M",         "mk80fn256vll15",   None,                   ),
    "0341": BoardInfo(  "FRDM-KV31F",           "mkv31f512vll12",   None,                   ),
    "0350": BoardInfo(  "XDOT_L151CC",          "stm32l151cctx",    None                    ),
    "0400": BoardInfo(  "MAXWSNENV",            "max32600",         "l1_maxwsnenv.bin",     ),
    "0405": BoardInfo(  "MAX32600MBED",         "max32600",         "l1_max32600mbed.bin",  ),
    "0406": BoardInfo(  "MAX32620MBED",         "max32620",         None                    ),
    "0407": BoardInfo(  "MAX32620HSP",          "max32620",         None                    ),
    "0408": BoardInfo(  "MAX32625NEXPAQ",       "max32625",         None                    ),
    "0409": BoardInfo(  "MAX32630FTHR",         "max32630",         "max32630fthr.bin",     ),
    "0415": BoardInfo(  "MAX32625MBED",         "max32625",         "max32625mbed.bin",     ),
    "0416": BoardInfo(  "MAX32625PICO",         "max32625",         "max32625pico.bin",     ),
    "0417": BoardInfo(  "MAX32630MBED",         "max32630",         None                    ),
    "0418": BoardInfo(  "MAX32620FTHR",         "max32620",         "max32620fthr.bin",     ),
    "0420": BoardInfo(  "MAX32630HSP3",         "max32630",         None                    ),
    "0421": BoardInfo(  "MAX32660EVSYS",        "max32660",         "max32660evsys.bin",    ),
    "0451": BoardInfo(  "MTB MXChip EMW3166",   "stm32f412xg",      "mtb_mxchip_emw3166.bin",),
    "0459": BoardInfo(  "MTB Advantech WISE-1530", "stm32f412xg",   "mtb_wise-1530.bin",    ),
    "0462": BoardInfo(  "MTB USI WM-BN-BM-22",  "stm32f412xg",      "mtb_usi_wm-bn-bm-22.bin",),
    "0602": BoardInfo(  "EV_COG_AD3029LZ",      "aducm3029",        None                    ),
    "0603": BoardInfo(  "EV_COG_AD4050LZ",      "aducm4050",        None                    ),
    "0604": BoardInfo(  "SDK-K1",               "stm32f469nihx",    None,                   ),
    "0700": BoardInfo(  "NUCLEO-F103RB",        "stm32f103rb",      "ST-Nucleo-F103RB.bin", ),
    "0705": BoardInfo(  "NUCLEO-F302R8",        "stm32f302r8tx",    None,                   ),
    "0710": BoardInfo(  "NUCLEO-L152RE",        "stm32l152re",      "NUCLEO_L152RE.bin",    ),
    "0715": BoardInfo(  "NUCLEO-L053R8",        "stm32l053r8tx",    "NUCLEO_L053R8.bin",    ),
    "0720": BoardInfo(  "NUCLEO-F401RE",        "stm32f401retx",    None,                   ),
    "0725": BoardInfo(  "NUCLEO-F030R8",        "stm32f030r8tx",    None,                   ),
    "0729": BoardInfo(  "NUCLEO-G071RB",        "stm32g071rbtx",    None,                   ),
    "0730": BoardInfo(  "NUCLEO-F072RB",        "stm32f072rbtx",    "NUCLEO_F072RB.bin",    ),
    "0735": BoardInfo(  "NUCLEO-F334R8",        "stm32f334r8tx",    "NUCLEO_F334R8.bin",    ),
    "0740": BoardInfo(  "NUCLEO-F411RE",        "stm32f411retx",    "NUCLEO_F411RE.bin",    ),
    "0742": BoardInfo(  "NUCLEO-F413ZH",        "stm32f413zhtx",    None,                   ),
    "0743": BoardInfo(  "DISCO-F413ZH",         "stm32f413zhtx",    None,                   ),
    "0744": BoardInfo(  "NUCLEO-F410RB",        "stm32f410rbtx",    None,                   ),
    "0745": BoardInfo(  "NUCLEO-F303RE",        "stm32f303retx",    None,                   ),
    "0746": BoardInfo(  "DISCO-F303VC",         "stm32f303vcyx",    None,                   ),
    "0747": BoardInfo(  "NUCLEO-F303ZE",        "stm32f303zetx",    None,                   ),
    "0750": BoardInfo(  "NUCLEO-F091RC",        "stm32f091rctx",    None,                   ),
    "0755": BoardInfo(  "NUCLEO-F070RB",        "stm32f070rbtx",    None,                   ),
    "0760": BoardInfo(  "NUCLEO-L073RZ",        "stm32l073rztx",    None,                   ),
    "0764": BoardInfo(  "DISCO-L475VG-IOT01A",  "stm32l475xg",      "stm32l475vg_iot01a.bin",),
    "0765": BoardInfo(  "NUCLEO-L476RG",        "stm32l476rgtx",    "NUCLEO_L476RG.bin",    ),
    "0770": BoardInfo(  "NUCLEO-L432KC",        "stm32l432kcux",    "NUCLEO_L432KC.bin",    ),
    "0774": BoardInfo(  "DISCO-L4R9I",          "stm32l4r9aiix",    None,                   ),
    "0775": BoardInfo(  "NUCLEO-F303K8",        "stm32f303k8tx",    None,                   ),
    "0776": BoardInfo(  "NUCLEO-L4R5ZI",        "stm32l4r5zitx",    None,                   ),
    "0777": BoardInfo(  "NUCLEO-F446RE",        "stm32f446retx",    None,                   ),
    "0778": BoardInfo(  "NUCLEO-F446ZE",        "stm32f446zetx",    None,                   ),
    "0779": BoardInfo(  "NUCLEO-L433RC-P",      "stm32l433rctx",    None,                   ),
    "0780": BoardInfo(  "NUCLEO-L011K4",        "stm32l011k4tx",    None,                   ),
    "0781": BoardInfo(  "NUCLEO-L4R5ZI-P",      "stm32l4r5zitx",    None,                   ),
    "0783": BoardInfo(  "NUCLEO-L010RB",        "stm32l010rbtx",    None,                   ),
    "0785": BoardInfo(  "NUCLEO-F042K6",        "stm32f042k6tx",    None,                   ),
    "0788": BoardInfo(  "DISCO-F469NI",         "stm32f469nihx",    None,                   ),
    "0790": BoardInfo(  "NUCLEO-L031K6",        "stm32l031x6",      None,                   ),
    "0791": BoardInfo(  "NUCLEO-F031K6",        "stm32f031k6tx",    None,                   ),
    "0795": BoardInfo(  "DISCO-F429ZI",         "stm32f429zitx",    None,                   ),
    "0796": BoardInfo(  "NUCLEO-F429ZI",        "stm32f429xi",      "nucleo_f429zi.bin",    ),
    "0797": BoardInfo(  "NUCLEO-F439ZI",        "stm32f439zitx",    None,                   ),
    "0805": BoardInfo(  "DISCO-L053C8",         "stm32l053c8tx",    None,                   ),
    "0810": BoardInfo(  "DISCO-F334C8",         "stm32f334c8tx",    None,                   ),
    "0812": BoardInfo(  "NUCLEO-F722ZE",        "stm32f722zetx",    None,                   ),
    "0813": BoardInfo(  "NUCLEO-H743ZI",        "stm32h743zitx",    None,                   ),
    "0814": BoardInfo(  "DISCO-H747I",          "stm32h747xihx",    None,                   ),
    "0815": BoardInfo(  "DISCO-F746NG",         "stm32f746nghx",    None,                   ),
    "0816": BoardInfo(  "NUCLEO-F746ZG",        "stm32f746zgtx",    "NUCLEO_F746ZG.bin",    ),
    "0817": BoardInfo(  "DISCO-F769NI",         "stm32f769nihx",    None,                   ),
    "0818": BoardInfo(  "NUCLEO-F767ZI",        "stm32f767zitx",    "NUCLEO_F767ZI.bin",    ),
    "0820": BoardInfo(  "DISCO-L476VG",         "stm32l476vgtx",    None,                   ),
    "0821": BoardInfo(  "NUCLEO-L452RE",        "stm32l452retx",    None,                   ),
    "0822": BoardInfo(  "DISCO-L496AG",         "stm32l496agix",    None,                   ),
    "0823": BoardInfo(  "NUCLEO-L496ZG",        "stm32l496zgtx",    None,                   ),
    "0824": BoardInfo(  "LPCXpresso824-MAX",    "lpc824",           "l1_lpc824.bin",        ),
    "0825": BoardInfo(  "DISCO-F412ZG",         "stm32f412xg",      "nucleo_f412zg.bin",    ),
    "0826": BoardInfo(  "NUCLEO-F412ZG",        "stm32f412xg",      "nucleo_f412zg.bin",    ),
    "0827": BoardInfo(  "NUCLEO-L486RG",        "stm32l486rgtx",    None,                   ),
    "0828": BoardInfo(  "NUCLEO-L496ZG-P",      "stm32l496zgtx",    None,                   ),
    "0829": BoardInfo(  "NUCLEO-L452RE-P",      "stm32l452retx",    None,                   ),
    "0830": BoardInfo(  "DISCO-F407VG",         "stm32f407vgtx",    None,                   ),
    "0833": BoardInfo(  "DISCO-L072CZ-LRWAN1",  "stm32l072cztx",    None,                   ),
    "0835": BoardInfo(  "NUCLEO-F207ZG",        "stm32f207zgtx",    "NUCLEO_F207ZG.bin",    ),
    "0836": BoardInfo(  "NUCLEO-H743ZI2",       "stm32h743zitx",    None,                   ),
    "0839": BoardInfo(  "NUCLEO-WB55RG",        "stm32wb55rgvx",    None,                   ),
    "0840": BoardInfo(  "B96B-F446VE",          "stm32f446vetx",    None,                   ),
    "0841": BoardInfo(  "NUCLEO-G474RE",        "stm32g474retx",    None,                   ),
    "0842": BoardInfo(  "NUCLEO-H753ZI",        "stm32h753zitx",    None,                   ),
    "0843": BoardInfo(  "NUCLEO-H745ZI-Q",      "stm32h745zitx",    None,                   ),
    "0847": BoardInfo(  "DISCO-H745I",          "stm32h745zihx",    None,                   ),
    "0849": BoardInfo(  "NUCLEO-G070RB",        "stm32g070rbtx",    None,                   ),
    "0850": BoardInfo(  "NUCLEO-G431RB",        "stm32g431rbtx",    None,                   ),
    "0851": BoardInfo(  "NUCLEO-G431KB",        "stm32g431kbtx",    None,                   ),
    "0852": BoardInfo(  "NUCLEO-G031K8",        "stm32g031K8tx",    None,                   ),
    "0853": BoardInfo(  "NUCLEO-F301K8",        "stm32f301k8tx",    None,                   ),
    "0854": BoardInfo(  "NUCLEO-L552ZE-Q",      "stm32l552zetxq",   None,                   ),
    "0855": BoardInfo(  "DISCO-L562QE",         "stm32l562qeixq",   None,                   ),
    "0860": BoardInfo(  "NUCLEO-H7A3ZI-Q",      "stm32h7a3zitxq",   None,                   ),
    "0866": BoardInfo(  "NUCLEO-WL55JC",        "stm32wl55jcix",    None,                   ),
    "0879": BoardInfo(  "NUCLEO-F756ZG",        "stm32f756zgtx",    None,                   ),
    "0882": BoardInfo(  "NUCLEO-G491RE",        "stm32g491retx",    None,                   ),
    "0883": BoardInfo(  "NUCLEO-WB15CC",        "stm32wb15ccux",    None,                   ),
    "0884": BoardInfo(  "DISCO-WB5MMG",         "stm32wb5mmghx",    None,                   ),
    "0885": BoardInfo(  "B-L4S5I-IOT01A",       "stm32l4s5vitx",    None,                   ),
    "0886": BoardInfo(  "NUCLEO-U575ZI-Q",      "stm32u575zitx",    None,                   ),
    "0887": BoardInfo(  "B-U585I-IOT02A",       "stm32u585aiix",    None,                   ),
    "1010": BoardInfo(  "mbed NXP LPC1768",     "lpc1768",          "l1_lpc1768.bin",       ),
    "1017": BoardInfo(  "mbed HRM1017",         "nrf51",            "l1_nrf51.bin",         ),
    "1018": BoardInfo(  "Switch-Science-mbed-LPC824", "lpc824",     "l1_lpc824.bin",        ),
    "1019": BoardInfo(  "mbed TY51822r3",       "nrf51",            "l1_nrf51.bin",         ),
    "1040": BoardInfo(  "mbed NXP LPC11U24",    "lpc11u24",         "l1_lpc11u24.bin",      ),
    "1050": BoardInfo(  "NXP LPC800-MAX",       "lpc800",           "l1_lpc800.bin",        ),
    "1054": BoardInfo(  "LPCXpresso54114-MAX",  "lpc54114",         "l1_lpc54114.bin",      ),
    "1056": BoardInfo(  "LPCXpresso54608-MAX",  "lpc54608",         "l1_lpc54608.bin",      ),
    "1060": BoardInfo(  "EA-LPC4088",           "lpc4088qsb",       "l1_lpc4088qsb.bin",    ),
    "1062": BoardInfo(  "EA-LPC4088-Display-Module", "lpc4088dm",   "l1_lpc4088dm.bin",     ),
    # BUGFIX: "1068" was listed between "1060" and "1062", violating the
    # "keep board list sorted by ID" invariant stated above. Moved here.
    "1068": BoardInfo(  "LPC11U68",             "lpc11u68jbd100",   None,                   ),
    "1070": BoardInfo(  "nRF51822-mKIT",        "nrf51",            "l1_nrf51.bin",         ),
    "1080": BoardInfo(  "mBuino",               "lpc11u24",         "l1_lpc11u24.bin",      ),
    "1090": BoardInfo(  "RedBearLab-nRF51822",  "nrf51",            "l1_nrf51.bin",         ),
    "1093": BoardInfo(  "RedBearLab-BLE-Nano2", "nrf52",            "l1_nrf52-dk.bin",      ),
    "1095": BoardInfo(  "RedBearLab-BLE-Nano",  "nrf51",            "l1_nrf51.bin",         ),
    "1100": BoardInfo(  "nRF51-DK",             "nrf51",            "l1_nrf51-dk.bin",      ),
    "1101": BoardInfo(  "nRF52-DK",             "nrf52",            "l1_nrf52-dk.bin",      ),
    "1102": BoardInfo(  "nRF52840-DK",          "nrf52840",         "l1_nrf52840-dk.bin",   ),
    "1114": BoardInfo(  "mbed LPC1114FN28",     "lpc11xx_32",       "l1_mbed_LPC1114FN28.bin",),
    "1120": BoardInfo(  "nRF51-Dongle",         "nrf51",            "l1_nrf51.bin",         ),
    "1200": BoardInfo(  "NCS36510-EVK",         "ncs36510",         "l1_ncs36510-evk.bin",  ),
    "1234": BoardInfo(  "u-blox-C027",          "lpc1768",          "l1_lpc1768.bin",       ),
    "1236": BoardInfo(  "u-blox EVK-ODIN-W2",   "stm32f439xi",      "ublox_evk_odin_w2.bin",),
    "1237": BoardInfo(  "u-blox-EVK-NINA-B1",   "nrf52",            "l1_nrf52-dk.bin",      ),
    "12A0": BoardInfo(  "Calliope-mini",        "nrf51",            None,                   ),
    "1304": BoardInfo(  "NuMaker-PFM-M487",     "m487jidae",        None,                   ),
    "1309": BoardInfo(  "NuMaker-M252KG",       "m252kg6ae",        None,                   ),
    "1310": BoardInfo(  "NuMaker-IoT-M263A",    "m263kiaae",        None,                   ),
    "1312": BoardInfo(  "NuMaker-M2354",        "m2354kjfae",       None,                   ),
    "1549": BoardInfo(  "LPC1549",              "lpc1549jbd100",    None,                   ),
    "1600": BoardInfo(  "Bambino 210",          "lpc4330",          "l1_lpc4330.bin",       ),
    "1605": BoardInfo(  "Bambino 210E",         "lpc4330",          "l1_lpc4330.bin",       ),
    "1900": BoardInfo(  "CY8CKIT-062-WIFI-BT",  "cy8c6xx7",         "l1_cy8c6xx7.bin",      ),
    "1901": BoardInfo(  "CY8CPROTO-062-4343W",  "cy8c6xxa",         "l1_cy8c6xxa.bin",      ),
    "1902": BoardInfo(  "CY8CKIT-062-BLE",      "cy8c6xx7",         "l1_cy8c6xx7.bin",      ),
    "1903": BoardInfo(  "CYW9P62S1-43012EVB-01","cy8c6xx7_s25fs512s", "l1_cy8c6xx7.bin",    ),
    "1904": BoardInfo(  "CY8CPROTO-063-BLE",    "cy8c6xx7_nosmif",  "l1_cy8c6xx7.bin",      ),
    "1905": BoardInfo(  "CY8CKIT-062-4343W",    "cy8c6xxa",         "l1_cy8c6xxa.bin",      ),
    "1906": BoardInfo(  "CYW943012P6EVB-01",    "cy8c6xx7",         "l1_cy8c6xx7.bin",      ),
    "1907": BoardInfo(  "CY8CPROTO-064-SB",     "cy8c64xx_cm4_s25hx512t", "l1_cy8c6xx7.bin",),
    "1908": BoardInfo(  "CYW9P62S1-43438EVB-01","cy8c6xx7",         "l1_cy8c6xx7.bin",      ),
    "1909": BoardInfo(  "CY8CPROTO-062S2-43012","cy8c6xxa",         "l1_cy8c6xxa.bin",      ),
    "190A": BoardInfo(  "CY8CKIT-064S2-4343W",  "cy8c64xa_cm4",     "l1_cy8c6xxa.bin",      ),
    "190B": BoardInfo(  "CY8CKIT-062S2-43012",  "cy8c6xxa",         "l1_062S2-43012.bin",   ),
    "190C": BoardInfo(  "CY8CPROTO-064B0S3",    "cy8c64x5_cm4",     "l1_cy8c6xxa.bin",      ),
    "190D": BoardInfo(  "AUGUST_CYW43012",      "cy8c64xx_cm4",     "l1_cy8c6xx7.bin",      ),
    "190E": BoardInfo(  "CY8CPROTO-062S3-4343W","cy8c6xx5",         "l1_cy8c6xxa.bin",      ),
    "190F": BoardInfo(  "CY8CPROTO-064B0S1-BLE","cy8c64xx_cm4_nosmif", "l1_cy8c6xx7.bin",   ),
    "1910": BoardInfo(  "CY8CKIT-064B0S2-4343W","cy8c64xa_cm4",     "l1_cy8c6xxa.bin",      ),
    "1911": BoardInfo(  "CY8CKIT-064S0S2-4343W","cy8c64xa_cm4",     "l1_cy8c6xxa.bin",      ),
    "1912": BoardInfo(  "CYSBSYSKIT-01",        "cy8c6xxa",         "l1_cy8c6xxa.bin",      ),
    "2201": BoardInfo(  "WIZwiki_W7500",        "w7500",            "l1_w7500mbed.bin",     ),
    "2203": BoardInfo(  "WIZwiki_W7500P",       "w7500",            "l1_w7500mbed.bin",     ),
    "2600": BoardInfo(  "ep_agora",             "nrf52840",         None,                   ),
    "3300": BoardInfo(  "CC3220SF_LaunchXL",    "cc3220sf",         "l1_cc3220sf.bin",      ),
    "3701": BoardInfo(  "Samsung_S5JS100",      "s5js100",          "s5js100.bin",          ),
    "4100": BoardInfo(  "NAMote72",             "stm32l152rctx",    None,                   ),
    "4337": BoardInfo(  "LPC4337",              "lpc4337",          None,                   ),
    "4600": BoardInfo(  "Realtek RTL8195AM",    "rtl8195am",        "l1_rtl8195am.bin",     ),
    "5002": BoardInfo(  "Arm V2M-MPS3",         "cortex_m",         None,                   ),
    "5005": BoardInfo(  "Arm V2M-MPS3",         "cortex_m",         None,                   ),
    "5006": BoardInfo(  "Arm Musca-A1",         "musca_a1",         "l1_musca_a1.bin",      ),
    "5007": BoardInfo(  "Arm Musca-B1",         "musca_b1",         "l1_musca_b1.bin",      ),
    "5009": BoardInfo(  "Arm Musca-S1",         "musca_s1",         None,                   ),
    "7402": BoardInfo(  "mbed 6LoWPAN Border Router HAT", "k64f",   "l1_k64f.bin",          ),
    "7778": BoardInfo(  "Teensy 3.1",           "mk20dx256vlh7",    None,                   ),
    "8080": BoardInfo(  "L-Tek FF1705",         "stm32l151cctx",    None,                   ),
    "8081": BoardInfo(  "L-Tek FF-LPC546XX",    "lpc54606",         None,                   ),
    "9004": BoardInfo(  "Arch Pro",             "lpc1768",          "l1_lpc1768.bin",       ),
    "9009": BoardInfo(  "Arch BLE",             "nrf51",            "l1_nrf51.bin",         ),
    "9012": BoardInfo(  "Seeed Tiny BLE",       "nrf51",            "l1_nrf51.bin",         ),
    "9014": BoardInfo(  "Seeed 96Boards Nitrogen", "nrf52",         "l1_nrf52-dk.bin",      ),
    "9900": BoardInfo(  "micro:bit",            "nrf51",            "l1_microbit.bin",      ),
    "9901": BoardInfo(  "micro:bit",            "nrf51",            "l1_microbit.bin",      ),
    "9903": BoardInfo(  "micro:bit v2",         "nrf52833",         "microbitv2.bin",       ),
    "9904": BoardInfo(  "micro:bit v2",         "nrf52833",         "microbitv2.bin",       ),
    "9905": BoardInfo(  "micro:bit v2",         "nrf52833",         "microbitv2.bin",       ),
    "9906": BoardInfo(  "micro:bit v2",         "nrf52833",         "microbitv2.bin",       ),
    "C004": BoardInfo(  "tinyK20",              "k20d50m",          "l1_k20d50m.bin",       ),
    "C006": BoardInfo(  "VBLUno51",             "nrf51",            "l1_nrf51.bin",         ),
    }
| |
import os
from datetime import timedelta
from django.conf import settings
from django.test.utils import override_settings
from django.urls import reverse
from freezegun import freeze_time
from olympia import amo
from olympia.amo.tests import (
APITestClientWebToken,
APITestClientJWT,
get_random_ip,
reverse_ns,
TestCase,
user_factory,
)
from .test_models import UploadMixin
from ..models import FileUpload
from ..views import FileUploadViewSet
# Fixture locations and archive member names shared by the tests in this
# module. NOTE(review): not referenced by the tests visible here --
# presumably used by tests further down the file; confirm.
files_fixtures = 'src/olympia/files/fixtures/files/'
unicode_filenames = 'src/olympia/files/fixtures/files/unicode-filenames.xpi'
not_binary = 'install.js'
binary = 'dictionaries/ar.dic'
class TestServeFileUpload(UploadMixin, TestCase):
    """Tests for the authenticated file-upload download view."""

    def setUp(self):
        super().setUp()
        self.upload = self.get_upload('webextension.xpi')
        self.url = reverse('files.serve_file_upload', args=[self.upload.uuid.hex])

    def test_returns_error_when_no_access_token(self):
        # Missing access token -> forbidden.
        resp = self.client.get(self.url)
        assert resp.status_code == 403

    def test_returns_error_when_access_token_is_invalid(self):
        # Bogus access token -> forbidden.
        resp = self.client.get(f'{self.url}?access_token=nope')
        assert resp.status_code == 403

    def test_get(self):
        # With a valid token the file is served via the X-Sendfile header.
        resp = self.client.get(self.upload.get_authenticated_download_url())
        assert resp.status_code == 200
        assert resp['content-type'] == 'application/octet-stream'
        assert resp[settings.XSENDFILE_HEADER] == self.upload.path

    def test_returns_410_when_upload_path_is_falsey(self):
        # An upload whose file is gone responds with 410 Gone.
        self.upload.path = ''
        self.upload.save()
        resp = self.client.get(self.upload.get_authenticated_download_url())
        assert resp.status_code == 410
class TestFileUploadViewSet(TestCase):
    """API tests for the add-on upload endpoints (list/create/retrieve),
    including per-user and per-IP burst/hourly/daily throttling rules."""
    # Authentication client used for API requests; the JWT variant is
    # exercised by the TestFileUploadViewSetJWTAuth subclass below.
    client_class = APITestClientWebToken
    def setUp(self):
        """Create one upload owned by the authenticated user, plus uploads
        belonging to other users (which must remain invisible)."""
        super().setUp()
        self.list_url = reverse_ns('addon-upload-list', api_version='v5')
        self.user = user_factory(read_dev_agreement=self.days_ago(0))
        # Add a file upload
        self.upload = FileUpload.objects.create(
            user=self.user, source=amo.UPLOAD_SOURCE_ADDON_API, ip_address='127.0.0.9'
        )
        # Add some other ones from other users
        self.other_user_upload = FileUpload.objects.create(
            user=user_factory(),
            source=amo.UPLOAD_SOURCE_ADDON_API,
            ip_address='127.0.0.10',
        )
        FileUpload.objects.create(
            user=user_factory(),
            source=amo.UPLOAD_SOURCE_ADDON_API,
            ip_address='127.0.0.11',
        )
        self.detail_url = reverse_ns(
            'addon-upload-detail',
            kwargs={'uuid': self.upload.uuid.hex},
            api_version='v5',
        )
        self.client.login_api(self.user)
    def _xpi_filepath(self, guid, version):
        """Return the path of the signing fixture XPI for guid/version."""
        return os.path.join(
            'src',
            'olympia',
            'signing',
            'fixtures',
            f'{guid}-{version}.xpi',
        )
    def _create_post(self, channel_name='listed', ip='63.245.208.194'):
        """POST a fixture XPI to the list endpoint, faking the client IP in
        both REMOTE_ADDR and X-Forwarded-For."""
        with open(self._xpi_filepath('@upload-version', '3.0'), 'rb') as upload:
            data = {
                'upload': upload,
                'channel': channel_name,
            }
            response = self.client.post(
                self.list_url,
                data,
                format='multipart',
                REMOTE_ADDR=ip,
                HTTP_X_FORWARDED_FOR=f'{ip}, {get_random_ip()}',
            )
        return response
    def test_not_authenticated(self):
        """Anonymous requests are rejected."""
        self.client.logout_api()
        response = self.client.get(
            self.list_url,
        )
        assert response.status_code == 401
    def test_no_developer_agreement(self):
        """Users who haven't accepted the developer agreement can't upload."""
        self.user.update(read_dev_agreement=None)
        response = self._create_post()
        assert response.status_code in [401, 403]  # JWT auth is a 401; web auth is 403
    def _test_create(self, channel, channel_name):
        """Shared assertions for a successful upload to either channel."""
        upload_count_before = FileUpload.objects.count()
        response = self._create_post(channel_name)
        assert response.status_code == 201
        assert FileUpload.objects.count() == upload_count_before + 1
        upload = FileUpload.objects.last()
        assert upload.name == f'{upload.uuid.hex}_@upload-version-3.0.xpi'
        assert upload.source == amo.UPLOAD_SOURCE_ADDON_API
        assert upload.user == self.user
        assert upload.version == '3.0'
        assert upload.ip_address == '63.245.208.194'
        assert upload.channel == channel
        data = response.json()
        assert data['uuid'] == upload.uuid.hex
        assert data['channel'] == channel_name
    def test_create_listed(self):
        self._test_create(amo.RELEASE_CHANNEL_LISTED, 'listed')
    def test_create_unlisted(self):
        self._test_create(amo.RELEASE_CHANNEL_UNLISTED, 'unlisted')
    def test_list(self):
        """Listing only returns the authenticated user's own uploads."""
        response = self.client.get(
            self.list_url,
        )
        data = response.json()['results']
        assert len(data) == 1  # only the users own uploads
        assert data[0]['uuid'] == self.upload.uuid.hex
        assert data[0]['url'] == self.detail_url
    def test_api_unavailable(self):
        """The endpoint is forbidden when not gated into the API version."""
        with override_settings(DRF_API_GATES={'v5': []}):
            response = self.client.get(
                self.list_url,
            )
        assert response.status_code == 403
    def test_retrieve(self):
        """Detail endpoint returns the upload's serialized data."""
        response = self.client.get(self.detail_url)
        data = response.json()
        assert data['uuid'] == self.upload.uuid.hex
        assert data['url'] == self.detail_url
    def test_cannot_retrieve_other_uploads(self):
        """Other users' uploads 404 rather than leak their existence."""
        detail_url = reverse_ns(
            'addon-upload-detail',
            kwargs={'uuid': self.other_user_upload.uuid.hex},
            api_version='v5',
        )
        response = self.client.get(
            detail_url,
        )
        assert response.status_code == 404
    def test_throttling_ip_burst(self):
        """6 recent uploads from one IP trip the per-minute IP throttle."""
        ip = '63.245.208.194'
        with freeze_time('2019-04-08 15:16:23.42') as frozen_time:
            for _ in range(0, 6):
                self._add_fake_throttling_action(
                    view_class=FileUploadViewSet,
                    url=self.list_url,
                    user=user_factory(),
                    remote_addr=ip,
                )
            # At this point we should be throttled since we're using the same
            # IP. (we're still inside the frozen time context).
            response = self._create_post(ip=ip)
            assert response.status_code == 429, response.content
            # 'Burst' throttling is 1 minute, so 61 seconds later we should be
            # allowed again.
            frozen_time.tick(delta=timedelta(seconds=61))
            response = self._create_post(ip=ip)
            assert response.status_code == 201, response.content
    def test_throttling_ip_hourly(self):
        """50 uploads from one IP trip the per-hour IP throttle."""
        ip = '63.245.208.194'
        with freeze_time('2019-04-08 15:16:23.42') as frozen_time:
            for _ in range(0, 50):
                self._add_fake_throttling_action(
                    view_class=FileUploadViewSet,
                    url=self.list_url,
                    user=user_factory(),
                    remote_addr=ip,
                )
            # At this point we should be throttled since we're using the same
            # IP. (we're still inside the frozen time context).
            response = self._create_post(ip=ip)
            assert response.status_code == 429, response.content
            # One minute later, past the 'burst' throttling period, we're still
            # blocked by the 'hourly' limit.
            frozen_time.tick(delta=timedelta(seconds=61))
            response = self._create_post(ip=ip)
            assert response.status_code == 429
            # 'hourly' throttling is 1 hour, so 3601 seconds later we should
            # be allowed again.
            frozen_time.tick(delta=timedelta(seconds=3601))
            response = self._create_post(ip=ip)
            assert response.status_code == 201
    def test_throttling_user_burst(self):
        """6 recent uploads by one user trip the per-minute user throttle,
        regardless of the originating IP."""
        with freeze_time('2019-04-08 15:16:23.42') as frozen_time:
            for _ in range(0, 6):
                self._add_fake_throttling_action(
                    view_class=FileUploadViewSet,
                    url=self.list_url,
                    user=self.user,
                    remote_addr=get_random_ip(),
                )
            # At this point we should be throttled since we're using the same
            # user. (we're still inside the frozen time context).
            response = self._create_post(ip=get_random_ip())
            assert response.status_code == 429, response.content
            # 'Burst' throttling is 1 minute, so 61 seconds later we should be
            # allowed again.
            frozen_time.tick(delta=timedelta(seconds=61))
            response = self._create_post(ip=get_random_ip())
            assert response.status_code == 201, response.content
    def test_throttling_user_hourly(self):
        """20 uploads by one user trip the per-hour user throttle."""
        with freeze_time('2019-04-08 15:16:23.42') as frozen_time:
            for _ in range(0, 20):
                self._add_fake_throttling_action(
                    view_class=FileUploadViewSet,
                    url=self.list_url,
                    user=self.user,
                    remote_addr=get_random_ip(),
                )
            # At this point we should be throttled since we're using the same
            # user. (we're still inside the frozen time context).
            response = self._create_post(ip=get_random_ip())
            assert response.status_code == 429, response.content
            # One minute later, past the 'burst' throttling period, we're still
            # blocked by the 'hourly' limit.
            frozen_time.tick(delta=timedelta(seconds=61))
            response = self._create_post(ip=get_random_ip())
            assert response.status_code == 429, response.content
            # 3601 seconds later we should be allowed again.
            frozen_time.tick(delta=timedelta(seconds=3601))
            response = self._create_post(ip=get_random_ip())
            assert response.status_code == 201, response.content
    def test_throttling_user_daily(self):
        """48 uploads by one user trip the per-day user throttle."""
        with freeze_time('2019-04-08 15:16:23.42') as frozen_time:
            for _ in range(0, 48):
                self._add_fake_throttling_action(
                    view_class=FileUploadViewSet,
                    url=self.list_url,
                    user=self.user,
                    remote_addr=get_random_ip(),
                )
            # At this point we should be throttled since we're using the same
            # user. (we're still inside the frozen time context).
            response = self._create_post(ip=get_random_ip())
            assert response.status_code == 429, response.content
            # One minute later, past the 'burst' throttling period, we're still
            # blocked by the 'hourly' limit.
            frozen_time.tick(delta=timedelta(seconds=61))
            response = self._create_post(ip=get_random_ip())
            assert response.status_code == 429, response.content
            # After the hourly limit, still blocked.
            frozen_time.tick(delta=timedelta(seconds=3601))
            response = self._create_post(ip=get_random_ip())
            assert response.status_code == 429, response.content
            # 86401 seconds later we should be allowed again (24h + 1s).
            frozen_time.tick(delta=timedelta(seconds=86401))
            response = self._create_post(ip=get_random_ip())
            assert response.status_code == 201, response.content
class TestFileUploadViewSetJWTAuth(TestFileUploadViewSet):
    """Re-run the whole upload API test suite using JWT authentication."""
    client_class = APITestClientJWT
| |
#!/usr/bin/env python
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Authors:
# - Tobias Wegner, tobias.wegner@cern.ch, 2017-2018
# - Paul Nilsson, paul.nilsson@cern.ch, 2017
# Reimplemented by Alexey Anisenkov
import os
import logging
import re
from time import time
from .common import resolve_common_transfer_errors, verify_catalog_checksum #, get_timeout
from pilot.util.container import execute
from pilot.common.exception import PilotException, ErrorCodes
#from pilot.util.timer import timeout
logger = logging.getLogger(__name__)
require_replicas = True  ## indicate if given copytool requires input replicas to be resolved
allowed_schemas = ['root']  # prioritized list of supported schemas for transfers by given copytool
# Base command used by this copytool to perform transfers.
copy_command = 'xrdcp'
def is_valid_for_copy_in(files):
    """Validate the given file list for stage-in; currently a stub that
    accepts everything."""
    ## FIX ME LATER: no real validation implemented yet
    return True
def is_valid_for_copy_out(files):
    """Validate the given file list for stage-out; currently a stub that
    accepts everything."""
    ## FIX ME LATER: no real validation implemented yet
    return True
def _resolve_checksum_option(setup, **kwargs):
    """
    Probe the local xrdcp client and decide which command-line option to use
    for checksum calculation/verification during transfers.
    :param setup: optional setup script to source before running xrdcp
    :param kwargs: passed through to `execute()`
    :return: option string for xrdcp ('' if no checksum flag is available)
    """
    # First log the client version (diagnostics only; result is unused).
    cmd = "%s --version" % copy_command
    if setup:
        cmd = "source %s; %s" % (setup, cmd)
    logger.info("Execute command (%s) to check xrdcp client version" % cmd)
    rcode, stdout, stderr = execute(cmd, **kwargs)
    logger.info("return code: %s" % rcode)
    logger.info("return output: %s" % (stdout + stderr))
    # Then inspect the help output to see which checksum flag is supported.
    cmd = "%s -h" % copy_command
    if setup:
        cmd = "source %s; %s" % (setup, cmd)
    logger.info("Execute command (%s) to decide which option should be used to calc/verify file checksum.." % cmd)
    rcode, stdout, stderr = execute(cmd, **kwargs)
    output = stdout + stderr
    logger.info("return code: %s" % rcode)
    logger.debug("return output: %s" % output)
    coption = ""
    checksum_type = 'adler32'  ## consider only adler32 for now
    if rcode:
        logger.error('FAILED to execute command=%s: %s' % (cmd, output))
    else:
        # Prefer the modern --cksum flag; fall back to legacy -adler/-md5.
        if "--cksum" in output:
            coption = "--cksum %s:print" % checksum_type
        elif "-adler" in output and checksum_type == 'adler32':
            coption = "-adler"
        elif "-md5" in output and checksum_type == 'md5':
            coption = "-md5"
    if coption:
        logger.info("Use %s option to get the checksum for %s command" % (coption, copy_command))
    return coption
#@timeout(seconds=10800)
def _stagefile(coption, source, destination, filesize, is_stagein, setup=None, **kwargs):
    """
    Stage the file (stagein or stageout) with a single xrdcp invocation.
    :param coption: checksum option string from `_resolve_checksum_option()`
    :param source: source turl/path
    :param destination: destination turl/path
    :param filesize: expected file size (currently unused; kept for timeout calculation)
    :param is_stagein: True for stage-in, False for stage-out
    :param setup: optional setup script to source before running xrdcp
    :return: (filesize, checksum, checksum_type) parsed from the xrdcp output
             (all None when no checksum option was used); throws on failure
    :raise: PilotException in case of controlled error
    """
    filesize_cmd, checksum_cmd, checksum_type = None, None, None
    cmd = '%s -np -f %s %s %s' % (copy_command, coption, source, destination)
    if setup:
        cmd = "source %s; %s" % (setup, cmd)
    #timeout = get_timeout(filesize)
    #logger.info("Executing command: %s, timeout=%s" % (cmd, timeout))
    rcode, stdout, stderr = execute(cmd, **kwargs)
    logger.info('rcode=%d, stdout=%s, stderr=%s' % (rcode, stdout, stderr))
    if rcode:  ## error occurred
        error = resolve_common_transfer_errors(stdout + stderr, is_stagein=is_stagein)
        #rcode = error.get('rcode')  ## TO BE IMPLEMENTED
        #if not is_stagein and rcode == PilotErrors.ERR_CHKSUMNOTSUP:  ## stage-out, on fly checksum verification is not supported .. ignore
        #    logger.info('stage-out: ignore ERR_CHKSUMNOTSUP error .. will explicitly verify uploaded file')
        #    return None, None
        raise PilotException(error.get('error'), code=error.get('rcode'), state=error.get('state'))
    # extract filesize and checksum values from output
    if coption != "":
        filesize_cmd, checksum_cmd, checksum_type = get_file_info_from_output(stdout + stderr)
    ## verify transfer by returned checksum or call remote checksum calculation
    ## to be moved at the base level
    is_verified = True  ## TO BE IMPLEMENTED LATER
    if not is_verified:
        rcode = ErrorCodes.GETADMISMATCH if is_stagein else ErrorCodes.PUTADMISMATCH
        raise PilotException("Copy command failed", code=rcode, state='AD_MISMATCH')
    return filesize_cmd, checksum_cmd, checksum_type
# @timeout(seconds=10800)
def copy_in(files, **kwargs):
    """
    Download given files using xrdcp command.
    :param files: list of `FileSpec` objects
    :param kwargs: may contain 'copytools', 'trace_report' and 'workdir';
                   the rest is passed through to `execute()`
    :return: the input `files` list with updated status/checksum fields
    :raise: PilotException in case of controlled error
    """
    #allow_direct_access = kwargs.get('allow_direct_access') or False
    setup = kwargs.pop('copytools', {}).get('xrdcp', {}).get('setup')
    coption = _resolve_checksum_option(setup, **kwargs)
    trace_report = kwargs.get('trace_report')
    localsite = os.environ.get('RUCIO_LOCAL_SITE_ID', None)
    for fspec in files:
        # update the trace report
        localsite = localsite if localsite else fspec.ddmendpoint
        trace_report.update(localSite=localsite, remoteSite=fspec.ddmendpoint, filesize=fspec.filesize)
        trace_report.update(filename=fspec.lfn, guid=fspec.guid.replace('-', ''))
        trace_report.update(scope=fspec.scope, dataset=fspec.dataset)
        # continue loop for files that are to be accessed directly  ## TOBE DEPRECATED (anisyonk)
        #if fspec.is_directaccess(ensure_replica=False) and allow_direct_access and fspec.accessmode == 'direct':
        #    fspec.status_code = 0
        #    fspec.status = 'remote_io'
        #    trace_report.update(url=fspec.turl, clientState='FOUND_ROOT', stateReason='direct_access')
        #    trace_report.send()
        #    continue
        trace_report.update(catStart=time())
        # destination directory: per-file workdir, else the global one, else cwd
        dst = fspec.workdir or kwargs.get('workdir') or '.'
        destination = os.path.join(dst, fspec.lfn)
        try:
            filesize_cmd, checksum_cmd, checksum_type = _stagefile(coption, fspec.turl, destination, fspec.filesize,
                                                                   is_stagein=True, setup=setup, **kwargs)
            fspec.status_code = 0
            fspec.status = 'transferred'
        except PilotException as error:
            fspec.status = 'failed'
            fspec.status_code = error.get_error_code()
            diagnostics = error.get_detail()
            state = 'STAGEIN_ATTEMPT_FAILED'
            trace_report.update(clientState=state, stateReason=diagnostics, timeEnd=time())
            trace_report.send()
            raise PilotException(diagnostics, code=fspec.status_code, state=state)
        else:
            # compare checksums
            fspec.checksum[checksum_type] = checksum_cmd  # remote checksum
            state, diagnostics = verify_catalog_checksum(fspec, destination)
            if diagnostics != "":
                trace_report.update(clientState=state or 'STAGEIN_ATTEMPT_FAILED', stateReason=diagnostics,
                                    timeEnd=time())
                trace_report.send()
                raise PilotException(diagnostics, code=fspec.status_code, state=state)
        trace_report.update(clientState='DONE', stateReason='OK', timeEnd=time())
        trace_report.send()
    return files
# @timeout(seconds=10800)
def copy_out(files, **kwargs):
    """
    Upload given files using xrdcp command.
    :param files: list of `FileSpec` objects
    :param kwargs: may contain 'copytools' and 'trace_report'; the rest is
                   passed through to `execute()`
    :return: the input `files` list with updated status/checksum fields
    :raise: PilotException in case of controlled error
    """
    setup = kwargs.pop('copytools', {}).get('xrdcp', {}).get('setup')
    coption = _resolve_checksum_option(setup, **kwargs)
    trace_report = kwargs.get('trace_report')
    for fspec in files:
        trace_report.update(scope=fspec.scope, dataset=fspec.dataset, url=fspec.surl, filesize=fspec.filesize)
        trace_report.update(catStart=time(), filename=fspec.lfn, guid=fspec.guid.replace('-', ''))
        try:
            filesize_cmd, checksum_cmd, checksum_type = _stagefile(coption, fspec.surl, fspec.turl, fspec.filesize,
                                                                   is_stagein=False, setup=setup, **kwargs)
            fspec.status_code = 0
            fspec.status = 'transferred'
            trace_report.update(clientState='DONE', stateReason='OK', timeEnd=time())
            trace_report.send()
        except PilotException as error:
            fspec.status = 'failed'
            fspec.status_code = error.get_error_code()
            state = 'STAGEOUT_ATTEMPT_FAILED'
            diagnostics = error.get_detail()
            trace_report.update(clientState=state, stateReason=diagnostics, timeEnd=time())
            trace_report.send()
            raise PilotException(diagnostics, code=fspec.status_code, state=state)
        else:
            # compare checksums
            fspec.checksum[checksum_type] = checksum_cmd  # remote checksum
            state, diagnostics = verify_catalog_checksum(fspec, fspec.surl)
            if diagnostics != "":
                # BUG FIX: this is the stage-out path, so the fallback state
                # must be STAGEOUT_ATTEMPT_FAILED (was copy-pasted from
                # copy_in as STAGEIN_ATTEMPT_FAILED).
                trace_report.update(clientState=state or 'STAGEOUT_ATTEMPT_FAILED', stateReason=diagnostics,
                                    timeEnd=time())
                trace_report.send()
                raise PilotException(diagnostics, code=fspec.status_code, state=state)
    return files
def get_file_info_from_output(output):
    """
    Extract the file size and checksum value from xrdcp --chksum command output.
    :param output: combined stdout+stderr text produced by xrdcp
    :return: (filesize [int/None], checksum, checksum_type) or
             (None, None, None) in case of failure
    """
    if not output:
        return None, None, None
    # Quick sanity check that this looks like xrdcp checksum output at all.
    if not any(marker in output for marker in ("xrootd", "XRootD", "adler32")):
        logger.warning("WARNING: Failed to extract checksum: Unexpected output: %s" % output)
        return None, None, None
    pattern = r"(?P<type>md5|adler32):\ (?P<checksum>[a-zA-Z0-9]+)\ \S+\ (?P<filesize>[0-9]+)"  # Python 3 (added r)
    match = re.search(pattern, output)
    if not match:
        logger.warning("WARNING: Checksum/file size info not found in output: failed to match pattern=%s in output=%s" % (pattern, output))
        return None, None, None
    checksum_type = match.group('type')
    # pad to 8 chars (adler32 xrdcp fix: leading zeros may be stripped)
    checksum = match.group('checksum').zfill(8)
    filesize = None
    raw_size = match.group('filesize')
    if raw_size:
        try:
            filesize = int(raw_size)
        except ValueError as exc:
            logger.warning('failed to convert filesize to int: %s' % exc)
            filesize = None
    return filesize, checksum, checksum_type
| |
#!/usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
logtools._filter
Filter rows based on blacklists and field matching.
"""
import re
import sys
import string
import logging
from itertools import imap
from functools import partial
from operator import and_
from optparse import OptionParser
import acora
from _config import logtools_config, interpolate_config, AttrDict
import logtools.parsers
__all__ = ['logfilter_parse_args', 'logfilter',
'logfilter_main']
# Used for aho-corasick style matching on word-boundaries.
# Closely mimicks the behavior of the python re module's \w
# character set, however might diverge slightly in case of locale-
# specific character sets.
# Characters treated as word boundaries: printable chars minus letters,
# digits and underscore.
# NOTE(review): `string.letters` exists only on Python 2; on Python 3 this
# would need `string.ascii_letters` — consistent with the rest of this
# Python 2 module.
_word_boundary_chars = set(string.printable)\
    .difference(string.letters)\
    .difference(string.digits)\
    .difference(('_',))
def _is_blacklisted_re_wb(line, delimiter, field, blacklist, re_flags):
val = line.split(delimiter)[field-1]
for b in blacklist:
if re.search(r'\b{0}\b'.format(b), val, re_flags):
return True
return False
def _is_blacklisted_re(line, delimiter, field, blacklist, re_flags):
val = line.split(delimiter)[field-1]
for b in blacklist:
if re.search(r'{0}'.format(b), val, re_flags):
return True
return False
def _is_blacklisted_ac_wb(line, delimiter, field, transform_func, ac):
val = line.split(delimiter)[field-1]
L = len(val)
matches = ac.findall(transform_func(val))
for match in matches:
word, pos = match
l = len(word)
if (pos == 0 or val[pos-1] in _word_boundary_chars) and \
(pos+l == L or val[pos+l] in _word_boundary_chars):
return True
return False
def _is_blacklisted_ac(line, delimiter, field, transform_func, ac):
val = line.split(delimiter)[field-1]
matches = ac.findall(transform_func(val))
if matches:
return True
return False
def logfilter_parse_args():
    """Parse command-line options for logfilter, resolving unspecified
    options from the logtools configuration profile.
    Returns (AttrDict of options, positional args)."""
    usage = "%prog " \
        "-b <blacklist_file> " \
        "[--reverse]"
    parser = OptionParser(usage=usage)
    parser.add_option("-b", "--blacklist", dest="blacklist", default=None,
                      help="Blacklist (whitelist when in --reverse mode) file")
    parser.add_option("-I", "--ignore-case", dest="ignorecase", action="store_true",
                      help="Ignore case when matching")
    parser.add_option("-W", "--word-boundaries", dest="word_boundaries", action="store_true",
                      help="Only match on word boundaries (e.g start/end of line and/or spaces)")
    parser.add_option("-A", '--with-acora', dest='with_acora', action="store_true",
                      help="Use Aho-Corasick multiple string pattern matching instead of regexps. Suitable for whole word matching")
    parser.add_option("-r", "--reverse", dest="reverse", action="store_true",
                      help="Reverse filtering")
    parser.add_option("-p", "--print", dest="printlines", action="store_true",
                      help="Print non-filtered lines")
    parser.add_option("--parser", dest="parser",
                      help="Feed logs through a parser. Useful when reading encoded/escaped formats (e.g JSON) and when " \
                      "selecting parsed fields rather than matching via regular expression.")
    parser.add_option("-d", "--delimiter", dest="delimiter",
                      help="Delimiter character for field-separation (when not using a --parser)")
    parser.add_option("-f", "--field", dest="field",
                      help="Index of field to use for filtering against")
    parser.add_option("-P", "--profile", dest="profile", default='logfilter',
                      help="Configuration profile (section in configuration file)")
    options, args = parser.parse_args()
    # Interpolate from configuration and open filehandle
    options.field = interpolate_config(options.field, options.profile, 'field')
    options.delimiter = interpolate_config(options.delimiter, options.profile, 'delimiter', default=' ')
    options.blacklist = open(interpolate_config(options.blacklist,
                                                options.profile, 'blacklist'), "r")
    options.parser = interpolate_config(options.parser, options.profile, 'parser',
                                        default=False)
    options.reverse = interpolate_config(options.reverse,
                                         options.profile, 'reverse', default=False, type=bool)
    options.ignorecase = interpolate_config(options.ignorecase,
                                            options.profile, 'ignorecase', default=False, type=bool)
    options.word_boundaries = interpolate_config(options.word_boundaries,
                                                 options.profile, 'word_boundaries', default=False, type=bool)
    options.with_acora = interpolate_config(options.with_acora,
                                            options.profile, 'with_acora', default=False, type=bool)
    options.printlines = interpolate_config(options.printlines,
                                            options.profile, 'print', default=False, type=bool)
    if options.parser and not options.field:
        parser.error("Must supply --field parameter when using parser-based matching.")
    return AttrDict(options.__dict__), args
def logfilter(fh, blacklist, field, parser=None, reverse=False,
              delimiter=None, ignorecase=False, with_acora=False,
              word_boundaries=False, **kwargs):
    """Filter rows from a log stream using a blacklist.
    Generator: yields lines that are NOT blacklisted (or only blacklisted
    ones when `reverse` is set). Python 2 module (imap, builtin reduce).
    :param fh: iterable of input lines
    :param blacklist: iterable of blacklist entries ('#' lines are skipped)
    :param field: 1-based field index, or comma-separated field list when
                  a parser is used
    """
    # De-duplicate blacklist entries, preserving insertion order.
    blacklist = dict.fromkeys([l.strip() for l \
                               in blacklist \
                               if l and not l.startswith('#')])
    re_flags = 0
    if ignorecase:
        re_flags = re.IGNORECASE
    _is_blacklisted=None
    if with_acora is False:
        # Regular expression based matching
        if word_boundaries:
            _is_blacklisted = partial(_is_blacklisted_re_wb,
                delimiter=delimiter, field=field, blacklist=blacklist, re_flags=re_flags)
        else:
            _is_blacklisted = partial(_is_blacklisted_re,
                delimiter=delimiter, field=field, blacklist=blacklist, re_flags=re_flags)
    else:
        # Aho-Corasick multiple string pattern matching
        # using the acora Cython library
        builder = acora.AcoraBuilder(*blacklist)
        ac = builder.build()
        _transform_func = lambda x: x
        if ignorecase:
            _transform_func = lambda x: x.lower()
        if word_boundaries:
            _is_blacklisted = partial(_is_blacklisted_ac_wb,
                delimiter=delimiter, field=field, transform_func=_transform_func, ac=ac)
        else:
            _is_blacklisted = partial(_is_blacklisted_ac,
                delimiter=delimiter, field=field, transform_func=_transform_func, ac=ac)
    _is_blacklisted_func = _is_blacklisted
    if parser:
        # Custom parser specified, use field-based matching
        parser = eval(parser, vars(logtools.parsers), {})()
        fields = field.split(',')
        is_indices = reduce(and_, (k.isdigit() for k in fields), True)
        if is_indices:
            # Field index based matching
            def _is_blacklisted_func(line):
                parsed_line = parser(line)
                for field in fields:
                    if _is_blacklisted(parsed_line.by_index(field)):
                        return True
                return False
        else:
            # Named field based matching
            # NOTE(review): this branch is identical to the index-based one
            # above and still calls parsed_line.by_index(field) with a
            # non-numeric field name — looks like it should perform a named
            # lookup instead; confirm against the logtools parser API.
            def _is_blacklisted_func(line):
                parsed_line = parser(line)
                for field in fields:
                    if _is_blacklisted(parsed_line.by_index(field)):
                        return True
                return False
    num_lines=0
    num_filtered=0
    num_nomatch=0
    for line in imap(lambda x: x.strip(), fh):
        try:
            is_blacklisted = _is_blacklisted_func(line)
        except (KeyError, ValueError):
            # Parsing error
            logging.warn("No match for line: %s", line)
            num_nomatch +=1
            continue
        else:
            # XOR with `reverse`: in reverse mode, keep only blacklisted lines.
            if is_blacklisted ^ reverse:
                logging.debug("Filtering line: %s", line)
                num_filtered+=1
                continue
        num_lines+=1
        yield line
    logging.info("Number of lines after filtering: %s", num_lines)
    logging.info("Number of lines filtered: %s", num_filtered)
    if num_nomatch:
        logging.info("Number of lines could not match on: %s", num_nomatch)
    return
def logfilter_main():
    """Console entry-point. Python 2 script (print statement)."""
    options, args = logfilter_parse_args()
    if options.printlines:
        for line in logfilter(fh=sys.stdin, *args, **options):
            print line
    else:
        # Consume the generator for its side effects (summary logging only).
        for line in logfilter(fh=sys.stdin, *args, **options):
            pass
    return 0
| |
#!/usr/bin/env python
#
# Use the raw transactions API to spend bitcoins received on particular addresses,
# and send any change back to that same address.
#
# Example usage:
# spendfrom.py # Lists available funds
# spendfrom.py --from=ADDRESS --to=ADDRESS --amount=11.00
#
# Assumes it will talk to a bitcoind or Bitcoin-Qt running
# on localhost.
#
# Depends on jsonrpc
#
from decimal import *
import getpass
import math
import os
import os.path
import platform
import sys
import time
from jsonrpc import ServiceProxy, json
# Flat fee threshold used when deciding whether change is worth returning
# and when sanity-checking transaction fees.
BASE_FEE=Decimal("0.001")
def check_json_precision():
    """Make sure json library being used does not lose precision converting BTC values."""
    value = Decimal("20000000.00000003")
    roundtripped = json.loads(json.dumps(float(value)))
    # 20,000,000.00000003 BTC expressed in satoshis must survive intact.
    if int(roundtripped * 1.0e8) != 2000000000000003:
        raise RuntimeError("JSON encode/decode loses precision")
def determine_db_dir():
    """Return the default location of the bitcoin data directory."""
    system = platform.system()
    if system == "Darwin":
        return os.path.expanduser("~/Library/Application Support/Bitcoin/")
    if system == "Windows":
        return os.path.join(os.environ['APPDATA'], "Bitcoin")
    # Everything else (Linux, BSD, ...) uses the dot-directory in $HOME.
    return os.path.expanduser("~/.bitcoin")
def read_bitcoin_config(dbdir):
    """Read the bitcoin.conf file from dbdir, returns dictionary of settings"""
    from ConfigParser import SafeConfigParser  # Python 2 module name
    class FakeSecHead(object):
        # File-like wrapper that injects a fake "[all]" section header so
        # ConfigParser can parse bitcoin.conf, which has no sections.
        def __init__(self, fp):
            self.fp = fp
            self.sechead = '[all]\n'
        def readline(self):
            if self.sechead:
                try: return self.sechead
                finally: self.sechead = None
            else:
                s = self.fp.readline()
                # Strip trailing '#' comments.
                # NOTE(review): this also truncates values that legitimately
                # contain '#' (e.g. an rpcpassword with a hash) — confirm.
                if s.find('#') != -1:
                    s = s[0:s.find('#')].strip() +"\n"
                return s
    config_parser = SafeConfigParser()
    config_parser.readfp(FakeSecHead(open(os.path.join(dbdir, "bitcoin.conf"))))
    return dict(config_parser.items("all"))
def connect_JSON(config):
    """Connect to a bitcoin JSON-RPC server.
    Exits the process with an error message if the connection fails or the
    server's testnet setting does not match the configuration."""
    testnet = config.get('testnet', '0')
    testnet = (int(testnet) > 0)  # 0/1 in config file, convert to True/False
    if 'rpcport' not in config:
        config['rpcport'] = 15078 if testnet else 5078
    connect = "http://%s:%s@127.0.0.1:%s"%(config['rpcuser'], config['rpcpassword'], config['rpcport'])
    try:
        result = ServiceProxy(connect)
        # ServiceProxy is lazy-connect, so send an RPC command mostly to catch connection errors,
        # but also make sure the bitcoind we're talking to is/isn't testnet:
        if result.getmininginfo()['testnet'] != testnet:
            sys.stderr.write("RPC server at "+connect+" testnet setting mismatch\n")
            sys.exit(1)
        return result
    except Exception:
        # BUG FIX: was a bare `except:`, which also caught the SystemExit
        # raised by sys.exit(1) on the testnet-mismatch path above and
        # printed a misleading "Error connecting" message on top of it.
        sys.stderr.write("Error connecting to RPC server at "+connect+"\n")
        sys.exit(1)
def unlock_wallet(bitcoind):
    """Ensure the wallet is unlocked, prompting for a passphrase if needed.
    Returns True when the wallet is unencrypted or unlocked; False when the
    entered passphrase was wrong (callers loop until it succeeds)."""
    info = bitcoind.getinfo()
    if 'unlocked_until' not in info:
        return True # wallet is not encrypted
    t = int(info['unlocked_until'])
    if t <= time.time():
        try:
            passphrase = getpass.getpass("Wallet is locked; enter passphrase: ")
            bitcoind.walletpassphrase(passphrase, 5)
        except Exception:
            # BUG FIX: was a bare `except:`, which also swallowed
            # KeyboardInterrupt raised while the user sits at the
            # getpass prompt; narrowed so Ctrl-C still aborts.
            sys.stderr.write("Wrong passphrase\n")
    info = bitcoind.getinfo()
    return int(info['unlocked_until']) > time.time()
def list_available(bitcoind):
    """Group unspent outputs (confirmations >= 0) by receiving address.
    Returns {address: {"total": value, "outputs": [unspent...],
    "account": account_name}}."""
    account_by_address = dict()
    for received in bitcoind.listreceivedbyaddress(0):
        account_by_address[received["address"]] = received["account"]
    summary = dict()
    for unspent in bitcoind.listunspent(0):
        # listunspent doesn't give addresses, so look them up via the raw tx.
        decoded = bitcoind.getrawtransaction(unspent['txid'], 1)
        txout = decoded["vout"][unspent['vout']]
        script = txout["scriptPubKey"]
        # This code only deals with ordinary pay-to-bitcoin-address
        # or pay-to-script-hash outputs right now; anything exotic is ignored.
        if script["type"] not in ("pubkeyhash", "scripthash"):
            continue
        address = script["addresses"][0]
        entry = summary.get(address)
        if entry is None:
            summary[address] = {
                "total" : txout["value"],
                "outputs" : [unspent],
                "account" : account_by_address.get(address, "")
                }
        else:
            entry["total"] += txout["value"]
            entry["outputs"].append(unspent)
    return summary
def select_coins(needed, inputs):
    """Greedily pick unspent outputs (in order) until `needed` is covered.
    Returns (selected_outputs, change) where change = gathered - needed;
    change is negative when the inputs are insufficient."""
    # Feel free to improve this, this is good enough for my simple needs:
    chosen = []
    gathered = Decimal("0.0")
    idx = 0
    while gathered < needed and idx < len(inputs):
        candidate = inputs[idx]
        chosen.append({ "txid":candidate["txid"], "vout":candidate["vout"]})
        gathered += candidate["amount"]
        idx += 1
    return (chosen, gathered-needed)
def create_tx(bitcoind, fromaddresses, toaddress, amount, fee):
    """Build and sign a raw transaction spending amount+fee from
    `fromaddresses` to `toaddress`, sending any change back to the last
    from-address. Returns the signed transaction hex; exits the process
    on insufficient funds or signing failure."""
    all_coins = list_available(bitcoind)
    total_available = Decimal("0.0")
    needed = amount+fee
    potential_inputs = []
    for addr in fromaddresses:
        if addr not in all_coins:
            continue
        potential_inputs.extend(all_coins[addr]["outputs"])
        total_available += all_coins[addr]["total"]
    if total_available < needed:
        sys.stderr.write("Error, only %f BTC available, need %f\n"%(total_available, needed));
        sys.exit(1)
    #
    # Note:
    # Python's json/jsonrpc modules have inconsistent support for Decimal numbers.
    # Instead of wrestling with getting json.dumps() (used by jsonrpc) to encode
    # Decimals, I'm casting amounts to float before sending them to bitcoind.
    #
    outputs = { toaddress : float(amount) }
    (inputs, change_amount) = select_coins(needed, potential_inputs)
    if change_amount > BASE_FEE: # don't bother with zero or tiny change
        change_address = fromaddresses[-1]
        if change_address in outputs:
            outputs[change_address] += float(change_amount)
        else:
            outputs[change_address] = float(change_amount)
    rawtx = bitcoind.createrawtransaction(inputs, outputs)
    signed_rawtx = bitcoind.signrawtransaction(rawtx)
    if not signed_rawtx["complete"]:
        sys.stderr.write("signrawtransaction failed\n")
        sys.exit(1)
    txdata = signed_rawtx["hex"]
    return txdata
def compute_amount_in(bitcoind, txinfo):
    """Sum the values of all previous outputs referenced by txinfo['vin'],
    looking each one up via getrawtransaction."""
    total = Decimal("0.0")
    for txin in txinfo['vin']:
        prev_tx = bitcoind.getrawtransaction(txin['txid'], 1)
        total += prev_tx['vout'][txin['vout']]['value']
    return total
def compute_amount_out(txinfo):
    """Sum the value of every output of a decoded transaction."""
    return sum((txout['value'] for txout in txinfo['vout']), Decimal("0.0"))
def sanity_test_fee(bitcoind, txdata_hex, max_fee):
    """Sanity-check the fee implied by a raw transaction: reject fees above
    `max_fee`, and reject fee-less large or tiny-amount transactions.
    Exits the process with an error message when a check fails."""
    class FeeError(RuntimeError):
        pass
    try:
        txinfo = bitcoind.decoderawtransaction(txdata_hex)
        total_in = compute_amount_in(bitcoind, txinfo)
        total_out = compute_amount_out(txinfo)
        # The fee is whatever the inputs pay beyond the outputs.
        actual_fee = total_in-total_out
        if actual_fee > max_fee:
            raise FeeError("Rejecting transaction, unreasonable fee of "+str(actual_fee))
        tx_size = len(txdata_hex)/2
        kb = tx_size/1000  # integer division rounds down
        # BUG FIX: the original compared against an undefined name `fee`,
        # raising NameError (uncaught, since only FeeError is handled) for
        # any transaction over 1000 bytes or with total_in < 0.01.
        if kb > 1 and actual_fee < BASE_FEE:
            raise FeeError("Rejecting no-fee transaction, larger than 1000 bytes")
        if total_in < 0.01 and actual_fee < BASE_FEE:
            raise FeeError("Rejecting no-fee, tiny-amount transaction")
        # Exercise for the reader: compute transaction priority, and
        # warn if this is a very-low-priority transaction
    except FeeError as err:
        sys.stderr.write((str(err)+"\n"))
        sys.exit(1)
def main():
    """Command-line driver: with no --amount, list spendable funds per
    address; otherwise build, sign and (unless --dry_run) broadcast a
    transaction. Python 2 script (dict.iteritems)."""
    import optparse
    parser = optparse.OptionParser(usage="%prog [options]")
    parser.add_option("--from", dest="fromaddresses", default=None,
                      help="addresses to get bitcoins from")
    parser.add_option("--to", dest="to", default=None,
                      help="address to get send bitcoins to")
    parser.add_option("--amount", dest="amount", default=None,
                      help="amount to send")
    parser.add_option("--fee", dest="fee", default="0.0",
                      help="fee to include")
    parser.add_option("--datadir", dest="datadir", default=determine_db_dir(),
                      help="location of bitcoin.conf file with RPC username/password (default: %default)")
    parser.add_option("--testnet", dest="testnet", default=False, action="store_true",
                      help="Use the test network")
    parser.add_option("--dry_run", dest="dry_run", default=False, action="store_true",
                      help="Don't broadcast the transaction, just create and print the transaction data")
    (options, args) = parser.parse_args()
    check_json_precision()
    config = read_bitcoin_config(options.datadir)
    if options.testnet: config['testnet'] = True
    bitcoind = connect_JSON(config)
    if options.amount is None:
        # No amount given: just list the available funds per address.
        address_summary = list_available(bitcoind)
        for address,info in address_summary.iteritems():
            n_transactions = len(info['outputs'])
            if n_transactions > 1:
                print("%s %.8f %s (%d transactions)"%(address, info['total'], info['account'], n_transactions))
            else:
                print("%s %.8f %s"%(address, info['total'], info['account']))
    else:
        fee = Decimal(options.fee)
        amount = Decimal(options.amount)
        while unlock_wallet(bitcoind) == False:
            pass # Keep asking for passphrase until they get it right
        txdata = create_tx(bitcoind, options.fromaddresses.split(","), options.to, amount, fee)
        # Cap the acceptable fee at 1% of the amount being sent.
        sanity_test_fee(bitcoind, txdata, amount*Decimal("0.01"))
        if options.dry_run:
            print(txdata)
        else:
            txid = bitcoind.sendrawtransaction(txdata)
            print(txid)
if __name__ == '__main__':
main()
| |
# -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Implementation of the REST interface between the workers and the server.
rpc.py implements the client side of it, server.py implements the server side.
See :doc:`/central_scheduler` for more info.
"""
import json
import logging
import socket
import time
from luigi.six.moves.urllib.parse import urljoin, urlencode, urlparse
from luigi.six.moves.urllib.request import urlopen
from luigi.six.moves.urllib.error import URLError
from luigi import configuration
from luigi.scheduler import PENDING, Scheduler
# Feature detection: prefer requests-unixsocket (which wraps the requests API
# and adds 'http+unix://' scheme support), fall back to plain requests, and
# finally to stdlib urllib when neither package is installed.
HAS_UNIX_SOCKET = True
HAS_REQUESTS = True
try:
    import requests_unixsocket as requests
except ImportError:
    HAS_UNIX_SOCKET = False
try:
    import requests
except ImportError:
    HAS_REQUESTS = False
logger = logging.getLogger('luigi-interface')  # TODO: 'interface'?
def _urljoin(base, url):
"""
Join relative URLs to base URLs like urllib.parse.urljoin but support
arbitrary URIs (esp. 'http+unix://').
"""
parsed = urlparse(base)
scheme = parsed.scheme
return urlparse(
urljoin(parsed._replace(scheme='http').geturl(), url)
)._replace(scheme=scheme).geturl()
class RPCError(Exception):
    """Error while talking to the remote scheduler; wraps the low-level cause."""

    def __init__(self, message, sub_exception=None):
        super(RPCError, self).__init__(message)
        # Keep the underlying connection error around for callers that care.
        self.sub_exception = sub_exception
class URLLibFetcher(object):
    """Fetcher backed by stdlib urllib; used when 'requests' is unavailable."""

    # Exception types the caller should treat as retryable connection errors.
    raises = (URLError, socket.timeout)

    def fetch(self, full_url, body, timeout):
        """POST the form-encoded body to full_url and return the decoded text."""
        encoded_body = urlencode(body).encode('utf-8')
        response = urlopen(full_url, encoded_body, timeout)
        return response.read().decode('utf-8')
class RequestsFetcher(object):
    """Fetcher backed by a shared requests session (reuses connections)."""

    def __init__(self, session):
        # Imported lazily so this module loads even without requests installed.
        from requests import exceptions as requests_exceptions
        # Exception type the caller should treat as a retryable connection error.
        self.raises = requests_exceptions.RequestException
        self.session = session

    def fetch(self, full_url, body, timeout):
        """Issue the request and return the response text; raises on HTTP errors."""
        resp = self.session.get(full_url, data=body, timeout=timeout)
        resp.raise_for_status()
        return resp.text
class RemoteScheduler(Scheduler):
    """
    Scheduler proxy object. Talks to a RemoteSchedulerResponder.

    Every public method maps to one '/api/...' endpoint on the central
    scheduler; requests are JSON-encoded and retried on connection errors.
    """

    def __init__(self, url='http://localhost:8082/', connect_timeout=None):
        # Unix-socket URLs only work when requests-unixsocket is importable.
        assert not url.startswith('http+unix://') or HAS_UNIX_SOCKET, (
            'You need to install requests-unixsocket for Unix socket support.'
        )
        self._url = url.rstrip('/')
        config = configuration.get_config()
        if connect_timeout is None:
            connect_timeout = config.getfloat('core', 'rpc-connect-timeout', 10.0)
        self._connect_timeout = connect_timeout
        # Prefer a pooled requests session when available, else stdlib urllib.
        if HAS_REQUESTS:
            self._fetcher = RequestsFetcher(requests.Session())
        else:
            self._fetcher = URLLibFetcher()

    def _wait(self):
        # Pause between retries; kept as a method so tests can stub it out.
        time.sleep(30)

    def _fetch(self, url_suffix, body, log_exceptions=True, attempts=3):
        """Send body to url_suffix, retrying connection errors up to `attempts`
        times; raises RPCError (wrapping the last error) if all attempts fail.
        """
        full_url = _urljoin(self._url, url_suffix)
        last_exception = None
        attempt = 0
        while attempt < attempts:
            attempt += 1
            if last_exception:
                logger.info("Retrying...")
                self._wait()  # wait for a bit and retry
            try:
                response = self._fetcher.fetch(full_url, body, self._connect_timeout)
                break
            except self._fetcher.raises as e:
                last_exception = e
                if log_exceptions:
                    logger.exception("Failed connecting to remote scheduler %r", self._url)
                continue
        else:
            # while-loop exhausted without a successful fetch.
            raise RPCError(
                "Errors (%d attempts) when connecting to remote scheduler %r" %
                (attempts, self._url),
                last_exception
            )
        return response

    def _request(self, url, data, log_exceptions=True, attempts=3, allow_null=True):
        """Call a scheduler endpoint and return the decoded 'response' field.

        When allow_null is False, null responses are re-requested up to
        `attempts` times (note each _fetch call also retries connection
        errors internally) before raising RPCError.
        """
        body = {'data': json.dumps(data)}
        for _ in range(attempts):
            page = self._fetch(url, body, log_exceptions, attempts)
            response = json.loads(page)["response"]
            if allow_null or response is not None:
                return response
        raise RPCError("Received null response from remote scheduler %r" % self._url)

    def ping(self, worker):
        # just one attempt, keep-alive thread will keep trying anyway
        self._request('/api/ping', {'worker': worker}, attempts=1)

    def add_task(self, worker, task_id, status=PENDING, runnable=True,
                 deps=None, new_deps=None, expl=None, resources=None, priority=0,
                 family='', module=None, params=None, assistant=False,
                 tracking_url=None):
        """Register (or update the status of) a task with the scheduler."""
        self._request('/api/add_task', {
            'task_id': task_id,
            'worker': worker,
            'status': status,
            'runnable': runnable,
            'deps': deps,
            'new_deps': new_deps,
            'expl': expl,
            'resources': resources,
            'priority': priority,
            'family': family,
            'module': module,
            'params': params,
            'assistant': assistant,
            'tracking_url': tracking_url,
        })

    def get_work(self, worker, host=None, assistant=False, current_tasks=None):
        # A null answer would stall the worker, so require a real response.
        return self._request(
            '/api/get_work',
            {
                'worker': worker,
                'host': host,
                'assistant': assistant,
                'current_tasks': current_tasks,
            },
            allow_null=False,
        )

    def graph(self):
        return self._request('/api/graph', {})

    def dep_graph(self, task_id, include_done=True):
        return self._request('/api/dep_graph', {'task_id': task_id, 'include_done': include_done})

    def inverse_dep_graph(self, task_id, include_done=True):
        return self._request('/api/inverse_dep_graph', {
            'task_id': task_id, 'include_done': include_done})

    def task_list(self, status, upstream_status, search=None):
        return self._request('/api/task_list', {
            'search': search,
            'status': status,
            'upstream_status': upstream_status,
        })

    def worker_list(self):
        return self._request('/api/worker_list', {})

    def resource_list(self):
        return self._request('/api/resource_list', {})

    def task_search(self, task_str):
        return self._request('/api/task_search', {'task_str': task_str})

    def fetch_error(self, task_id):
        return self._request('/api/fetch_error', {'task_id': task_id})

    def add_worker(self, worker, info):
        return self._request('/api/add_worker', {'worker': worker, 'info': info})

    def disable_worker(self, worker):
        return self._request('/api/disable_worker', {'worker': worker})

    def update_resources(self, **resources):
        return self._request('/api/update_resources', resources)

    def prune(self):
        return self._request('/api/prune', {})

    def re_enable_task(self, task_id):
        return self._request('/api/re_enable_task', {'task_id': task_id})

    def set_task_status_message(self, task_id, status_message):
        self._request('/api/set_task_status_message', {
            'task_id': task_id,
            'status_message': status_message
        })

    def get_task_status_message(self, task_id):
        return self._request('/api/get_task_status_message', {'task_id': task_id})
| |
import datetime
import mongoengine as mongo
import httplib2
import pickle
import base64
from StringIO import StringIO
from oauth2client.client import Error as OAuthError
from xml.etree.ElementTree import Element, SubElement, Comment, tostring
from lxml import etree
from django.db import models
from django.contrib.auth.models import User
from mongoengine.queryset import OperationError
import vendor.opml as opml
from apps.rss_feeds.models import Feed, DuplicateFeed, MStarredStory
from apps.reader.models import UserSubscription, UserSubscriptionFolders
from utils import json_functions as json, urlnorm
from utils import log as logging
from utils.feed_functions import timelimit
from utils.feed_functions import add_object_to_folder
from south.modelsinspector import add_introspection_rules
# Teach South's model inspector about oauth2client's custom Django fields so
# schema migrations can serialize models that use them.
add_introspection_rules([], ["^oauth2client\.django_orm\.FlowField"])
add_introspection_rules([], ["^oauth2client\.django_orm\.CredentialsField"])
class OAuthToken(models.Model):
    """OAuth tokens/credentials used when importing a user's Google Reader data."""

    # The token may be created before login, so `user` is optional; the
    # session id / uuid / remote ip then identify the anonymous requester.
    user = models.OneToOneField(User, null=True, blank=True)
    session_id = models.CharField(max_length=50, null=True, blank=True)
    uuid = models.CharField(max_length=50, null=True, blank=True)
    remote_ip = models.CharField(max_length=50, null=True, blank=True)
    request_token = models.CharField(max_length=50)
    request_token_secret = models.CharField(max_length=50)
    access_token = models.CharField(max_length=50)
    access_token_secret = models.CharField(max_length=50)
    # Base64-encoded pickled oauth2client credential blob.
    credential = models.TextField(null=True, blank=True)
    created_date = models.DateTimeField(default=datetime.datetime.now)
class Importer:
    """Base class for feed importers; subclasses are expected to set self.user."""

    def clear_feeds(self):
        # Drop every subscription this user has.
        UserSubscription.objects.filter(user=self.user).delete()

    def clear_folders(self):
        # Drop the user's folder structure.
        UserSubscriptionFolders.objects.filter(user=self.user).delete()

    def get_folders(self):
        # Load (creating if missing) the user's folder structure; the model
        # instance is cached on self.usf so callers can mutate and save it.
        self.usf, _ = UserSubscriptionFolders.objects.get_or_create(user=self.user,
                                                                    defaults={'folders': '[]'})
        return json.decode(self.usf.folders)
class OPMLExporter(Importer):
    """Serializes a user's subscriptions and folders into an OPML document."""

    def __init__(self, user):
        self.user = user
        self.fetch_feeds()

    def process(self, verbose=False):
        """Build and return the full OPML document as an XML string."""
        now = str(datetime.datetime.now())
        root = Element('opml')
        root.set('version', '1.1')
        root.append(Comment('Generated by NewsBlur - www.newsblur.com'))
        head = SubElement(root, 'head')
        title = SubElement(head, 'title')
        title.text = 'NewsBlur Feeds'
        dc = SubElement(head, 'dateCreated')
        dc.text = now
        dm = SubElement(head, 'dateModified')
        dm.text = now
        folders = self.get_folders()
        body = SubElement(root, 'body')
        self.process_outline(body, folders, verbose=verbose)
        return tostring(root)

    def process_outline(self, body, folders, verbose=False):
        """Recursively append <outline> elements for feeds (ints) and
        folders (dicts of title -> children) to `body`; returns `body`."""
        for obj in folders:
            if isinstance(obj, int) and obj in self.feeds:
                feed = self.feeds[obj]
                if verbose:
                    print " ---> Adding feed: %s - %s" % (feed['id'],
                                                          feed['feed_title'][:30])
                feed_attrs = self.make_feed_row(feed)
                body.append(Element('outline', feed_attrs))
            elif isinstance(obj, dict):
                for folder_title, folder_objs in obj.items():
                    if verbose:
                        print " ---> Adding folder: %s" % folder_title
                    folder_element = Element('outline', {'text': folder_title, 'title': folder_title})
                    # process_outline returns its first argument, so this
                    # appends the populated folder element to the parent.
                    body.append(self.process_outline(folder_element, folder_objs, verbose=verbose))
        return body

    def make_feed_row(self, feed):
        """Return the attribute dict for one feed's <outline> element."""
        feed_attrs = {
            'text': feed['feed_title'],
            'title': feed['feed_title'],
            'type': 'rss',
            'version': 'RSS',
            'htmlUrl': feed['feed_link'] or "",
            'xmlUrl': feed['feed_address'] or "",
        }
        return feed_attrs

    def fetch_feeds(self):
        """Populate self.feeds as {feed_id: canonical feed dict}."""
        subs = UserSubscription.objects.filter(user=self.user)
        self.feeds = []
        for sub in subs:
            try:
                self.feeds.append((sub.feed_id, sub.canonical()))
            except Feed.DoesNotExist:
                # Skip subscriptions whose feed has been deleted.
                continue
        self.feeds = dict(self.feeds)
class OPMLImporter(Importer):
    """Imports feeds and folders for a user from an uploaded OPML document."""

    def __init__(self, opml_xml, user):
        self.user = user
        self.opml_xml = opml_xml

    @timelimit(10)
    def try_processing(self):
        # Time-boxed wrapper; raises if processing exceeds 10 seconds.
        folders = self.process()
        return folders

    def process(self):
        """Parse the OPML and merge its outline into the user's folders.

        Returns the new folder structure, or None when the OPML is malformed.
        """
        # self.clear_feeds()
        outline = opml.from_string(self.opml_xml)
        folders = self.get_folders()
        try:
            folders = self.process_outline(outline, folders)
        except AttributeError:
            # Malformed outline entries surface as AttributeError.
            folders = None
        else:
            # self.clear_folders()
            self.usf.folders = json.encode(folders)
            self.usf.save()
        return folders

    def process_outline(self, outline, folders, in_folder=''):
        """Recursively walk the OPML outline, creating folders for container
        nodes and subscriptions for feed nodes; returns the folder structure."""
        for item in outline:
            if (not hasattr(item, 'xmlUrl') and
                (hasattr(item, 'text') or hasattr(item, 'title'))):
                # Container node: create a folder and recurse into it.
                folder = item
                title = getattr(item, 'text', None) or getattr(item, 'title', None)
                # if hasattr(folder, 'text'):
                #     logging.info(' ---> [%s] ~FRNew Folder: %s' % (self.user, folder.text))
                obj = {title: []}
                folders = add_object_to_folder(obj, in_folder, folders)
                folders = self.process_outline(folder, folders, title)
            elif hasattr(item, 'xmlUrl'):
                # Feed node: normalize its URLs and subscribe the user.
                feed = item
                if not hasattr(feed, 'htmlUrl'):
                    setattr(feed, 'htmlUrl', None)
                # If feed title matches what's in the DB, don't override it on subscription.
                feed_title = getattr(feed, 'title', None) or getattr(feed, 'text', None)
                if not feed_title:
                    setattr(feed, 'title', feed.htmlUrl or feed.xmlUrl)
                    user_feed_title = None
                else:
                    setattr(feed, 'title', feed_title)
                    user_feed_title = feed.title
                feed_address = urlnorm.normalize(feed.xmlUrl)
                feed_link = urlnorm.normalize(feed.htmlUrl)
                # Skip feeds whose URLs would not fit the DB columns.
                if len(feed_address) > Feed._meta.get_field('feed_address').max_length:
                    continue
                if feed_link and len(feed_link) > Feed._meta.get_field('feed_link').max_length:
                    continue
                # logging.info(' ---> \t~FR%s - %s - %s' % (feed.title, feed_link, feed_address,))
                feed_data = dict(feed_address=feed_address, feed_link=feed_link, feed_title=feed.title)
                # feeds.append(feed_data)
                # See if it exists as a duplicate first
                duplicate_feed = DuplicateFeed.objects.filter(duplicate_address=feed_address)
                if duplicate_feed:
                    feed_db = duplicate_feed[0].feed
                else:
                    feed_data['active_subscribers'] = 1
                    feed_data['num_subscribers'] = 1
                    feed_db, _ = Feed.find_or_create(feed_address=feed_address,
                                                     feed_link=feed_link,
                                                     defaults=dict(**feed_data))
                if user_feed_title == feed_db.feed_title:
                    # Custom title identical to the canonical one: drop it.
                    user_feed_title = None
                us, _ = UserSubscription.objects.get_or_create(
                    feed=feed_db,
                    user=self.user,
                    defaults={
                        'needs_unread_recalc': True,
                        'mark_read_date': datetime.datetime.utcnow() - datetime.timedelta(days=1),
                        'active': self.user.profile.is_premium,
                        'user_title': user_feed_title
                    }
                )
                if self.user.profile.is_premium and not us.active:
                    us.active = True
                    us.save()
                if not us.needs_unread_recalc:
                    us.needs_unread_recalc = True
                    us.save()
                folders = add_object_to_folder(feed_db.pk, in_folder, folders)
        return folders

    def count_feeds_in_opml(self):
        # Best-effort size estimate: the larger of top-level OPML entries and
        # the user's current subscription count.
        opml_count = len(opml.from_string(self.opml_xml))
        sub_count = UserSubscription.objects.filter(user=self.user).count()
        return max(sub_count, opml_count)
class UploadedOPML(mongo.Document):
    """Archive of a raw OPML file a user uploaded, kept for auditing/re-import."""

    user_id = mongo.IntField()
    opml_file = mongo.StringField()
    upload_date = mongo.DateTimeField(default=datetime.datetime.now)

    def __unicode__(self):
        user = User.objects.get(pk=self.user_id)
        return "%s: %s characters" % (user.username, len(self.opml_file))

    meta = {
        'collection': 'uploaded_opml',
        'allow_inheritance': False,
        'order': '-upload_date',
        'indexes': ['user_id', '-upload_date'],
    }
class GoogleReaderImporter(Importer):
    """Imports a user's Google Reader subscriptions and starred stories,
    either via the authenticated Reader API or from a provided XML blob."""

    def __init__(self, user, xml=None):
        self.user = user
        self.scope = "http://www.google.com/reader/api"
        self.xml = xml
        self.auto_active = False

    @timelimit(10)
    def try_import_feeds(self, auto_active=False):
        """Time-boxed import; returns 1 on success, -1 on assertion failure."""
        code = 0
        try:
            self.import_feeds(auto_active=auto_active)
            self.import_starred_items(count=10)
        except AssertionError:
            code = -1
        else:
            code = 1
        return code

    def import_feeds(self, auto_active=False):
        """Fetch (or reuse provided) subscription XML and import it."""
        self.auto_active = auto_active
        sub_url = "%s/0/subscription/list" % self.scope
        if not self.xml:
            feeds_xml = self.send_request(sub_url)
        else:
            feeds_xml = self.xml
        if feeds_xml:
            self.process_feeds(feeds_xml)

    def send_request(self, url):
        """GET a Reader API url with the user's stored OAuth credential.

        Returns the response body, or None when no usable credential exists.
        """
        if not self.user.is_authenticated():
            return
        user_tokens = OAuthToken.objects.filter(user=self.user)
        if user_tokens.count():
            user_token = user_tokens[0]
            if user_token.credential:
                # Credential is stored as a base64-encoded pickle.
                credential = pickle.loads(base64.b64decode(user_token.credential))
                http = httplib2.Http()
                http = credential.authorize(http)
                content = http.request(url)
                # httplib2 returns (headers, body); hand back the body.
                return content and content[1]

    def process_feeds(self, feeds_xml):
        """Parse the subscription XML and merge every feed into the folders."""
        # self.clear_feeds()
        # self.clear_folders()
        folders = self.get_folders()
        self.feeds = self.parse(feeds_xml)
        for item in self.feeds:
            folders = self.process_item(item, folders)
        logging.user(self.user, "~BB~FW~SBGoogle Reader import: ~BT~FW%s" % (folders))
        self.usf.folders = json.encode(folders)
        self.usf.save()

    def parse(self, feeds_xml):
        # recover=True lets lxml tolerate slightly malformed Reader XML.
        parser = etree.XMLParser(recover=True)
        tree = etree.parse(StringIO(feeds_xml), parser)
        feeds = tree.xpath('/object/list/object')
        return feeds

    def process_item(self, item, folders):
        """Create/find the feed and subscription for one Reader entry and
        place it in the right folder; returns the updated folder structure."""
        feed_title = item.xpath('./string[@name="title"]') and \
            item.xpath('./string[@name="title"]')[0].text
        feed_address = item.xpath('./string[@name="id"]') and \
            item.xpath('./string[@name="id"]')[0].text.replace('feed/', '')
        feed_link = item.xpath('./string[@name="htmlUrl"]') and \
            item.xpath('./string[@name="htmlUrl"]')[0].text
        category = item.xpath('./list[@name="categories"]/object/string[@name="label"]') and \
            item.xpath('./list[@name="categories"]/object/string[@name="label"]')[0].text
        if not feed_address:
            feed_address = feed_link
        try:
            feed_link = urlnorm.normalize(feed_link)
            feed_address = urlnorm.normalize(feed_address)
            if len(feed_address) > Feed._meta.get_field('feed_address').max_length:
                return folders
            # See if it exists as a duplicate first
            duplicate_feed = DuplicateFeed.objects.filter(duplicate_address=feed_address)
            if duplicate_feed:
                feed_db = duplicate_feed[0].feed
            else:
                feed_data = dict(feed_title=feed_title)
                feed_data['active_subscribers'] = 1
                feed_data['num_subscribers'] = 1
                feed_db, _ = Feed.find_or_create(feed_address=feed_address, feed_link=feed_link,
                                                 defaults=dict(**feed_data))
            us, _ = UserSubscription.objects.get_or_create(
                feed=feed_db,
                user=self.user,
                defaults={
                    'needs_unread_recalc': True,
                    'mark_read_date': datetime.datetime.utcnow() - datetime.timedelta(days=1),
                    'active': self.user.profile.is_premium or self.auto_active,
                }
            )
            if not us.needs_unread_recalc:
                us.needs_unread_recalc = True
                us.save()
            if not category: category = ""
            if category:
                obj = {category: []}
                folders = add_object_to_folder(obj, '', folders)
            folders = add_object_to_folder(feed_db.pk, category, folders)
            # if feed_db.pk not in folders[category]:
            #     folders[category].append(feed_db.pk)
        except Exception, e:
            # Best-effort import: log and keep going with the next item.
            logging.info(' *** -> Exception: %s: %s' % (e, item))
        return folders

    def test(self):
        """Return a token response when the stored credential works, else False."""
        sub_url = "%s/0/token" % (self.scope)
        try:
            resp = self.send_request(sub_url)
        except OAuthError:
            return False
        return resp

    @timelimit(120)
    def try_import_starred_stories(self):
        # Time-boxed bulk import of up to 1000 starred stories per page.
        self.import_starred_items(count=1000)
        starred_count = MStarredStory.objects.filter(user_id=self.user.pk).count()
        return starred_count

    def import_starred_items(self, count=10):
        """Page through the Reader starred-items stream, saving each batch.

        Returns the user's resulting starred-story count.
        """
        continuation = ''
        while True:
            if continuation:
                sub_url = "%s/0/stream/contents/user/-/state/com.google/starred?n=%s&c=%s" % (self.scope, count, continuation)
            else:
                sub_url = "%s/0/stream/contents/user/-/state/com.google/starred?n=%s" % (self.scope, count)
            stories_str = self.send_request(sub_url)
            try:
                stories = json.decode(stories_str)
                continuation = stories.get('continuation')
            except:
                logging.user(self.user, "~BB~FW~SBGoogle Reader starred stories: ~BT~FWNo stories")
                stories = None
            if stories:
                logging.user(self.user, "~BB~FW~SBGoogle Reader starred stories: ~BT~FW%s stories" % (len(stories['items'])))
                self.process_starred_items(stories['items'])
            # Only full-size (count >= 1000) imports keep following pages.
            if not continuation or count < 1000:
                break
        starred_count = MStarredStory.objects.filter(user_id=self.user.pk).count()
        return starred_count

    def process_starred_items(self, stories):
        """Persist a batch of starred stories; returns created/existed/failed counts."""
        counts = {
            'created': 0,
            'existed': 0,
            'failed': 0,
        }
        logging.user(self.user, "~FCBeginning starring...")
        for story in stories:
            try:
                original_feed = Feed.get_feed_from_url(story['origin']['streamId'], create=False, fetch=False)
                if not original_feed:
                    original_feed = Feed.get_feed_from_url(story['origin']['htmlUrl'], create=False, fetch=False)
                content = story.get('content') or story.get('summary')
                story_db = {
                    "user_id": self.user.pk,
                    "starred_date": datetime.datetime.fromtimestamp(story['updated']),
                    "story_date": datetime.datetime.fromtimestamp(story['published']),
                    "story_title": story.get('title', story.get('origin', {}).get('title', '[Untitled]')),
                    "story_permalink": story['alternate'][0]['href'],
                    "story_guid": story['id'],
                    "story_content": content.get('content'),
                    "story_author_name": story.get('author'),
                    "story_feed_id": original_feed and original_feed.pk,
                    "story_tags": [tag for tag in story.get('categories', []) if 'user/' not in tag]
                }
                # logging.user(self.user, "~FCStarring: ~SB%s~SN in ~SB%s" % (story_db['story_title'][:50], original_feed and original_feed))
                MStarredStory.objects.create(**story_db)
                counts['created'] += 1
            except OperationError:
                # Duplicate key: the story was already starred.
                # logging.user(self.user, "~FCAlready starred: ~SB%s" % (story_db['story_title'][:50]))
                counts['existed'] += 1
            except Exception:
                # logging.user(self.user, "~FC~BRFailed to star: ~SB%s / %s" % (story, e))
                counts['failed'] += 1
        logging.user(self.user, "~FCStarred: ~SB%s~SN/~SB%s%s~SN/~SB%s%s~SN" % (
            counts['created'],
            '~FM' if counts['existed'] else '~SN', counts['existed'],
            '~FR' if counts['failed'] else '~SN', counts['failed']))
        return counts
| |
"""
Example generation for the scikit learn
Generate the rst files for the examples by iterating over the python
example files.
Files that generate images should start with 'plot'
"""
from __future__ import division
from time import time
import os
import re
import shutil
import traceback
import glob
import sys
import gzip
import posixpath
import subprocess
# Try Python 2 first, otherwise load from Python 3
try:
from StringIO import StringIO
import cPickle as pickle
import urllib2 as urllib
from urllib2 import HTTPError, URLError
except ImportError:
from io import StringIO
import pickle
import urllib.request
import urllib.error
import urllib.parse
from urllib.error import HTTPError, URLError
try:
from PIL import Image
except ImportError:
import Image
import matplotlib
matplotlib.use('Agg')
import token
import tokenize
import numpy as np
from sklearn.externals import joblib
###############################################################################
# A tee object to redirect streams to multiple outputs
class Tee(object):
    """File-like object that duplicates every write into two underlying streams."""

    def __init__(self, file1, file2):
        self.file1 = file1
        self.file2 = file2

    def write(self, data):
        """Write data to both streams."""
        for stream in (self.file1, self.file2):
            stream.write(data)

    def flush(self):
        """Flush both streams."""
        for stream in (self.file1, self.file2):
            stream.flush()
###############################################################################
# Documentation link resolver objects
def _get_data(url):
    """Helper function to get data over http or from a local file"""
    if url.startswith('http://'):
        # Try Python 2, use Python 3 on exception
        try:
            resp = urllib.urlopen(url)
            encoding = resp.headers.dict.get('content-encoding', 'plain')
        except AttributeError:
            resp = urllib.request.urlopen(url)
            encoding = resp.headers.get('content-encoding', 'plain')
        data = resp.read()
        if encoding == 'plain':
            pass
        elif encoding == 'gzip':
            # Transparently decompress gzip-encoded responses.
            data = StringIO(data)
            data = gzip.GzipFile(fileobj=data).read()
        else:
            raise RuntimeError('unknown encoding')
    else:
        with open(url, 'r') as fid:
            data = fid.read()
        fid.close()  # NOTE(review): redundant — the with-block already closed the file
    return data
# Cache downloads on disk across documentation builds; get_data is the
# memoized entry point wrapping _get_data.
mem = joblib.Memory(cachedir='_build')
get_data = mem.cache(_get_data)
def parse_sphinx_searchindex(searchindex):
    """Parse a Sphinx search index

    Parameters
    ----------
    searchindex : str
        The Sphinx search index (contents of searchindex.js)

    Returns
    -------
    filenames : list of str
        The file names parsed from the search index.
    objects : dict
        The objects parsed from the search index.
    """
    def _select_block(str_in, start_tag, end_tag):
        """Select first block delimited by start_tag and end_tag"""
        start_pos = str_in.find(start_tag)
        if start_pos < 0:
            raise ValueError('start_tag not found')
        depth = 0
        # Scan forward tracking nesting depth so inner blocks are skipped.
        for pos in range(start_pos, len(str_in)):
            if str_in[pos] == start_tag:
                depth += 1
            elif str_in[pos] == end_tag:
                depth -= 1
                if depth == 0:
                    break
        sel = str_in[start_pos + 1:pos]
        return sel

    def _parse_dict_recursive(dict_str):
        """Parse a dictionary from the search index"""
        dict_out = dict()
        pos_last = 0
        pos = dict_str.find(':')
        while pos >= 0:
            key = dict_str[pos_last:pos]
            if dict_str[pos + 1] == '[':
                # value is a list
                pos_tmp = dict_str.find(']', pos + 1)
                if pos_tmp < 0:
                    raise RuntimeError('error when parsing dict')
                value = dict_str[pos + 2: pos_tmp].split(',')
                # try to convert elements to int
                for i in range(len(value)):
                    try:
                        value[i] = int(value[i])
                    except ValueError:
                        pass
            elif dict_str[pos + 1] == '{':
                # value is another dictionary
                subdict_str = _select_block(dict_str[pos:], '{', '}')
                value = _parse_dict_recursive(subdict_str)
                pos_tmp = pos + len(subdict_str)
            else:
                raise ValueError('error when parsing dict: unknown elem')
            key = key.strip('"')
            if len(key) > 0:
                dict_out[key] = value
            pos_last = dict_str.find(',', pos_tmp)
            if pos_last < 0:
                break
            pos_last += 1
            pos = dict_str.find(':', pos_last)
        return dict_out

    # Make sure searchindex uses UTF-8 encoding
    if hasattr(searchindex, 'decode'):
        searchindex = searchindex.decode('UTF-8')
    # parse objects
    query = 'objects:'
    pos = searchindex.find(query)
    if pos < 0:
        raise ValueError('"objects:" not found in search index')
    sel = _select_block(searchindex[pos:], '{', '}')
    objects = _parse_dict_recursive(sel)
    # parse filenames
    query = 'filenames:'
    pos = searchindex.find(query)
    if pos < 0:
        raise ValueError('"filenames:" not found in search index')
    filenames = searchindex[pos + len(query) + 1:]
    filenames = filenames[:filenames.find(']')]
    filenames = [f.strip('"') for f in filenames.split(',')]
    return filenames, objects
class SphinxDocLinkResolver(object):
    """ Resolve documentation links using searchindex.js generated by Sphinx

    Parameters
    ----------
    doc_url : str
        The base URL of the project website.
    searchindex : str
        Filename of searchindex, relative to doc_url.
    extra_modules_test : list of str
        List of extra module names to test.
    relative : bool
        Return relative links (only useful for links to documentation of this
        package).
    """

    def __init__(self, doc_url, searchindex='searchindex.js',
                 extra_modules_test=None, relative=False):
        self.doc_url = doc_url
        self.relative = relative
        self._link_cache = {}
        self.extra_modules_test = extra_modules_test
        self._page_cache = {}
        if doc_url.startswith('http://'):
            if relative:
                raise ValueError('Relative links are only supported for local '
                                 'URLs (doc_url cannot start with "http://)"')
            searchindex_url = doc_url + '/' + searchindex
        else:
            searchindex_url = os.path.join(doc_url, searchindex)
        # detect if we are using relative links on a Windows system
        if os.name.lower() == 'nt' and not doc_url.startswith('http://'):
            if not relative:
                raise ValueError('You have to use relative=True for the local'
                                 ' package on a Windows system.')
            self._is_windows = True
        else:
            self._is_windows = False
        # download and initialize the search index
        sindex = get_data(searchindex_url)
        filenames, objects = parse_sphinx_searchindex(sindex)
        self._searchindex = dict(filenames=filenames, objects=objects)

    def _get_link(self, cobj):
        """Get a valid link, False if not found"""
        fname_idx = None
        full_name = cobj['module_short'] + '.' + cobj['name']
        # Try the fully qualified name first, then the module's own entry.
        if full_name in self._searchindex['objects']:
            value = self._searchindex['objects'][full_name]
            if isinstance(value, dict):
                value = value[value.keys()[0]]
            fname_idx = value[0]
        elif cobj['module_short'] in self._searchindex['objects']:
            value = self._searchindex['objects'][cobj['module_short']]
            if cobj['name'] in value.keys():
                fname_idx = value[cobj['name']][0]
        if fname_idx is not None:
            fname = self._searchindex['filenames'][fname_idx] + '.html'
            if self._is_windows:
                fname = fname.replace('/', '\\')
                link = os.path.join(self.doc_url, fname)
            else:
                link = posixpath.join(self.doc_url, fname)
            # Fetch the page once and cache it for later lookups.
            if link in self._page_cache:
                html = self._page_cache[link]
            else:
                html = get_data(link)
                self._page_cache[link] = html
            # test if cobj appears in page
            comb_names = [cobj['module_short'] + '.' + cobj['name']]
            if self.extra_modules_test is not None:
                for mod in self.extra_modules_test:
                    comb_names.append(mod + '.' + cobj['name'])
            url = False
            for comb_name in comb_names:
                if html.find(comb_name) >= 0:
                    url = link + '#' + comb_name
            link = url
        else:
            link = False
        return link

    def resolve(self, cobj, this_url):
        """Resolve the link to the documentation, returns None if not found

        Parameters
        ----------
        cobj : dict
            Dict with information about the "code object" for which we are
            resolving a link.
            cobj['name'] : function or class name (str)
            cobj['module_short'] : shortened module name (str)
            cobj['module'] : module name (str)
        this_url: str
            URL of the current page. Needed to construct relative URLs
            (only used if relative=True in constructor).

        Returns
        -------
        link : str | None
            The link (URL) to the documentation.
        """
        full_name = cobj['module_short'] + '.' + cobj['name']
        link = self._link_cache.get(full_name, None)
        if link is None:
            # we don't have it cached
            link = self._get_link(cobj)
            # cache it for the future
            self._link_cache[full_name] = link
        if link is False or link is None:
            # failed to resolve
            return None
        if self.relative:
            link = os.path.relpath(link, start=this_url)
            if self._is_windows:
                # replace '\' with '/' so the link works on the web
                link = link.replace('\\', '/')
            # for some reason, the relative link goes one directory too high up
            link = link[3:]
        return link
###############################################################################
# rst templates used to render one page per example; the plot variant also
# embeds the generated images, captured stdout, and timing information.
rst_template = """
.. _example_%(short_fname)s:
%(docstring)s
**Python source code:** :download:`%(fname)s <%(fname)s>`
.. literalinclude:: %(fname)s
:lines: %(end_row)s-
"""

plot_rst_template = """
.. _example_%(short_fname)s:
%(docstring)s
%(image_list)s
%(stdout)s
**Python source code:** :download:`%(fname)s <%(fname)s>`
.. literalinclude:: %(fname)s
:lines: %(end_row)s-
**Total running time of the example:** %(time_elapsed) .2f seconds
(%(time_m) .0f minutes %(time_s) .2f seconds)
"""

# The following strings are used when we have several pictures: we use
# an html div tag that our CSS uses to turn the lists into horizontal
# lists.
HLIST_HEADER = """
.. rst-class:: horizontal
"""

HLIST_IMAGE_TEMPLATE = """
*
.. image:: images/%s
:scale: 47
"""

SINGLE_IMAGE = """
.. image:: images/%s
:align: center
"""

# The following dictionary contains the information used to create the
# thumbnails for the front page of the scikit-learn home page.
# key: first image in set
# values: (number of plot in set, height of thumbnail)
carousel_thumbs = {'plot_classifier_comparison_1.png': (1, 600),
                   'plot_outlier_detection_1.png': (3, 372),
                   'plot_gp_regression_1.png': (2, 250),
                   'plot_adaboost_twoclass_1.png': (1, 372),
                   'plot_compare_methods_1.png': (1, 349)}
def extract_docstring(filename, ignore_heading=False):
    """ Extract a module-level docstring, if any

    Returns (docstring, first_par, end_row):
    docstring -- the full module docstring ('' when none is found)
    first_par -- its first paragraph, or (when ignore_heading is True) the
                 second paragraph truncated to ~95 characters
    end_row   -- 1-based line number just past the docstring, suitable for a
                 literalinclude ':lines:' option
    """
    # BUG FIX: read through a context manager so the file handle is closed
    # deterministically (the original leaked an open file object).
    with open(filename) as f:
        lines = f.readlines()
    start_row = 0
    if lines[0].startswith('#!'):
        # Skip a shebang line, remembering the offset for row bookkeeping.
        lines.pop(0)
        start_row = 1
    docstring = ''
    first_par = ''
    line_iterator = iter(lines)
    tokens = tokenize.generate_tokens(lambda: next(line_iterator))
    for tok_type, tok_content, _, (erow, _), _ in tokens:
        tok_type = token.tok_name[tok_type]
        if tok_type in ('NEWLINE', 'COMMENT', 'NL', 'INDENT', 'DEDENT'):
            continue
        elif tok_type == 'STRING':
            # NOTE: eval of a STRING token from a trusted example file only;
            # do not reuse this pattern on untrusted input.
            docstring = eval(tok_content)
            # If the docstring is formatted with several paragraphs, extract
            # the first one:
            paragraphs = '\n'.join(
                line.rstrip() for line
                in docstring.split('\n')).split('\n\n')
            if paragraphs:
                if ignore_heading:
                    if len(paragraphs) > 1:
                        first_par = re.sub('\n', ' ', paragraphs[1])
                        first_par = ((first_par[:95] + '...')
                                     if len(first_par) > 95 else first_par)
                    else:
                        raise ValueError("Docstring not found by gallery",
                                         "Please check your example's layout",
                                         " and make sure it's correct")
                else:
                    first_par = paragraphs[0]
        # Stop at the first significant token, docstring or not.
        break
    return docstring, first_par, erow + 1 + start_row
def generate_example_rst(app):
    """ Generate the list of examples, as well as the contents of
    examples.

    Builds the ``auto_examples`` tree under the Sphinx source directory,
    writes the gallery landing page ``index.rst`` (including the inline
    CSS/JS for the hover-zoom thumbnails), then processes the top-level
    examples directory and each of its immediate sub-directories.
    """
    root_dir = os.path.join(app.builder.srcdir, 'auto_examples')
    example_dir = os.path.abspath(app.builder.srcdir + '/../' + 'examples')
    try:
        # plot_gallery may arrive as a string (e.g. via -D on the command
        # line); eval turns "True"/"False"/"0" into the intended value.
        plot_gallery = eval(app.builder.config.plot_gallery)
    except TypeError:
        plot_gallery = bool(app.builder.config.plot_gallery)
    if not os.path.exists(example_dir):
        os.makedirs(example_dir)
    if not os.path.exists(root_dir):
        os.makedirs(root_dir)

    # we create an index.rst with all examples
    fhindex = open(os.path.join(root_dir, 'index.rst'), 'w')
    # Note: The sidebar button has been removed from the examples page for now
    #       due to how it messes up the layout. Will be fixed at a later point
    fhindex.write("""\
.. raw:: html
<style type="text/css">
div#sidebarbutton {
display: none;
}
.figure {
float: left;
margin: 10px;
-webkit-border-radius: 10px; /* Saf3-4, iOS 1-3.2, Android <1.6 */
-moz-border-radius: 10px; /* FF1-3.6 */
border-radius: 10px; /* Opera 10.5, IE9, Saf5, Chrome, FF4, iOS 4, Android 2.1+ */
border: 2px solid #fff;
background-color: white;
/* --> Thumbnail image size */
width: 150px;
height: 100px;
-webkit-background-size: 150px 100px; /* Saf3-4 */
-moz-background-size: 150px 100px; /* FF3.6 */
}
.figure img {
display: inline;
}
div.docstringWrapper p.caption {
display: block;
-webkit-box-shadow: 0px 0px 20px rgba(0, 0, 0, 0.0);
-moz-box-shadow: 0px 0px 20px rgba(0, 0, 0, .0); /* FF3.5 - 3.6 */
box-shadow: 0px 0px 20px rgba(0, 0, 0, 0.0); /* Opera 10.5, IE9, FF4+, Chrome 10+ */
padding: 0px;
border: white;
}
div.docstringWrapper p {
display: none;
background-color: white;
-webkit-box-shadow: 0px 0px 20px rgba(0, 0, 0, 1.00);
-moz-box-shadow: 0px 0px 20px rgba(0, 0, 0, 1.00); /* FF3.5 - 3.6 */
box-shadow: 0px 0px 20px rgba(0, 0, 0, 1.00); /* Opera 10.5, IE9, FF4+, Chrome 10+ */
padding: 13px;
margin-top: 0px;
border-style: solid;
border-width: 1px;
}
</style>
.. raw:: html
<script type="text/javascript">
function animateClone(e){
var position;
position = $(this).position();
var clone = $(this).closest('.thumbnailContainer').find('.clonedItem');
var clone_fig = clone.find('.figure');
clone.css("left", position.left - 70).css("top", position.top - 70).css("position", "absolute").css("z-index", 1000).css("background-color", "white");
var cloneImg = clone_fig.find('img');
clone.show();
clone.animate({
height: "270px",
width: "320px"
}, 0
);
cloneImg.css({
'max-height': "200px",
'max-width': "280px"
});
cloneImg.animate({
height: "200px",
width: "280px"
}, 0
);
clone_fig.css({
'margin-top': '20px',
});
clone_fig.show();
clone.find('p').css("display", "block");
clone_fig.css({
height: "240",
width: "305px"
});
cloneP_height = clone.find('p.caption').height();
clone_fig.animate({
height: (200 + cloneP_height)
}, 0
);
clone.bind("mouseleave", function(e){
clone.animate({
height: "100px",
width: "150px"
}, 10, function(){$(this).hide();});
clone_fig.animate({
height: "100px",
width: "150px"
}, 10, function(){$(this).hide();});
});
} //end animateClone()
$(window).load(function () {
$(".figure").css("z-index", 1);
$(".docstringWrapper").each(function(i, obj){
var clone;
var $obj = $(obj);
clone = $obj.clone();
clone.addClass("clonedItem");
clone.appendTo($obj.closest(".thumbnailContainer"));
clone.hide();
$obj.bind("mouseenter", animateClone);
}); // end each
}); // end
</script>
Examples
========
.. _examples-index:
""")
    # Here we don't use an os.walk, but we recurse only twice: flat is
    # better than nested.
    generate_dir_rst('.', fhindex, example_dir, root_dir, plot_gallery)
    for dir in sorted(os.listdir(example_dir)):
        if os.path.isdir(os.path.join(example_dir, dir)):
            generate_dir_rst(dir, fhindex, example_dir, root_dir, plot_gallery)
    fhindex.flush()
def extract_line_count(filename, target_dir):
    """ Extract the line count of an example file.

    Returns ``(docstring_end_row, total_rows)`` — both 1-based and
    adjusted for a stripped shebang — so callers can compute the number
    of "real" code lines (total minus docstring).
    """
    example_file = os.path.join(target_dir, filename)
    # Close the file deterministically (previously the handle leaked).
    with open(example_file) as fh:
        lines = fh.readlines()
    start_row = 0
    if lines and lines[0].startswith('#!'):
        lines.pop(0)
        start_row = 1
    line_iterator = iter(lines)
    tokens = tokenize.generate_tokens(lambda: next(line_iterator))
    check_docstring = True
    erow_docstring = 0
    erow = 0  # avoid UnboundLocalError when the token stream is empty
    for tok_type, _, _, (erow, _), _ in tokens:
        tok_type = token.tok_name[tok_type]
        if tok_type in ('NEWLINE', 'COMMENT', 'NL', 'INDENT', 'DEDENT'):
            continue
        elif tok_type == 'STRING' and check_docstring:
            # Only the first STRING token can be the module docstring.
            erow_docstring = erow
            check_docstring = False
    return erow_docstring + 1 + start_row, erow + 1 + start_row
def line_count_sort(file_list, target_dir):
    """ Sort the list of examples by effective line count.

    Only ``.py`` entries are kept.  Primary key: number of code lines
    (total minus docstring); secondary key: file name.  This reproduces
    the old ``np.lexsort((names, counts))`` ordering without using the
    ``np.object``/``np.str``/``np.float`` aliases, which were removed in
    NumPy 1.24 and made this function raise AttributeError.
    """
    py_files = [name for name in file_list if name.endswith('.py')]
    if not py_files:
        return []
    counted = []
    for name in py_files:
        docstr_lines, total_lines = extract_line_count(name, target_dir)
        counted.append((name, total_lines - docstr_lines))
    # Tuple sort == lexsort with count as the primary, name as tiebreaker.
    counted.sort(key=lambda entry: (entry[1], entry[0]))
    return [name for name, _ in counted]
def generate_dir_rst(dir, fhindex, example_dir, root_dir, plot_gallery):
    """ Generate the rst file for an example directory.

    Appends the directory's README and one thumbnail/figure entry per
    example to the gallery index ``fhindex``, and generates the per-file
    rst pages via generate_file_rst().  ``dir == '.'`` means the
    top-level examples directory itself.
    """
    if not dir == '.':
        target_dir = os.path.join(root_dir, dir)
        src_dir = os.path.join(example_dir, dir)
    else:
        target_dir = root_dir
        src_dir = example_dir
    # A README.txt is mandatory: it provides the section heading text.
    if not os.path.exists(os.path.join(src_dir, 'README.txt')):
        print(80 * '_')
        print('Example directory %s does not have a README.txt file' %
              src_dir)
        print('Skipping this directory')
        print(80 * '_')
        return
    fhindex.write("""
%s
""" % open(os.path.join(src_dir, 'README.txt')).read())
    if not os.path.exists(target_dir):
        os.makedirs(target_dir)
    # Shorter examples (fewer code lines) are listed first.
    sorted_listdir = line_count_sort(os.listdir(src_dir),
                                     src_dir)
    if not os.path.exists(os.path.join(dir, 'images', 'thumb')):
        os.makedirs(os.path.join(dir, 'images', 'thumb'))
    for fname in sorted_listdir:
        if fname.endswith('py'):
            generate_file_rst(fname, target_dir, src_dir, root_dir, plot_gallery)
            new_fname = os.path.join(src_dir, fname)
            # Short description = docstring paragraph after the title.
            _, fdocstring, _ = extract_docstring(new_fname, True)
            thumb = os.path.join(dir, 'images', 'thumb', fname[:-3] + '.png')
            link_name = os.path.join(dir, fname).replace(os.path.sep, '_')
            fhindex.write("""
.. raw:: html
<div class="thumbnailContainer">
<div class="docstringWrapper">
""")
            fhindex.write('.. figure:: %s\n' % thumb)
            if link_name.startswith('._'):
                link_name = link_name[2:]
            if dir != '.':
                fhindex.write(' :target: ./%s/%s.html\n\n' % (dir,
                              fname[:-3]))
            else:
                fhindex.write(' :target: ./%s.html\n\n' % link_name[:-3])
            fhindex.write(""" :ref:`example_%s`
.. raw:: html
<p>%s
</p></div>
</div>
.. toctree::
:hidden:
%s/%s
""" % (link_name, fdocstring, dir, fname[:-3]))
    fhindex.write("""
.. raw:: html
<div style="clear: both"></div>
""")  # clear at the end of the section
# modules for which we embed links into example code
# (only names whose __module__ starts with one of these get hyperlinked
# by embed_code_links / generate_file_rst)
DOCMODULES = ['sklearn', 'matplotlib', 'numpy', 'scipy']
def make_thumbnail(in_fname, out_fname, width, height):
    """Make a thumbnail with the same aspect ratio centered in an
    image with a given width and height.

    The source image is scaled to fit inside (width, height) and pasted
    centered on a white RGB canvas of exactly that size.
    """
    img = Image.open(in_fname)
    width_in, height_in = img.size
    scale_w = width / float(width_in)
    scale_h = height / float(height_in)

    # Pick the scale that keeps the result inside the target box.
    if height_in * scale_w <= height:
        scale = scale_w
    else:
        scale = scale_h

    width_sc = int(round(scale * width_in))
    height_sc = int(round(scale * height_in))

    # resize the image.  Image.ANTIALIAS was removed in Pillow 10; LANCZOS
    # is the same filter under its current name, so prefer it when present.
    resample = getattr(Image, 'LANCZOS', None) or getattr(Image, 'ANTIALIAS')
    img.thumbnail((width_sc, height_sc), resample)

    # insert centered
    thumb = Image.new('RGB', (width, height), (255, 255, 255))
    pos_insert = ((width - width_sc) // 2, (height - height_sc) // 2)
    thumb.paste(img, pos_insert)

    thumb.save(out_fname)
    # Use optipng to perform lossless compression on the resized image if
    # software is installed
    if os.environ.get('SKLEARN_DOC_OPTIPNG', False):
        try:
            subprocess.call(["optipng", "-quiet", "-o", "9", out_fname])
        except Exception:
            warnings.warn('Install optipng to reduce the size of the generated images')
def get_short_module_name(module_name, obj_name):
    """ Get the shortest possible module name that exposes obj_name.

    Walks up from the full dotted path (e.g. ``sklearn.svm.classes``)
    towards the root, returning the shortest prefix from which
    ``obj_name`` can still be imported.  Uses importlib instead of the
    original ``exec('from %s import %s')``, which executed dynamically
    built code for a simple importability probe.
    """
    parts = module_name.split('.')
    short_name = module_name
    for i in range(len(parts) - 1, 0, -1):
        short_name = '.'.join(parts[:i])
        try:
            module = importlib.import_module(short_name)
            # Mirror "from X import Y": the attribute must be reachable.
            if not hasattr(module, obj_name):
                raise ImportError(obj_name)
        except ImportError:
            # get the last working module name
            short_name = '.'.join(parts[:(i + 1)])
            break
    return short_name
def generate_file_rst(fname, target_dir, src_dir, root_dir, plot_gallery):
    """ Generate the rst file for a given example.

    Copies the example into the gallery tree; for ``plot*.py`` files
    (when the gallery is enabled) executes it to produce figures,
    capturing stdout and timing; builds thumbnails; and renders the rst
    page from ``rst_template``/``plot_rst_template`` via ``locals()``.

    NOTE(review): this function relies on Python-2-only builtins
    (``execfile``, ``basestring``) and module-level names defined
    elsewhere in the file (``Tee``, ``StringIO``, ``time``, templates).
    """
    base_image_name = os.path.splitext(fname)[0]
    # '%%s' survives the first substitution, so image_fname is itself a
    # '%s' template keyed on the figure number.
    image_fname = '%s_%%s.png' % base_image_name

    this_template = rst_template
    last_dir = os.path.split(src_dir)[-1]
    # to avoid leading . in file names, and wrong names in links
    if last_dir == '.' or last_dir == 'examples':
        last_dir = ''
    else:
        last_dir += '_'
    short_fname = last_dir + fname
    src_file = os.path.join(src_dir, fname)
    example_file = os.path.join(target_dir, fname)
    shutil.copyfile(src_file, example_file)

    # The following is a list containing all the figure names
    figure_list = []

    image_dir = os.path.join(target_dir, 'images')
    thumb_dir = os.path.join(image_dir, 'thumb')
    if not os.path.exists(image_dir):
        os.makedirs(image_dir)
    if not os.path.exists(thumb_dir):
        os.makedirs(thumb_dir)
    image_path = os.path.join(image_dir, image_fname)
    stdout_path = os.path.join(image_dir,
                               'stdout_%s.txt' % base_image_name)
    time_path = os.path.join(image_dir,
                             'time_%s.txt' % base_image_name)
    thumb_file = os.path.join(thumb_dir, fname[:-3] + '.png')
    time_elapsed = 0
    time_m = 0
    time_s = 0
    if plot_gallery and fname.startswith('plot'):
        # generate the plot as png image if file name
        # starts with plot and if it is more recent than an
        # existing image.
        first_image_file = image_path % 1
        if os.path.exists(stdout_path):
            stdout = open(stdout_path).read()
        else:
            stdout = ''
        if os.path.exists(time_path):
            time_elapsed = float(open(time_path).read())

        # Re-run only when no image exists yet or the source is newer.
        if not os.path.exists(first_image_file) or \
           os.stat(first_image_file).st_mtime <= os.stat(src_file).st_mtime:
            # We need to execute the code
            print('plotting %s' % fname)
            t0 = time()
            import matplotlib.pyplot as plt
            plt.close('all')
            cwd = os.getcwd()
            try:
                # First CD in the original example dir, so that any file
                # created by the example get created in this directory
                orig_stdout = sys.stdout
                os.chdir(os.path.dirname(src_file))
                my_buffer = StringIO()
                # Tee mirrors the example's stdout to both the console and
                # my_buffer so it can be replayed in the rst page.
                my_stdout = Tee(sys.stdout, my_buffer)
                sys.stdout = my_stdout
                my_globals = {'pl': plt}
                execfile(os.path.basename(src_file), my_globals)
                time_elapsed = time() - t0
                sys.stdout = orig_stdout
                my_stdout = my_buffer.getvalue()

                # get variables so we can later add links to the documentation
                example_code_obj = {}
                for var_name, var in my_globals.items():
                    if not hasattr(var, '__module__'):
                        continue
                    if not isinstance(var.__module__, basestring):
                        continue
                    if var.__module__.split('.')[0] not in DOCMODULES:
                        continue

                    # get the type as a string with other things stripped
                    tstr = str(type(var))
                    tstr = (tstr[tstr.find('\'')
                            + 1:tstr.rfind('\'')].split('.')[-1])

                    # get shortened module name
                    module_short = get_short_module_name(var.__module__,
                                                         tstr)
                    cobj = {'name': tstr, 'module': var.__module__,
                            'module_short': module_short,
                            'obj_type': 'object'}
                    example_code_obj[var_name] = cobj

                # find functions so we can later add links to the documentation
                funregex = re.compile('[\w.]+\(')
                with open(src_file, 'rt') as fid:
                    for line in fid.readlines():
                        if line.startswith('#'):
                            continue
                        for match in funregex.findall(line):
                            fun_name = match[:-1]
                            try:
                                exec('this_fun = %s' % fun_name, my_globals)
                            except Exception as err:
                                # Here, we were not able to execute the
                                # previous statement, either because the
                                # fun_name was not a function but a statement
                                # (print), or because the regexp didn't
                                # catch the whole function name :
                                # eg:
                                # X = something().blah()
                                # will work for something, but not blah.
                                continue
                            this_fun = my_globals['this_fun']
                            if not callable(this_fun):
                                continue
                            if not hasattr(this_fun, '__module__'):
                                continue
                            if not isinstance(this_fun.__module__, basestring):
                                continue
                            if (this_fun.__module__.split('.')[0]
                                    not in DOCMODULES):
                                continue

                            # get shortened module name
                            fun_name_short = fun_name.split('.')[-1]
                            module_short = get_short_module_name(
                                this_fun.__module__, fun_name_short)
                            cobj = {'name': fun_name_short,
                                    'module': this_fun.__module__,
                                    'module_short': module_short,
                                    'obj_type': 'function'}
                            example_code_obj[fun_name] = cobj
                fid.close()

                if len(example_code_obj) > 0:
                    # save the dictionary, so we can later add hyperlinks
                    codeobj_fname = example_file[:-3] + '_codeobj.pickle'
                    with open(codeobj_fname, 'wb') as fid:
                        pickle.dump(example_code_obj, fid, pickle.HIGHEST_PROTOCOL)
                    fid.close()

                if '__doc__' in my_globals:
                    # The __doc__ is often printed in the example, we
                    # don't with to echo it
                    my_stdout = my_stdout.replace(
                        my_globals['__doc__'],
                        '')
                my_stdout = my_stdout.strip()
                if my_stdout:
                    stdout = '**Script output**::\n\n %s\n\n' % (
                        '\n '.join(my_stdout.split('\n')))
                open(stdout_path, 'w').write(stdout)
                open(time_path, 'w').write('%f' % time_elapsed)
                os.chdir(cwd)

                # In order to save every figure we have two solutions :
                # * iterate from 1 to infinity and call plt.fignum_exists(n)
                #   (this requires the figures to be numbered
                #    incrementally: 1, 2, 3 and not 1, 2, 5)
                # * iterate over [fig_mngr.num for fig_mngr in
                #   matplotlib._pylab_helpers.Gcf.get_all_fig_managers()]
                for fig_num in (fig_mngr.num for fig_mngr in
                        matplotlib._pylab_helpers.Gcf.get_all_fig_managers()):
                    # Set the fig_num figure as the current figure as we can't
                    # save a figure that's not the current figure.
                    plt.figure(fig_num)
                    plt.savefig(image_path % fig_num)
                    figure_list.append(image_fname % fig_num)
            except:
                print(80 * '_')
                print('%s is not compiling:' % fname)
                traceback.print_exc()
                print(80 * '_')
            finally:
                # Always restore the working directory and stdout, even
                # when the example crashed.
                os.chdir(cwd)
                sys.stdout = orig_stdout
            print(" - time elapsed : %.2g sec" % time_elapsed)
        else:
            # Images are up to date: just collect the existing figure files.
            figure_list = [f[len(image_dir):]
                           for f in glob.glob(image_path % '[1-9]')]
                           #for f in glob.glob(image_path % '*')]

        # generate thumb file
        this_template = plot_rst_template
        car_thumb_path = os.path.join(os.path.split(root_dir)[0], '_build/html/stable/_images/')
        # Note: normaly, make_thumbnail is used to write to the path contained in `thumb_file`
        # which is within `auto_examples/../images/thumbs` depending on the example.
        # Because the carousel has different dimensions than those of the examples gallery,
        # I did not simply reuse them all as some contained whitespace due to their default gallery
        # thumbnail size. Below, for a few cases, seperate thumbnails are created (the originals can't
        # just be overwritten with the carousel dimensions as it messes up the examples gallery layout).
        # The special carousel thumbnails are written directly to _build/html/stable/_images/,
        # as for some reason unknown to me, Sphinx refuses to copy my 'extra' thumbnails from the
        # auto examples gallery to the _build folder. This works fine as is, but it would be cleaner to
        # have it happen with the rest. Ideally the should be written to 'thumb_file' as well, and then
        # copied to the _images folder during the `Copying Downloadable Files` step like the rest.
        if not os.path.exists(car_thumb_path):
            os.makedirs(car_thumb_path)
        if os.path.exists(first_image_file):
            # We generate extra special thumbnails for the carousel
            carousel_tfile = os.path.join(car_thumb_path, fname[:-3] + '_carousel.png')
            first_img = image_fname % 1
            if first_img in carousel_thumbs:
                make_thumbnail((image_path % carousel_thumbs[first_img][0]),
                               carousel_tfile, carousel_thumbs[first_img][1], 190)
            make_thumbnail(first_image_file, thumb_file, 400, 280)

    if not os.path.exists(thumb_file):
        # create something to replace the thumbnail
        make_thumbnail('images/no_image.png', thumb_file, 200, 140)

    docstring, short_desc, end_row = extract_docstring(example_file)

    # Depending on whether we have one or more figures, we're using a
    # horizontal list or a single rst call to 'image'.
    if len(figure_list) == 1:
        figure_name = figure_list[0]
        image_list = SINGLE_IMAGE % figure_name.lstrip('/')
    else:
        image_list = HLIST_HEADER
        for figure_name in figure_list:
            image_list += HLIST_IMAGE_TEMPLATE % figure_name.lstrip('/')

    time_m, time_s = divmod(time_elapsed, 60)
    f = open(os.path.join(target_dir, fname[:-2] + 'rst'), 'w')
    # The templates substitute local variable names directly (docstring,
    # image_list, short_fname, time_m, time_s, stdout, end_row, ...).
    f.write(this_template % locals())
    f.flush()
def embed_code_links(app, exception):
    """Embed hyperlinks to documentation into example code.

    Runs on Sphinx 'build-finished'.  For each generated example HTML
    page that has a matching ``*_codeobj.pickle`` (written by
    generate_file_rst), rewrites the highlighted identifiers into links
    to the relevant sklearn/matplotlib/numpy/scipy documentation.
    Network failures only emit a warning; the build is never failed.
    """
    try:
        if exception is not None:
            return
        print('Embedding documentation hyperlinks in examples..')

        # Add resolvers for the packages for which we want to show links
        doc_resolvers = {}
        doc_resolvers['sklearn'] = SphinxDocLinkResolver(app.builder.outdir,
                                                         relative=True)
        doc_resolvers['matplotlib'] = SphinxDocLinkResolver(
            'http://matplotlib.org')
        doc_resolvers['numpy'] = SphinxDocLinkResolver(
            'http://docs.scipy.org/doc/numpy-1.6.0')
        doc_resolvers['scipy'] = SphinxDocLinkResolver(
            'http://docs.scipy.org/doc/scipy-0.11.0/reference')

        example_dir = os.path.join(app.builder.srcdir, 'auto_examples')
        html_example_dir = os.path.abspath(os.path.join(app.builder.outdir,
                                                        'auto_examples'))

        # patterns for replacement
        link_pattern = '<a href="%s">%s</a>'
        orig_pattern = '<span class="n">%s</span>'
        period = '<span class="o">.</span>'

        for dirpath, _, filenames in os.walk(html_example_dir):
            for fname in filenames:
                print('\tprocessing: %s' % fname)
                full_fname = os.path.join(html_example_dir, dirpath, fname)
                subpath = dirpath[len(html_example_dir) + 1:]
                pickle_fname = os.path.join(example_dir, subpath,
                                            fname[:-5] + '_codeobj.pickle')
                if os.path.exists(pickle_fname):
                    # we have a pickle file with the objects to embed links for
                    with open(pickle_fname, 'rb') as fid:
                        example_code_obj = pickle.load(fid)
                    fid.close()
                    str_repl = {}
                    # generate replacement strings with the links
                    for name, cobj in example_code_obj.items():
                        this_module = cobj['module'].split('.')[0]
                        if this_module not in doc_resolvers:
                            continue
                        link = doc_resolvers[this_module].resolve(cobj,
                                                                  full_fname)
                        if link is not None:
                            parts = name.split('.')
                            name_html = orig_pattern % parts[0]
                            for part in parts[1:]:
                                name_html += period + orig_pattern % part
                            str_repl[name_html] = link_pattern % (link, name_html)
                    # do the replacement in the html file
                    if len(str_repl) > 0:
                        with open(full_fname, 'rb') as fid:
                            lines_in = fid.readlines()
                        with open(full_fname, 'wb') as fid:
                            for line in lines_in:
                                line = line.decode('utf-8')
                                # BUG FIX: dict.iteritems() does not exist on
                                # Python 3 (the rest of this function already
                                # uses .items()).
                                for name, link in str_repl.items():
                                    line = line.replace(name, link)
                                fid.write(line.encode('utf-8'))
    except HTTPError as e:
        print("The following HTTP Error has occurred:\n")
        print(e.code)
    except URLError as e:
        print("\n...\n"
              "Warning: Embedding the documentation hyperlinks requires "
              "internet access.\nPlease check your network connection.\n"
              "Unable to continue embedding due to a URL Error: \n")
        print(e.args)
    print('[done]')
def setup(app):
    """Sphinx extension entry point: wire up the example gallery hooks."""
    app.connect('builder-inited', generate_example_rst)
    app.add_config_value('plot_gallery', True, 'html')

    # embed links after build is finished
    app.connect('build-finished', embed_code_links)

    # Sphinx hack: sphinx copies generated images to the build directory
    # each time the docs are made.  If the desired image name already
    # exists, it appends a digit to prevent overwrites.  The problem is,
    # the directory is never cleared.  This means that each time you build
    # the docs, the number of images in the directory grows.
    #
    # This question has been asked on the sphinx development list, but there
    # was no response: http://osdir.com/ml/sphinx-dev/2011-02/msg00123.html
    #
    # The following is a hack that prevents this behavior by clearing the
    # image build directory each time the docs are built.  If sphinx
    # changes their layout between versions, this will not work (though
    # it should probably not cause a crash).  Tested successfully
    # on Sphinx 1.0.7
    build_image_dir = '_build/html/_images'
    if os.path.exists(build_image_dir):
        for entry in os.listdir(build_image_dir):
            if entry.endswith('png'):
                os.remove(os.path.join(build_image_dir, entry))
| |
# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import time
import fixtures
from botocore.exceptions import ClientError as BotoClientError
from botocore import session as botocore_session
from glanceclient import client as glance_client
from keystoneauth1.identity import v3 as identity_v3
from keystoneauth1 import session
from neutronclient.neutron import client as neutron_client
from novaclient import client as nova_client
from novaclient import exceptions as nova_exc
from oslo_utils import uuidutils
from saharaclient.api import base as saharaclient_base
from saharaclient import client as sahara_client
import six
from swiftclient import client as swift_client
from swiftclient import exceptions as swift_exc
from tempest.lib import exceptions as exc
from sahara_tests.scenario import timeouts
from sahara_tests.scenario import utils
def get_session(auth_url=None, username=None, password=None,
                project_name=None, verify=True, cert=None):
    """Build a keystoneauth1 session for the given credentials.

    The auth URL is normalised so that it always targets the Identity v3
    endpoint, regardless of whether a v2.0 or versionless URL was given.
    """
    fixed_url = auth_url.replace('/v2.0', '/v3')
    if not fixed_url.endswith('/v3'):
        fixed_url = '%s/v3' % fixed_url
    password_auth = identity_v3.Password(
        auth_url=fixed_url,
        username=username,
        password=password,
        project_name=project_name,
        user_domain_name='default',
        project_domain_name='default')
    return session.Session(auth=password_auth, verify=verify, cert=cert)
class Client(object):
    """Base class for the OpenStack client wrappers used by the tests."""

    def is_resource_deleted(self, method, *args, **kwargs):
        # Each subclass decides what "deleted" means for its service.
        raise NotImplementedError

    def delete_resource(self, method, *args, **kwargs):
        # Poll (every 5 seconds) until the resource is reported gone,
        # bounded by the configured delete timeout.
        with fixtures.Timeout(
                timeouts.Defaults.instance.timeout_delete_resource,
                gentle=True):
            while not self.is_resource_deleted(method, *args, **kwargs):
                time.sleep(5)
class SaharaClient(Client):
    """Wrapper around python-saharaclient used by the scenario tests.

    Supports both the 1.1 API and newer versions: the managers used for
    job templates and job runs differ between the two, so those calls
    dispatch on ``self.api_version`` via the private helpers below.
    """

    def __init__(self, *args, **kwargs):
        # Default to the 1.1 API unless the caller asked for another one.
        self.api_version = kwargs.pop('api_version', '1.1')
        self.sahara_client = sahara_client.Client(self.api_version, *args,
                                                  **kwargs)

    def _job_template_manager(self):
        # In API 1.1 "jobs" are the templates; newer APIs expose a
        # dedicated job_templates manager.
        if self.api_version == '1.1':
            return self.sahara_client.jobs
        return self.sahara_client.job_templates

    def _job_run_manager(self):
        # In API 1.1 runs live in job_executions; newer APIs run jobs
        # through the jobs manager.
        if self.api_version == '1.1':
            return self.sahara_client.job_executions
        return self.sahara_client.jobs

    def create_node_group_template(self, *args, **kwargs):
        template = self.sahara_client.node_group_templates.create(*args,
                                                                  **kwargs)
        return template.id

    def delete_node_group_template(self, node_group_template_id):
        return self.delete_resource(
            self.sahara_client.node_group_templates.delete,
            node_group_template_id)

    def create_cluster_template(self, *args, **kwargs):
        template = self.sahara_client.cluster_templates.create(*args,
                                                               **kwargs)
        return template.id

    def delete_cluster_template(self, cluster_template_id):
        return self.delete_resource(
            self.sahara_client.cluster_templates.delete,
            cluster_template_id)

    def create_cluster(self, *args, **kwargs):
        cluster = self.sahara_client.clusters.create(*args, **kwargs)
        return cluster.id

    def delete_cluster(self, cluster_id):
        return self.delete_resource(
            self.sahara_client.clusters.delete,
            cluster_id)

    def scale_cluster(self, cluster_id, body):
        return self.sahara_client.clusters.scale(cluster_id, body)

    def start_cluster_verification(self, cluster_id):
        return self.sahara_client.clusters.verification_update(cluster_id,
                                                               'START')

    def create_datasource(self, *args, **kwargs):
        datasource = self.sahara_client.data_sources.create(*args, **kwargs)
        return datasource.id

    def get_datasource(self, *args, **kwargs):
        return self.sahara_client.data_sources.get(*args, **kwargs)

    def delete_datasource(self, datasource_id):
        return self.delete_resource(
            self.sahara_client.data_sources.delete,
            datasource_id)

    def create_job_binary_internal(self, *args, **kwargs):
        binary = self.sahara_client.job_binary_internals.create(*args,
                                                                **kwargs)
        return binary.id

    def delete_job_binary_internal(self, job_binary_internal_id):
        return self.delete_resource(
            self.sahara_client.job_binary_internals.delete,
            job_binary_internal_id)

    def create_job_binary(self, *args, **kwargs):
        binary = self.sahara_client.job_binaries.create(*args, **kwargs)
        return binary.id

    def delete_job_binary(self, job_binary_id):
        return self.delete_resource(
            self.sahara_client.job_binaries.delete,
            job_binary_id)

    def create_job_template(self, *args, **kwargs):
        return self._job_template_manager().create(*args, **kwargs).id

    def delete_job_template(self, job_id):
        return self.delete_resource(self._job_template_manager().delete,
                                    job_id)

    def run_job(self, *args, **kwargs):
        return self._job_run_manager().create(*args, **kwargs).id

    def delete_job_execution(self, job_execution_id):
        return self.delete_resource(self._job_run_manager().delete,
                                    job_execution_id)

    def get_cluster(self, cluster_id, show_progress=False):
        return self.sahara_client.clusters.get(cluster_id, show_progress)

    def get_cluster_status(self, cluster_id):
        cluster = self.sahara_client.clusters.get(cluster_id)
        return str(cluster.status)

    def get_job_status(self, exec_id):
        execution = self._job_run_manager().get(exec_id)
        return str(execution.info['status'])

    def get_job_info(self, exec_id):
        execution = self._job_run_manager().get(exec_id)
        return self.sahara_client.jobs.get(execution.job_id)

    def get_cluster_id(self, name):
        """Resolve a cluster name to its id; uuid-like names pass through."""
        if uuidutils.is_uuid_like(name):
            return name
        for cluster in self.sahara_client.clusters.list():
            if cluster.name == name:
                return cluster.id

    def get_node_group_template_id(self, name):
        matches = (tmpl.id
                   for tmpl in self.sahara_client.node_group_templates.list()
                   if tmpl.name == name)
        return next(matches, None)

    def register_image(self, image_id, testcase):
        """Return the registered image, registering it first if needed."""
        try:
            return self.sahara_client.images.get(image_id)
        except saharaclient_base.APIException:
            print("Image not registered in sahara. Registering and run tests")
            username = testcase.get('image_username')
            if username is None:
                raise exc.InvalidContentType(
                    "Registering of image failed. Please, specify "
                    "'image_username'. For details see README in scenario "
                    "tests.")
            self.sahara_client.images.update_image(
                image_id, username,
                "Registered by scenario tests")
            self.sahara_client.images.update_tags(
                image_id, [testcase["plugin_name"],
                           testcase["plugin_version"]])
        return self.sahara_client.images.get(image_id)

    def is_resource_deleted(self, method, *args, **kwargs):
        # Sahara reports a deleted resource with a 404 APIException.
        try:
            method(*args, **kwargs)
        except saharaclient_base.APIException as ex:
            return ex.error_code == 404
        return False
class NovaClient(Client):
    """Wrapper around novaclient for flavor and keypair management."""

    def __init__(self, *args, **kwargs):
        self.nova_client = nova_client.Client('2', *args, **kwargs)

    def get_flavor_id(self, flavor_name):
        """Resolve a flavor name to an id; ids/uuids pass straight through."""
        looks_like_id = uuidutils.is_uuid_like(flavor_name) or (
            isinstance(flavor_name, six.string_types)
            and flavor_name.isdigit())
        if looks_like_id:
            return flavor_name
        for flavor in self.nova_client.flavors.list():
            if flavor.name == flavor_name:
                return flavor.id
        raise exc.NotFound(flavor_name)

    def create_flavor(self, flavor_object):
        """Create a flavor from a dict of optional properties."""
        spec = flavor_object
        return self.nova_client.flavors.create(
            spec.get('name', utils.rand_name('scenario')),
            spec.get('ram', 1),
            spec.get('vcpus', 1),
            spec.get('root_disk', 0),
            ephemeral=spec.get('ephemeral_disk', 0),
            swap=spec.get('swap_disk', 0),
            flavorid=spec.get('id', 'auto'))

    def delete_flavor(self, flavor_id):
        return self.delete_resource(self.nova_client.flavors.delete,
                                    flavor_id)

    def delete_keypair(self, key_name):
        return self.delete_resource(self.nova_client.keypairs.delete,
                                    key_name)

    def is_resource_deleted(self, method, *args, **kwargs):
        # Nova raises NotFound once the resource is gone.
        try:
            method(*args, **kwargs)
        except nova_exc.NotFound as ex:
            return ex.code == 404
        return False
class NeutronClient(Client):
    """Wrapper around neutronclient for networks and security groups."""

    def __init__(self, *args, **kwargs):
        self.neutron_client = neutron_client.Client('2.0', *args, **kwargs)

    def get_network_id(self, network_name):
        """Resolve a network name to an id; uuid-like names pass through."""
        if uuidutils.is_uuid_like(network_name):
            return network_name
        found = self.neutron_client.list_networks(
            name=network_name)['networks']
        if not found:
            raise exc.NotFound(network_name)
        return found[0]['id']

    def create_security_group_for_neutron(self, sg_name):
        body = {
            "security_group":
            {
                "name": sg_name,
                "description": "Just for test"
            }
        }
        created = self.neutron_client.create_security_group(body)
        return created['security_group']['id']

    def get_security_group_id(self, sg_name):
        groups = self.neutron_client.list_security_groups()["security_groups"]
        for group in groups:
            if group['name'] == sg_name:
                return group['id']
        raise exc.NotFound(sg_name)

    def add_security_group_rule_for_neutron(self, sg_id):
        """Open the full TCP port range in both directions for the group."""
        ingress_rule = {
            "direction": "ingress",
            "ethertype": "IPv4",
            "port_range_max": 65535,
            "port_range_min": 1,
            "protocol": "TCP",
            "remote_group_id": None,
            "remote_ip_prefix": None,
            "security_group_id": sg_id
        }
        egress_rule = {
            "direction": "egress",
            "ethertype": "IPv4",
            "port_range_max": 65535,
            "port_range_min": 1,
            "protocol": "TCP",
            "remote_group_id": None,
            "remote_ip_prefix": None,
            "security_group_id": sg_id
        }
        return self.neutron_client.create_security_group_rule(
            {"security_group_rules": [ingress_rule, egress_rule]})

    def delete_security_group_for_neutron(self, sg_id):
        return self.neutron_client.delete_security_group(sg_id)
class SwiftClient(Client):
    """Wrapper around swiftclient for container and object management."""

    def __init__(self, *args, **kwargs):
        self.swift_client = swift_client.Connection(*args, **kwargs)

    def create_container(self, container_name):
        return self.swift_client.put_container(container_name)

    def delete_container(self, container_name):
        # A container must be emptied before it can be removed.
        for object_name in self._get_objects(container_name):
            self.delete_object(container_name, object_name)
        return self.delete_resource(
            self.swift_client.delete_container, container_name)

    def _get_objects(self, container_name):
        # get_container() returns (headers, listing); only names matter
        # here, returned in reverse listing order.
        _, listing = self.swift_client.get_container(container_name)
        names = [entry['name'] for entry in listing]
        names.reverse()
        return names

    def upload_data(self, container_name, object_name, data):
        return self.swift_client.put_object(container_name, object_name,
                                            data)

    def delete_object(self, container_name, object_name):
        return self.delete_resource(
            self.swift_client.delete_object, container_name, object_name)

    def is_resource_deleted(self, method, *args, **kwargs):
        # Swift reports a deleted resource with a 404 ClientException.
        try:
            method(*args, **kwargs)
        except swift_exc.ClientException as ex:
            return ex.http_status == 404
        return False
class BotoClient(Client):
    """Wrapper around botocore's S3 client for bucket/object management."""

    def __init__(self, *args, **kwargs):
        boto_session = botocore_session.get_session()
        self.boto_client = boto_session.create_client(
            's3',
            endpoint_url=kwargs['endpoint'],
            aws_access_key_id=kwargs['accesskey'],
            aws_secret_access_key=kwargs['secretkey']
        )

    def create_bucket(self, bucket_name):
        return self.boto_client.create_bucket(Bucket=bucket_name)

    def _delete_and_check_bucket(self, bucket_name):
        """Empty and delete the bucket; True once it no longer exists."""
        try:
            # While list_objects_v2 is the suggested function, pagination
            # does not seems to work properly with RadosGW when it's used.
            paginator = self.boto_client.get_paginator('list_objects')
            for page in paginator.paginate(Bucket=bucket_name):
                for item in page.get('Contents', []):
                    self.boto_client.delete_object(Bucket=bucket_name,
                                                   Key=item['Key'])
            self.boto_client.delete_bucket(Bucket=bucket_name)
        except BotoClientError as ex:
            error = ex.response.get('Error', {})
            # without the conversion the value is a tuple
            if '%s' % (error.get('Code', ''),) == 'NoSuchBucket':
                return True
        return False

    def delete_bucket(self, bucket_name):
        return self.delete_resource(self._delete_and_check_bucket,
                                    bucket_name)

    def upload_data(self, bucket_name, object_name, data):
        return self.boto_client.put_object(Bucket=bucket_name,
                                           Key=object_name,
                                           Body=data)

    def _delete_and_check_object(self, bucket_name, object_name):
        """Delete the object; True once HEAD reports it gone (404)."""
        self.boto_client.delete_object(Bucket=bucket_name, Key=object_name)
        try:
            self.boto_client.head_object(Bucket=bucket_name,
                                         Key=object_name)
        except BotoClientError as ex:
            error = ex.response.get('Error', {})
            # without the conversion the value is a tuple
            if '%s' % (error.get('Code', ''),) == '404':
                return True
        return False

    def delete_object(self, bucket_name, object_name):
        return self.delete_resource(self._delete_and_check_object,
                                    bucket_name, object_name)

    def is_resource_deleted(self, method, *args, **kwargs):
        # Exceptions are handled directly inside the call to "method",
        # because they are not the same for objects and buckets.
        return method(*args, **kwargs)
class GlanceClient(Client):
    """Thin wrapper over the Glance v2 image API client."""
    def __init__(self, *args, **kwargs):
        # Always talk to the v2 image API.
        self.glance_client = glance_client.Client('2', *args, **kwargs)
    def get_image_id(self, image_name):
        """Resolve *image_name* (a name or a UUID) to an image id.

        Raises exc.NotFound when no image with that name exists.
        """
        if uuidutils.is_uuid_like(image_name):
            # Already an id; nothing to look up.
            return image_name
        found = next(
            (img.id for img in self.glance_client.images.list()
             if img.name == image_name),
            None)
        if found is not None:
            return found
        raise exc.NotFound(image_name)
| |
from django.conf import settings
from django.core import validators
from django.db import models
from django.template.defaultfilters import slugify
from django.urls import reverse
from django.utils.functional import lazy
from django.utils.text import format_lazy
from django.utils.translation import ugettext, ugettext_lazy as _
from django.utils.timezone import now
import reversion
from markitup.fields import MarkupField
from reversion.models import Version
from wafer.kv.models import KeyValue
# constants to make things clearer elsewhere
# Single-character codes stored in Talk.status; Talk.TALK_STATUS pairs them
# with translated labels.
SUBMITTED = 'S'
UNDER_CONSIDERATION = 'U'
PROVISIONAL = 'P'
ACCEPTED = 'A'
REJECTED = 'R'
CANCELLED = 'C'
WITHDRAWN = 'W'
# Utility functions used in the forms
# We define this here, rather than in the users model, to avoid import
# loops as we need to import talks into users
def render_author(author):
    """Render an author as 'Display Name (username)' for form display."""
    display_name = author.userprofile.display_name()
    return '%s (%s)' % (display_name, author)
def authors_help():
    """Build the help text for the Talk.authors field.

    The text depends on WAFER_PUBLIC_ATTENDEE_LIST, so it is evaluated
    lazily (via ``lazy``) at the point the field help is rendered.
    """
    _ = ugettext  # This function will be wrapped for lazy evaluation
    parts = [_("The speakers presenting the talk.")]
    if not settings.WAFER_PUBLIC_ATTENDEE_LIST:
        parts.append(_(
            "To ensure attendee privacy, you will only be able to see "
            "yourself and authors that have been added to the talk by the "
            "conference organisers. "
            "If you will have other co-authors, add a note in the notes "
            "field, so the organisers can add them to your talk."
        ))
    parts.append(_(
        "<strong>You, as the talk submitter, will be the talk's corresponding "
        "author.</strong>"
    ))
    return ' '.join(parts)
class TalkTypeManager(models.Manager):
    """Manager with queries over talk-type submission state."""

    def open_for_submission(self):
        """Return talk types that currently accept new submissions."""
        Q = models.Q
        no_deadline = Q(submission_deadline__isnull=True)
        deadline_in_future = Q(submission_deadline__gt=now())
        late_allowed = Q(accept_late_submissions=True)
        return self.filter(
            Q(disable_submission=False)
            & (no_deadline | deadline_in_future | late_allowed))
class TalkType(models.Model):
    """A type of talk."""
    name = models.CharField(_('name'), max_length=255)
    description = models.TextField(_('description'), max_length=1024)
    # Relative position in listings; lower values sort first (see Meta).
    order = models.IntegerField(_('order'), default=1)
    disable_submission = models.BooleanField(
        _('disable submission'),
        default=False,
        # Wrapped in _() for translation, consistent with every other
        # help_text string on this model (it was previously unmarked).
        help_text=_("Don't allow users to submit talks of this type."))
    submission_deadline = models.DateTimeField(
        _('submission deadline'),
        null=True,
        blank=True,
        help_text=_("Deadline for submitting talks of this type")
    )
    accept_late_submissions = models.BooleanField(
        _('accept late submissions'),
        default=False,
        help_text=_("Whether submissions after the deadline should be accepted")
    )
    show_speakers = models.BooleanField(
        _('Show authors in speakers list'),
        default=True,
        help_text=_("Whether to show the authors for this talk type in the speakers list")
    )
    objects = TalkTypeManager()
    def __str__(self):
        return u'%s' % (self.name,)
    class Meta:
        ordering = ['order', 'id']
        verbose_name = _('talk type')
        verbose_name_plural = _('talk types')
    def css_class(self):
        """Return a string for use as a css class name"""
        # While css can represent complicated strings
        # using escaping, we want simplicity and obvious predictablity
        return u'talk-type-%s' % slugify(self.name)
    css_class.admin_order_field = 'name'
    css_class.short_description = _('CSS class name')
class Track(models.Model):
    """A conference track."""

    name = models.CharField(_('name'), max_length=255)
    description = models.TextField(_('description'), max_length=1024)
    order = models.IntegerField(_('order'), default=1)

    class Meta:
        ordering = ['order', 'id']
        verbose_name = _('track')
        verbose_name_plural = _('tracks')

    def __str__(self):
        return u'%s' % (self.name,)

    def css_class(self):
        """Return a string for use as a css class name"""
        # Slugifying keeps the result simple and predictable, rather than
        # depending on css escaping of arbitrary track names.
        return u'track-%s' % slugify(self.name)
    css_class.admin_order_field = 'name'
    css_class.short_description = _('CSS class name')
@reversion.register(follow=('urls',))
class Talk(models.Model):
    """A talk proposal submitted to the conference.

    The status field holds the single-character codes defined at module
    level (SUBMITTED, ACCEPTED, ...); revisions are tracked with
    django-reversion, following the related TalkUrl objects.
    """
    class Meta:
        permissions = (
            ("view_all_talks", "Can see all talks"),
            ("edit_private_notes", "Can edit the private notes fields"),
        )
        verbose_name = _('talk')
        verbose_name_plural = _('talks')
    # (code, translated label) choices for the status field.
    TALK_STATUS = (
        (ACCEPTED, _('Accepted')),
        (REJECTED, _('Not accepted')),
        (CANCELLED, _('Cancelled')),
        (UNDER_CONSIDERATION, _('Under consideration')),
        (SUBMITTED, _('Submitted')),
        (PROVISIONAL, _('Provisionally Accepted')),
        (WITHDRAWN, _('Withdrawn')),
    )
    talk_id = models.AutoField(primary_key=True)
    talk_type = models.ForeignKey(
        TalkType, verbose_name=_("talk type"), null=True, blank=True, on_delete=models.SET_NULL)
    track = models.ForeignKey(
        Track, verbose_name=_("track"), null=True, blank=True, on_delete=models.SET_NULL)
    LANGUAGES = settings.WAFER_TALK_LANGUAGES
    # DEFAULT_LANGUAGE should be None if WAFER_TALK_LANGUAGES is empty
    DEFAULT_LANGUAGE = LANGUAGES[0][0] if LANGUAGES else None
    language = models.CharField(
        verbose_name=_("language"),
        max_length=5,
        null=True,
        blank=True,
        choices=LANGUAGES,
        default=DEFAULT_LANGUAGE,
    )
    title = models.CharField(_("title"), max_length=1024)
    abstract = MarkupField(
        _("abstract"),
        help_text=_("Write two or three paragraphs describing your talk. "
                    "Who is your audience? What will they get out of it? "
                    "What will you cover?<br />"
                    "You can use Markdown syntax."))
    notes = models.TextField(
        _("notes"),
        null=True, blank=True,
        help_text=_(
            "Any notes for the conference? Such as additional background on "
            "the topic or presenters that isn't going to be published "
            "publicly, special requirements for the event, or thoughts on "
            "scheduling. These are not visible to the public."))
    private_notes = models.TextField(
        _("private notes"),
        null=True, blank=True,
        help_text=_("Note space for the conference organisers (not visible "
                    "to submitter)"))
    status = models.CharField(_('status'), max_length=1, choices=TALK_STATUS,
                              default=SUBMITTED)
    corresponding_author = models.ForeignKey(
        settings.AUTH_USER_MODEL, related_name='contact_talks',
        on_delete=models.CASCADE,
        verbose_name=_("corresponding author"),
        help_text=_(
            "The person submitting the talk (and who questions regarding the "
            "talk should be addressed to)."))
    authors = models.ManyToManyField(
        settings.AUTH_USER_MODEL, related_name='talks',
        verbose_name=_("authors"),
        help_text=lazy(authors_help, str))
    video = models.BooleanField(
        default=True,
        verbose_name=_("video"),
        help_text=format_lazy(_(
            "By checking this, you are giving permission for the talk to be "
            "videoed, and distributed by the conference, under the "
            '<a href="{license_url}">{license_name} license</a>.'),
            license_url=settings.WAFER_VIDEO_LICENSE_URL,
            license_name=settings.WAFER_VIDEO_LICENSE,
        ),
    )
    video_reviewer = models.EmailField(
        null=True, blank=True,
        verbose_name=_("video reviewer"),
        help_text=_(
            "Email address of a person who will be allowed to review "
            "and approve your video details. "
            "Ideally, a second set of eyes who is not a busy conference "
            "presenter. "
            "But you can specify yourself, if you can't think of anyone else "
            "who would care."
        ))
    kv = models.ManyToManyField(KeyValue)
    submission_time = models.DateTimeField(auto_now_add=True)
    @property
    def slug(self):
        """URL slug derived from the talk title."""
        return slugify(self.title)
    def __str__(self):
        return u'%s: %s' % (self.corresponding_author, self.title)
    def get_absolute_url(self):
        """Canonical URL of this talk's detail page."""
        return reverse('wafer_talk', kwargs={
            'pk': self.talk_id,
            'slug': self.slug,
        })
    def get_corresponding_author_contact(self):
        """Email plus contact number (or placeholder) for the admin list."""
        email = self.corresponding_author.email
        profile = self.corresponding_author.userprofile
        if profile.contact_number:
            contact = profile.contact_number
        else:
            # Should we wrap this in a span for styling?
            contact = _('NO CONTACT INFO')
        return '%s - %s' % (email, contact)
    get_corresponding_author_contact.short_description = _('Contact Details')
    def get_corresponding_author_name(self):
        """'Display Name (username)' of the corresponding author."""
        return render_author(self.corresponding_author)
    get_corresponding_author_name.admin_order_field = 'corresponding_author'
    get_corresponding_author_name.short_description = _('Corresponding Author')
    def get_authors_display_name(self):
        """Display names of the authors, corresponding author first."""
        authors = list(self.authors.all())
        # Corresponding authors first
        authors.sort(
            key=lambda author: u'' if author == self.corresponding_author
            else author.userprofile.display_name())
        names = [author.userprofile.display_name() for author in authors]
        if len(names) <= 2:
            return u' & '.join(names)
        return _(u'%s, et al.') % names[0]
    def get_in_schedule(self):
        """True when the talk appears in at least one schedule item."""
        if self.scheduleitem_set.all():
            return True
        return False
    get_in_schedule.short_description = _('Added to schedule')
    get_in_schedule.boolean = True
    def has_url(self):
        """Test if the talk has urls associated with it"""
        if self.urls.all():
            return True
        return False
    has_url.short_description = _('Has URL')
    has_url.boolean = True
    @property
    def is_late_submission(self):
        """True when submitted after the talk type's deadline (if any)."""
        if self.talk_type and self.talk_type.submission_deadline:
            return self.submission_time > self.talk_type.submission_deadline
        else:
            return False
    @property
    def review_score(self):
        """Mean of the per-review average scores; None without reviews."""
        # Overridden in admin, to allow sorting
        reviews = [review.avg_score for review in self.reviews.all() if review.avg_score is not None]
        if not reviews:
            return None
        return sum(reviews) / len(reviews)
    @property
    def review_count(self):
        """Number of reviews recorded for this talk."""
        # Overridden in admin, to allow sorting
        return self.reviews.all().count()
    # Helpful properties for the templates
    accepted = property(fget=lambda x: x.status == ACCEPTED)
    provisional = property(fget=lambda x: x.status == PROVISIONAL)
    submitted = property(fget=lambda x: x.status == SUBMITTED)
    under_consideration = property(
        fget=lambda x: x.status == UNDER_CONSIDERATION)
    reject = property(fget=lambda x: x.status == REJECTED)
    cancelled = property(fget=lambda x: x.status == CANCELLED)
    withdrawn = property(fget=lambda x: x.status == WITHDRAWN)
    def _is_among_authors(self, user):
        """True when *user* is the corresponding author or a listed author."""
        if self.corresponding_author.username == user.username:
            return True
        # not chaining with logical-or to avoid evaluation of the queryset
        return self.authors.filter(username=user.username).exists()
    def can_view(self, user):
        """Whether *user* may see this talk."""
        if user.has_perm('talks.view_all_talks'):
            return True
        if self._is_among_authors(user):
            return True
        if self.accepted or self.cancelled:
            return True
        return False
    @classmethod
    def can_view_all(cls, user):
        """Whether *user* may see every talk regardless of status."""
        return user.has_perm('talks.view_all_talks')
    def can_edit(self, user):
        """Whether *user* may edit this talk."""
        if user.has_perm('talks.change_talk'):
            return True
        if self.under_consideration or self.submitted:
            if self._is_among_authors(user):
                return True
        return False
    def can_review(self, user):
        """Whether *user* may review this talk (reviewers who are not authors)."""
        return (user.has_perm('talks.add_review')
                and not self._is_among_authors(user))
@reversion.register()
class TalkUrl(models.Model):
    """ A means for attaching relevant URLs to the talk (e.g. videos, slides).
    These URL associations are explicitly not intended to be edited by the
    author of the talk, but rather by the conference organizers.
    URLs with public set to False are not publicly visible.
    """
    description = models.CharField(max_length=256)
    url = models.URLField(max_length=1024)
    talk = models.ForeignKey(Talk, related_name='urls',
                             on_delete=models.CASCADE)
    # Non-public urls are hidden from general visitors.
    public = models.BooleanField(default=True)
@reversion.register(follow=('scores',))
class Review(models.Model):
    """One reviewer's assessment of a talk, with per-aspect Score rows."""
    talk = models.ForeignKey(Talk, on_delete=models.CASCADE,
                             verbose_name=_('talk'),
                             related_name='reviews')
    reviewer = models.ForeignKey(settings.AUTH_USER_MODEL,
                                 verbose_name=_('reviewer'),
                                 on_delete=models.CASCADE)
    notes = MarkupField(
        _('notes'),
        null=True, blank=True,
        help_text=_("Comments on the proposal (markdown)"))
    def __str__(self):
        return _(u'Review of %(title)s by %(reviewer)s (%(average_score)s)') % {
            'title': self.talk.title,
            'reviewer': self.reviewer,
            'average_score': self.avg_score}
    @property
    def avg_score(self):
        """Average of the attached scores' values."""
        return self.scores.aggregate(total=models.Avg('value'))['total']
    def is_current(self):
        """True if this review is at least as recent as the talk's last revision."""
        def last_updated(obj):
            # Newest reversion Version of obj; its revision timestamp serves
            # as the "last edited" time.
            version = Version.objects.get_for_object(obj).first()
            return version.revision.date_created
        return last_updated(self) >= last_updated(self.talk)
    is_current.boolean = True
    class Meta:
        unique_together = (('talk', 'reviewer'),)
        verbose_name = _('review')
        verbose_name_plural = _('reviews')
class ReviewAspect(models.Model):
    """A named dimension on which talks are scored."""
    name = models.CharField(_('name'), max_length=255)
    def __str__(self):
        return self.name
    class Meta:
        verbose_name = _('review aspect')
        verbose_name_plural = _('review aspects')
@reversion.register()
class Score(models.Model):
    """A single integer score for one aspect of one review."""
    review = models.ForeignKey(Review, on_delete=models.CASCADE,
                               related_name='scores')
    aspect = models.ForeignKey(ReviewAspect, on_delete=models.CASCADE)
    # Bounds come from settings.WAFER_TALK_REVIEW_SCORES: (min, max).
    value = models.IntegerField(default=0, validators=[
        validators.MinValueValidator(settings.WAFER_TALK_REVIEW_SCORES[0]),
        validators.MaxValueValidator(settings.WAFER_TALK_REVIEW_SCORES[1])
    ])
    def __str__(self):
        review = self.review
        return _(u'Review of %(title)s by %(reviewer)s on %(aspect)s: %(score)i') % {
            'title': review.talk.title,
            'reviewer': review.reviewer,
            'aspect': self.aspect.name,
            'score': self.value}
    class Meta:
        unique_together = (('review', 'aspect'),)
        verbose_name = _('score')
        verbose_name_plural = _('scores')
| |
"""
Copyright (C) 2004-2015 Pivotal Software, Inc. All rights reserved.
This program and the accompanying materials are made available under
the terms of the under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os, re
import platform
import tinctest
from tinctest.lib import run_shell_command, local_path
from tinctest import logger
class Gppkg:
    """Helpers for downloading, installing and removing GPDB gppkg packages.

    All heavy lifting is shelled out to the ``gppkg`` command line tool via
    run_shell_command, which fills a {'rc', 'stderr', 'stdout'} result dict.
    """
    DEFAULT_BUILD_PROD_URL = "http://artifacts-cache.ci.eng.pivotal.io/dist/GPDB"
    # Optional override for the artifact server, taken from the environment.
    GPPKG_URL = os.environ.get('GPPKG_RC_URL', None)

    def check_pkg_exists(self, pkgname):
        """Return (True, listing_line) if *pkgname* is installed, else (False, None)."""
        cmd = 'gppkg -q --all'
        res = {'rc': 0, 'stderr': '', 'stdout': ''}
        run_shell_command(cmd, 'run gppkg', res)
        logger.debug(res['stdout'])
        # Skip the first output line; the remaining lines list packages.
        pkgs = res['stdout'].strip().split('\n')[1:]
        for pkg in pkgs:
            if pkgname in pkg:
                return (True, pkg)
        return (False, None)

    def run_gppkg_uninstall(self, pkgname):
        """
        @summary: Runs gppkg -r to uninstall a gppkg. Output is written to gppkg_r.log file in current directory.
        @param pkgname: The name of the package to remove
        @return: True when the package was absent or removed, False on failure
        """
        (existed, pkg) = self.check_pkg_exists(pkgname)
        if not existed:
            logger.info('the package does not exist, no need to remove, %s' % pkgname)
            return True
        logger.debug('\nGppkgUtil: Uninstalling gppkg using gppkg file: %s' % (pkg))
        cmd = 'gppkg -r %s' % pkg
        res = {'rc': 0, 'stderr': '', 'stdout': ''}
        run_shell_command(cmd, 'run gppkg', res)
        logger.debug(res)
        if res['rc'] > 0:
            logger.info('Failed to Uninstall the package, %s' % pkgname)
            return False
        return True

    def run_gppkg_install(self, pkgfile):
        """
        @summary: Runs gppkg -i to install a gppkg. Output is written to gppkg_i.log file in current directory.
        @param pkgfile: The path of the .gppkg file to install
        @raise Exception: If gppkg install fails or if pkgfile does not exist
        """
        # Guard clause instead of a trailing else branch.
        if not os.path.isfile(pkgfile):
            raise Exception("*** ERROR: .gppkg file not found '. Make sure %s exists." % (pkgfile))
        logger.debug('\nGppkgUtil: Installing gppkg using gppkg file: %s' % (pkgfile))
        cmd = 'gppkg -i %s' % pkgfile
        res = {'rc': 0, 'stderr': '', 'stdout': ''}
        run_shell_command(cmd, 'run gppkg', res)
        logger.debug(res)
        if res['rc'] > 0:
            tinctest.logger.info('result from install package %s' % res['stdout'])
            raise Exception('Failed to install the package')
        # Some packages ship a SQL script that must be run after install.
        self.check_and_install_sql(res['stdout'])

    def check_and_install_sql(self, output=None):
        """Scan gppkg output for a 'Please run psql ...' hint and run that SQL file."""
        lines = output.strip().split('\n')
        res = {'rc': 0, 'stderr': '', 'stdout': ''}
        marker = 'Please run psql -d mydatabase -f $GPHOME'
        for line in lines:
            if marker in line:
                # Recover the $GPHOME-relative path of the shipped SQL file.
                sql_path = os.environ.get('GPHOME') + line.split(marker)[1].split(' ')[0]
                run_shell_command(
                    'psql -d %s -f %s' % (os.environ.get('PGDATABASE', 'gptest'), sql_path),
                    'run sql to build functions for the package', res)
                tinctest.logger.info('running sql file %s, result is %s' % (sql_path, res['stdout']))
                break

    def download_pkg(self, product_version, gppkg):
        """
        Download gppkg from artifacts server.

        Returns (0, target_dir, package_name) on success and (-1, None, None)
        when no download URL could be determined.
        """
        target_dir = local_path('download/')
        if not os.path.exists(target_dir):
            os.makedirs(target_dir)
        (rc, download_link, package_name) = self.get_download_url_from_build_prod(product_version, gppkg)
        if rc != 0:
            return (-1, None, None)
        # The backquoted `basename` is expanded by the shell, so the output
        # file is "<target_dir><basename of download_link>".
        wget_cmd = 'wget --html-extension %s -O %s`basename %s`' % (download_link, target_dir, download_link)
        logger.debug('Download link: %s' % wget_cmd)
        res = {'rc': 0, 'stderr': '', 'stdout': ''}
        run_shell_command(wget_cmd, 'run wget', res)
        if res['rc'] > 0:
            raise Exception("Gppkg download failed")
        return (0, target_dir, package_name)

    def gppkg_install(self, product_version, gppkg):
        """Ensure *gppkg* is installed, downloading it if needed; restarts GPDB."""
        (existed, _) = self.check_pkg_exists(gppkg)
        if existed:
            return True
        (rc, pkg_dir, pkg_name) = self.download_pkg(product_version, gppkg)
        if rc != 0:
            return False
        pkgfile = local_path(pkg_dir + pkg_name)
        self.run_gppkg_install(pkgfile)
        # Restart the cluster so the newly installed package is picked up.
        run_shell_command('gpstop -air')
        return True

    def get_download_url_from_build_prod(self, product_version, gppkg):
        """Compute (rc, download_url, package_name) for *gppkg*.

        Returns (-1, None, None) when the package is not compatible with the
        detected OS.
        """
        # defaults to be 4.2
        gpdb_version = '4.2'
        if product_version.startswith('4.3'):
            gpdb_version = '4.3'
        orca = ""
        # NOTE(review): lexicographic string comparison -- assumed adequate
        # for the version strings used here.
        if product_version >= '4.3.5':
            orca = 'orca'
        os_, platform_ = self.get_os_platform()
        compatiable = self.check_os_compatibility(os_, gppkg)
        if not compatiable:
            logger.error("the package %s is not compatiable with the os %s, please make sure the compatiable package exists" % (gppkg, os_))
            return (-1, None, None)
        build_prod_host = self.DEFAULT_BUILD_PROD_URL
        gppkg_config = self.getconfig(product_version=gpdb_version, gppkg=gppkg)
        gppkg_config['pkg'] = gppkg
        gppkg_config['gpdbversion'] = gpdb_version
        gppkg_config['os'] = self.failover_gppkg_to_os_version(os_, gppkg)
        gppkg_config['platform'] = platform_
        gppkg_config['type'] = 'gppkg'
        gppkg_config['orca'] = orca
        # GPDB 4.2 and 4.3 use different naming formats for gppkg files.
        if 'gpdbversion' in gppkg_config and 'ossversion' in gppkg_config:
            gppkg_name = "%(pkg)s-ossv%(ossversion)s_pv%(version)s_gpdb%(gpdbversion)s%(orca)s-%(os)s-%(platform)s.%(type)s" % gppkg_config
        elif gpdb_version == '4.3':
            gppkg_name = "%(pkg)s-pv%(version)s_gpdb%(gpdbversion)s%(orca)s-%(os)s-%(platform)s.%(type)s" % gppkg_config
        else:
            gppkg_name = "%(pkg)s-%(version)s-%(os)s-%(platform)s.%(type)s" % gppkg_config
        download_url = build_prod_host + '/gppkg/%(pkg)s/' % gppkg_config + gppkg_name
        return (0, download_url, gppkg_name)

    def getconfig(self, product_version='4.2', gppkg=None):
        """Parse gppkg.<version>.config and return the property dict for *gppkg*.

        Returns an empty dict when the package has no entry.  (Previously an
        empty *tuple* was returned in that case, which crashed callers that
        immediately perform dict item assignments on the result.)
        """
        config_file = local_path('gppkg.' + product_version + '.config')
        config = {}
        # Close the file deterministically instead of leaking the handle.
        with open(config_file, 'r') as fd:
            for line in fd:
                if gppkg in line:
                    properties = line.strip().split(":")[1]
                    # Last matching line wins, as before.
                    config = dict(item.split("=") for item in properties.split(";") if item)
        return config

    def get_os_compatiable_pkg_list(self, os_='rhel5'):
        """Return the list of package names configured as compatible with *os_*."""
        config_file = local_path('gppkg_platform.config')
        if not os.path.exists(config_file):
            raise Exception("gppkg_platform.config not found under: %s" % local_path(''))
        compatiable_pkg_list = []
        # Plain loop instead of a side-effecting list comprehension, and the
        # file handle is closed via the context manager.
        with open(config_file, 'r') as fd:
            for line in fd:
                if os_ in line:
                    properties = line.strip().split(":")[1]
                    compatiable_pkg_list.extend(
                        item for item in properties.split(",") if item)
        return compatiable_pkg_list

    def get_os_platform(self):
        """Detect and return (os_release_tag, machine_arch), e.g. ('rhel6', 'x86_64')."""
        from sys import platform as _platform
        machine = ''
        if _platform == 'linux' or _platform == 'linux2':  # Both SuSE and RHEL returns linux
            if os.path.exists("/etc/SuSE-release"):
                machine = 'suse'
            else:
                machine = 'redhat'
        elif _platform == 'sunos5':
            machine = 'solaris'
        if not machine:
            raise Exception('unable to determine the platform')
        # Initialise so an unrecognised release raises a clear Exception
        # below instead of an UnboundLocalError.
        os_ = None
        cmd = 'cat '
        res = {'rc': 0, 'stderr': '', 'stdout': ''}
        if machine.lower() == 'suse':
            cmd = cmd + '/etc/SuSE-release'
            run_shell_command(cmd, 'check os kernel version', res)
            if 'SUSE Linux Enterprise Server 11' in res['stdout']:
                os_ = 'suse11'
            elif 'SUSE Linux Enterprise Server 10' in res['stdout']:
                os_ = 'suse10'
        elif machine.lower() == 'redhat':
            cmd = cmd + '/etc/redhat-release'
            run_shell_command(cmd, 'check os kernel version', res)
            if 'Linux Server release 5.' in res['stdout']:
                os_ = 'rhel5'
            elif 'Linux Server release 6.' in res['stdout']:
                os_ = 'rhel6'
        elif machine.lower() == 'solaris':
            cmd = cmd + '/etc/release'
            run_shell_command(cmd, 'check os kernel version', res)
            if 'Solaris 10' in res['stdout']:
                os_ = 'sol10'
            elif 'Solaris 11' in res['stdout']:
                os_ = 'sol11'
        logger.debug(res['stdout'])
        if os_ is None:
            raise Exception('unable to determine the %s os version' % machine)
        return os_, platform.machine()

    def check_os_compatibility(self, os_='rhel5', pkg_name=None):
        """True when *pkg_name* is listed as compatible with *os_*."""
        return pkg_name in self.get_os_compatiable_pkg_list(os_)

    def failover_gppkg_to_os_version(self, os_=None, pkg_name=None):
        """this function basically return a gppkg version which works on current platform
        except the plperl needs rhel6, rhel5, and suse10, suse11 for different platform
        others can use the suse10, rhel5 version for both platforms
        """
        if pkg_name == 'plperl':
            # plperl ships a separate build per OS release; no fallback.
            return os_
        # Other packages reuse the older release's build on newer platforms.
        fallbacks = {'suse11': 'suse10', 'rhel6': 'rhel5', 'sol11': 'sol10'}
        return fallbacks.get(os_, os_)
| |
from django.db import connection, transaction
from django.db.backends import util
from django.db.models import signals, get_model
from django.db.models.fields import AutoField, Field, IntegerField, PositiveIntegerField, PositiveSmallIntegerField, FieldDoesNotExist
from django.db.models.related import RelatedObject
from django.db.models.query import QuerySet
from django.db.models.query_utils import QueryWrapper
from django.utils.encoding import smart_unicode
from django.utils.translation import ugettext_lazy, string_concat, ungettext, ugettext as _
from django.utils.functional import curry
from django.core import exceptions
from django import forms
try:
    set
except NameError:
    from sets import Set as set # Python 2.3 fallback
# Marker used for ForeignKey("self")-style recursive relations.
RECURSIVE_RELATIONSHIP_CONSTANT = 'self'
# Maps (app_label, model_name) -> [(cls, field, operation), ...] for
# relations declared against models that have not been loaded yet.
pending_lookups = {}
def add_lazy_relation(cls, field, relation, operation):
    """
    Adds a lookup on ``cls`` when a related field is defined using a string,
    i.e.::
        class MyModel(Model):
            fk = ForeignKey("AnotherModel")
    The string may be RECURSIVE_RELATIONSHIP_CONSTANT ("self"), a bare model
    name in the same app, or an "app_label.ModelName" reference to another
    app.  If the target model is already loaded, ``operation(field, model,
    cls)`` runs immediately; otherwise it is queued in ``pending_lookups``
    until class_prepared fires for the target model.
    """
    if relation == RECURSIVE_RELATIONSHIP_CONSTANT:
        # "self": relate back to the declaring model.
        app_label, model_name = cls._meta.app_label, cls.__name__
    else:
        bits = relation.split(".")
        if len(bits) == 2:
            # "app.Model" style reference.
            app_label, model_name = bits
        else:
            # Bare model name (or malformed string): assume the current app.
            app_label, model_name = cls._meta.app_label, relation
    # Resolve now if the target model is already loaded, otherwise defer.
    model = get_model(app_label, model_name, False)
    if model:
        operation(field, model, cls)
    else:
        pending_lookups.setdefault((app_label, model_name), []).append(
            (cls, field, operation))
def do_pending_lookups(sender, **kwargs):
    """
    Handle any pending relations to the sending model. Sent from class_prepared.
    """
    pending = pending_lookups.pop((sender._meta.app_label, sender.__name__), [])
    for cls, field, operation in pending:
        operation(field, sender, cls)
# Resolve deferred lazy relations as soon as each model class is prepared.
signals.class_prepared.connect(do_pending_lookups)
#HACK
class RelatedField(object):
    """Mixin shared by relation fields.

    Handles lazy string references to the target model and PK-based
    db-prep for lookups.  (Python 2 era code: uses ``basestring`` and the
    old ``raise Type, value`` form.)
    """
    def contribute_to_class(self, cls, name):
        sup = super(RelatedField, self)
        # Add an accessor to allow easy determination of the related query path for this field
        self.related_query_name = curry(self._get_related_query_name, cls._meta)
        if hasattr(sup, 'contribute_to_class'):
            sup.contribute_to_class(cls, name)
        if not cls._meta.abstract and self.rel.related_name:
            # Expand the '%(class)s' placeholder used on abstract bases.
            self.rel.related_name = self.rel.related_name % {'class': cls.__name__.lower()}
        other = self.rel.to
        if isinstance(other, basestring):
            # Target given as a string -- resolve lazily once that model loads.
            def resolve_related_class(field, model, cls):
                field.rel.to = model
                field.do_related_class(model, cls)
            add_lazy_relation(cls, self, other, resolve_related_class)
        else:
            self.do_related_class(other, cls)
    def set_attributes_from_rel(self):
        """Fill in name/verbose_name/field_name defaults from the target model."""
        self.name = self.name or (self.rel.to._meta.object_name.lower() + '_' + self.rel.to._meta.pk.name)
        if self.verbose_name is None:
            self.verbose_name = self.rel.to._meta.verbose_name
        self.rel.field_name = self.rel.field_name or self.rel.to._meta.pk.name
    def do_related_class(self, other, cls):
        """Finish wiring this field to the (now resolved) target model."""
        self.set_attributes_from_rel()
        related = RelatedObject(other, cls, self)
        if not cls._meta.abstract:
            self.contribute_to_related_class(other, related)
    def get_db_prep_lookup(self, lookup_type, value, connection=connection):
        # If we are doing a lookup on a Related Field, we must be
        # comparing object instances. The value should be the PK of value,
        # not value itself.
        def pk_trace(value):
            # Value may be a primary key, or an object held in a relation.
            # If it is an object, then we need to get the primary key value for
            # that object. In certain conditions (especially one-to-one relations),
            # the primary key may itself be an object - so we need to keep drilling
            # down until we hit a value that can be used for a comparison.
            v, field = value, None
            try:
                while True:
                    v, field = getattr(v, v._meta.pk.name), v._meta.pk
            except AttributeError:
                pass
            if field:
                if lookup_type in ('range', 'in'):
                    v = [v]
                v = field.get_db_prep_lookup(lookup_type, v, connection)
                if isinstance(v, list):
                    v = v[0]
            return v
        if hasattr(value, 'as_sql'):
            # Nested query: wrap its SQL rather than tracing a PK.
            sql, params = value.as_sql()
            return QueryWrapper(('(%s)' % sql), params)
        # FIXME: lt and gt are explicitally allowed to make
        # get_(next/prev)_by_date work; other lookups are not allowed since that
        # gets messy pretty quick. This is a good candidate for some refactoring
        # in the future.
        if lookup_type in ['exact', 'gt', 'lt']:
            return [pk_trace(value)]
        if lookup_type in ('range', 'in'):
            return [pk_trace(v) for v in value]
        elif lookup_type == 'isnull':
            return []
        raise TypeError, "Related Field has invalid lookup: %s" % lookup_type
    def _get_related_query_name(self, opts):
        # This method defines the name that can be used to identify this
        # related object in a table-spanning query. It uses the lower-cased
        # object_name by default, but this can be overridden with the
        # "related_name" option.
        return self.rel.related_name or opts.object_name.lower()
class SingleRelatedObjectDescriptor(object):
    # This class provides the functionality that makes the related-object
    # managers available as attributes on a model class, for fields that have
    # a single "remote" value, on the class pointed to by a related field.
    # In the example "place.restaurant", the restaurant attribute is a
    # SingleRelatedObjectDescriptor instance.
    def __init__(self, related):
        self.related = related
        # Per-instance attribute name used to memoise the related object.
        self.cache_name = '_%s_cache' % related.get_accessor_name()
    def __get__(self, instance, instance_type=None):
        if instance is None:
            # Accessed on the class itself: return the descriptor.
            return self
        try:
            return getattr(instance, self.cache_name)
        except AttributeError:
            # Cache miss: fetch through the default manager and memoise.
            params = {'%s__pk' % self.related.field.name: instance._get_pk_val()}
            rel_obj = self.related.model._default_manager.get(**params)
            setattr(instance, self.cache_name, rel_obj)
            return rel_obj
    def __set__(self, instance, value):
        if instance is None:
            raise AttributeError, "%s must be accessed via instance" % self.related.opts.object_name
        # The similarity of the code below to the code in
        # ReverseSingleRelatedObjectDescriptor is annoying, but there's a bunch
        # of small differences that would make a common base class convoluted.
        # If null=True, we can assign null here, but otherwise the value needs
        # to be an instance of the related class.
        if value is None and self.related.field.null == False:
            raise ValueError('Cannot assign None: "%s.%s" does not allow null values.' %
                                (instance._meta.object_name, self.related.get_accessor_name()))
        elif value is not None and not isinstance(value, self.related.model):
            raise ValueError('Cannot assign "%r": "%s.%s" must be a "%s" instance.' %
                                (value, instance._meta.object_name,
                                 self.related.get_accessor_name(), self.related.opts.object_name))
        # Set the value of the related field
        setattr(value, self.related.field.rel.get_related_field().attname, instance)
        # Since we already know what the related object is, seed the related
        # object caches now, too. This avoids another db hit if you get the
        # object you just set.
        setattr(instance, self.cache_name, value)
        setattr(value, self.related.field.get_cache_name(), instance)
class ReverseSingleRelatedObjectDescriptor(object):
    # This class provides the functionality that makes the related-object
    # managers available as attributes on a model class, for fields that have
    # a single "remote" value, on the class that defines the related field.
    # In the example "choice.poll", the poll attribute is a
    # ReverseSingleRelatedObjectDescriptor instance.
    def __init__(self, field_with_rel):
        self.field = field_with_rel
    def __get__(self, instance, instance_type=None):
        """Fetch (and cache on the instance) the related object."""
        if instance is None:
            # Accessed on the class itself: return the descriptor.
            return self
        cache_name = self.field.get_cache_name()
        try:
            return getattr(instance, cache_name)
        except AttributeError:
            val = getattr(instance, self.field.attname)
            if val is None:
                # If NULL is an allowed value, return it.
                if self.field.null:
                    return None
                raise self.field.rel.to.DoesNotExist
            other_field = self.field.rel.get_related_field()
            if other_field.rel:
                params = {'%s__pk' % self.field.rel.field_name: val}
            else:
                params = {'%s__exact' % self.field.rel.field_name: val}
            # If the related manager indicates that it should be used for
            # related fields, respect that.
            rel_mgr = self.field.rel.to._default_manager
            if getattr(rel_mgr, 'use_for_related_fields', False):
                rel_obj = rel_mgr.get(**params)
            else:
                rel_obj = QuerySet(self.field.rel.to).get(**params)
            setattr(instance, cache_name, rel_obj)
            return rel_obj
    def __set__(self, instance, value):
        """Assign *value* as the related object and update the cached FK."""
        if instance is None:
            # BUG FIX: this previously read self._field (an attribute that
            # does not exist -- the attribute is self.field), so the error
            # path itself raised a confusing AttributeError about '_field'.
            # Also uses the raise form that is valid on both Python 2 and 3.
            raise AttributeError("%s must be accessed via instance"
                                 % self.field.name)
        # If null=True, we can assign null here, but otherwise the value needs
        # to be an instance of the related class.
        if value is None and self.field.null == False:
            raise ValueError('Cannot assign None: "%s.%s" does not allow null values.' %
                                (instance._meta.object_name, self.field.name))
        elif value is not None and not isinstance(value, self.field.rel.to):
            raise ValueError('Cannot assign "%r": "%s.%s" must be a "%s" instance.' %
                                (value, instance._meta.object_name,
                                 self.field.name, self.field.rel.to._meta.object_name))
        # Set the value of the related field
        try:
            val = getattr(value, self.field.rel.get_related_field().attname)
        except AttributeError:
            val = None
        setattr(instance, self.field.attname, val)
        # Since we already know what the related object is, seed the related
        # object cache now, too. This avoids another db hit if you get the
        # object you just set.
        setattr(instance, self.field.get_cache_name(), value)
class ForeignRelatedObjectsDescriptor(object):
    """Descriptor for the reverse side of a ForeignKey relation.

    Makes a related-object manager available as an attribute on a model
    class, for fields that have multiple "remote" values and have a
    ForeignKey pointed at them by some other model. In the example
    "poll.choice_set", the choice_set attribute is a
    ForeignRelatedObjectsDescriptor instance.
    """
    def __init__(self, related):
        self.related = related   # RelatedObject instance

    def __get__(self, instance, instance_type=None):
        if instance is None:
            return self

        rel_field = self.related.field
        rel_model = self.related.model

        # Dynamically create a class that subclasses the related
        # model's default manager.
        superclass = self.related.model._default_manager.__class__

        class RelatedManager(superclass):
            def get_query_set(self):
                return superclass.get_query_set(self).filter(**(self.core_filters))

            def add(self, *objs):
                for obj in objs:
                    setattr(obj, rel_field.name, instance)
                    obj.save()
            add.alters_data = True

            def create(self, **kwargs):
                kwargs.update({rel_field.name: instance})
                return super(RelatedManager, self).create(**kwargs)
            create.alters_data = True

            def get_or_create(self, **kwargs):
                # Update kwargs with the related object that this
                # ForeignRelatedObjectsDescriptor knows about.
                kwargs.update({rel_field.name: instance})
                return super(RelatedManager, self).get_or_create(**kwargs)
            get_or_create.alters_data = True

            # remove() and clear() are only provided if the ForeignKey can have a value of null.
            if rel_field.null:
                def remove(self, *objs):
                    val = getattr(instance, rel_field.rel.get_related_field().attname)
                    for obj in objs:
                        # Is obj actually part of this descriptor set?
                        if getattr(obj, rel_field.attname) == val:
                            setattr(obj, rel_field.name, None)
                            obj.save()
                        else:
                            # Converted from the Python 2-only ``raise E, msg`` syntax.
                            raise rel_field.rel.to.DoesNotExist("%r is not related to %r." % (obj, instance))
                remove.alters_data = True

                def clear(self):
                    for obj in self.all():
                        setattr(obj, rel_field.name, None)
                        obj.save()
                clear.alters_data = True

        manager = RelatedManager()
        attname = rel_field.rel.get_related_field().name
        manager.core_filters = {'%s__%s' % (rel_field.name, attname):
                getattr(instance, attname)}
        manager.model = self.related.model

        return manager

    def __set__(self, instance, value):
        if instance is None:
            # Converted from the Python 2-only ``raise E, msg`` syntax.
            raise AttributeError("Manager must be accessed via instance")

        manager = self.__get__(instance)
        # If the foreign key can support nulls, then completely clear the related set.
        # Otherwise, just move the named objects into the set.
        if self.related.field.null:
            manager.clear()
        manager.add(*value)
def create_many_related_manager(superclass, through=False):
    """Creates a manager that subclasses 'superclass' (which is a Manager)
    and adds behavior for many-to-many related objects.

    ``through`` is the intermediary model (or its name) when the m2m
    relation uses one, or None when the join table is managed implicitly.
    NOTE(review): the default is ``False`` (not None), which -- like a
    non-None value -- suppresses add()/remove()/clear(); the callers in
    this module always pass ``through`` explicitly, so the default never
    applies in practice. Preserved as-is for compatibility.
    """
    class ManyRelatedManager(superclass):
        def __init__(self, model=None, core_filters=None, instance=None, symmetrical=None,
                join_table=None, source_col_name=None, target_col_name=None):
            super(ManyRelatedManager, self).__init__()
            self.core_filters = core_filters
            self.model = model
            self.symmetrical = symmetrical
            self.instance = instance
            self.join_table = join_table
            self.source_col_name = source_col_name
            self.target_col_name = target_col_name
            self.through = through
            self._pk_val = self.instance._get_pk_val()
            if self._pk_val is None:
                raise ValueError("%r instance needs to have a primary key value before a many-to-many relationship can be used." % instance.__class__.__name__)

        def get_query_set(self):
            # _next_is_sticky() keeps the core filter from being merged with
            # subsequent chained filters.
            return superclass.get_query_set(self)._next_is_sticky().filter(**(self.core_filters))

        # If the ManyToMany relation has an intermediary model,
        # the add and remove methods do not exist.
        if through is None:
            def add(self, *objs):
                self._add_items(self.source_col_name, self.target_col_name, *objs)

                # If this is a symmetrical m2m relation to self, add the mirror entry in the m2m table
                if self.symmetrical:
                    self._add_items(self.target_col_name, self.source_col_name, *objs)
            add.alters_data = True

            def remove(self, *objs):
                self._remove_items(self.source_col_name, self.target_col_name, *objs)

                # If this is a symmetrical m2m relation to self, remove the mirror entry in the m2m table
                if self.symmetrical:
                    self._remove_items(self.target_col_name, self.source_col_name, *objs)
            remove.alters_data = True

            def clear(self):
                self._clear_items(self.source_col_name)

                # If this is a symmetrical m2m relation to self, clear the mirror entry in the m2m table
                if self.symmetrical:
                    self._clear_items(self.target_col_name)
            clear.alters_data = True

        def create(self, **kwargs):
            # This check needs to be done here, since we can't later remove this
            # from the method lookup table, as we do with add and remove.
            if through is not None:
                # Converted from the Python 2-only ``raise E, msg`` syntax.
                raise AttributeError("Cannot use create() on a ManyToManyField which specifies an intermediary model. Use %s's Manager instead." % through)
            new_obj = super(ManyRelatedManager, self).create(**kwargs)
            self.add(new_obj)
            return new_obj
        create.alters_data = True

        def get_or_create(self, **kwargs):
            obj, created = \
                    super(ManyRelatedManager, self).get_or_create(**kwargs)
            # We only need to add() if created because if we got an object back
            # from get() then the relationship already exists.
            if created:
                self.add(obj)
            return obj, created
        get_or_create.alters_data = True

        def _add_items(self, source_col_name, target_col_name, *objs):
            # join_table: name of the m2m link table
            # source_col_name: the PK colname in join_table for the source object
            # target_col_name: the PK colname in join_table for the target object
            # *objs - objects to add. Either object instances, or primary keys of object instances.

            # If there aren't any objects, there is nothing to do.
            if objs:
                # Check that all the objects are of the right type
                new_ids = set()
                for obj in objs:
                    if isinstance(obj, self.model):
                        new_ids.add(obj._get_pk_val())
                    else:
                        new_ids.add(obj)
                # Add the newly created or already existing objects to the join table.
                # First find out which items are already added, to avoid adding them twice.
                # NOTE: table/column identifiers are pre-quoted by the caller;
                # only the pk values travel through query parameters.
                cursor = connection.cursor()
                cursor.execute("SELECT %s FROM %s WHERE %s = %%s AND %s IN (%s)" % \
                    (target_col_name, self.join_table, source_col_name,
                    target_col_name, ",".join(['%s'] * len(new_ids))),
                    [self._pk_val] + list(new_ids))
                existing_ids = set([row[0] for row in cursor.fetchall()])

                # Add the ones that aren't there already
                for obj_id in (new_ids - existing_ids):
                    cursor.execute("INSERT INTO %s (%s, %s) VALUES (%%s, %%s)" % \
                        (self.join_table, source_col_name, target_col_name),
                        [self._pk_val, obj_id])
                transaction.commit_unless_managed()

        def _remove_items(self, source_col_name, target_col_name, *objs):
            # source_col_name: the PK colname in join_table for the source object
            # target_col_name: the PK colname in join_table for the target object
            # *objs - objects to remove

            # If there aren't any objects, there is nothing to do.
            if objs:
                # Check that all the objects are of the right type
                old_ids = set()
                for obj in objs:
                    if isinstance(obj, self.model):
                        old_ids.add(obj._get_pk_val())
                    else:
                        old_ids.add(obj)
                # Remove the specified objects from the join table
                cursor = connection.cursor()
                cursor.execute("DELETE FROM %s WHERE %s = %%s AND %s IN (%s)" % \
                    (self.join_table, source_col_name,
                    target_col_name, ",".join(['%s'] * len(old_ids))),
                    [self._pk_val] + list(old_ids))
                transaction.commit_unless_managed()

        def _clear_items(self, source_col_name):
            # source_col_name: the PK colname in join_table for the source object
            cursor = connection.cursor()
            cursor.execute("DELETE FROM %s WHERE %s = %%s" % \
                (self.join_table, source_col_name),
                [self._pk_val])
            transaction.commit_unless_managed()

    return ManyRelatedManager
class ManyRelatedObjectsDescriptor(object):
    """Descriptor for the reverse side of a ManyToManyField.

    Makes a related-object manager available as an attribute on a model
    class, for fields that have multiple "remote" values and have a
    ManyToManyField pointed at them by some other model (rather than
    having a ManyToManyField themselves). In the example
    "publication.article_set", the article_set attribute is a
    ManyRelatedObjectsDescriptor instance.
    """
    def __init__(self, related):
        self.related = related   # RelatedObject instance

    def __get__(self, instance, instance_type=None):
        if instance is None:
            return self

        # Dynamically create a class that subclasses the related
        # model's default manager.
        rel_model = self.related.model
        superclass = rel_model._default_manager.__class__
        RelatedManager = create_many_related_manager(superclass, self.related.field.rel.through)

        qn = connection.ops.quote_name

        # Note source/target columns are swapped relative to the forward
        # descriptor: we are traversing the relation in reverse.
        manager = RelatedManager(
            model=rel_model,
            core_filters={'%s__pk' % self.related.field.name: instance._get_pk_val()},
            instance=instance,
            symmetrical=False,
            join_table=qn(self.related.field.m2m_db_table()),
            source_col_name=qn(self.related.field.m2m_reverse_name()),
            target_col_name=qn(self.related.field.m2m_column_name())
        )

        return manager

    def __set__(self, instance, value):
        if instance is None:
            # Converted from the Python 2-only ``raise E, msg`` syntax.
            raise AttributeError("Manager must be accessed via instance")

        through = getattr(self.related.field.rel, 'through', None)
        if through is not None:
            raise AttributeError("Cannot set values on a ManyToManyField which specifies an intermediary model. Use %s's Manager instead." % through)

        manager = self.__get__(instance)
        manager.clear()
        manager.add(*value)
class ReverseManyRelatedObjectsDescriptor(object):
    """Descriptor for the forward side of a ManyToManyField.

    Makes a related-object manager available as an attribute on a model
    class, for fields that have multiple "remote" values and have a
    ManyToManyField defined in their model (rather than having another
    model pointed *at* them). In the example "article.publications", the
    publications attribute is a ReverseManyRelatedObjectsDescriptor
    instance.
    """
    def __init__(self, m2m_field):
        self.field = m2m_field

    def __get__(self, instance, instance_type=None):
        if instance is None:
            return self

        # Dynamically create a class that subclasses the related
        # model's default manager.
        rel_model = self.field.rel.to
        superclass = rel_model._default_manager.__class__
        RelatedManager = create_many_related_manager(superclass, self.field.rel.through)

        qn = connection.ops.quote_name

        # A relation to self is only treated symmetrically when the instance
        # really is of the related model (not a subclass pointing elsewhere).
        manager = RelatedManager(
            model=rel_model,
            core_filters={'%s__pk' % self.field.related_query_name(): instance._get_pk_val()},
            instance=instance,
            symmetrical=(self.field.rel.symmetrical and instance.__class__ == rel_model),
            join_table=qn(self.field.m2m_db_table()),
            source_col_name=qn(self.field.m2m_column_name()),
            target_col_name=qn(self.field.m2m_reverse_name())
        )

        return manager

    def __set__(self, instance, value):
        if instance is None:
            # Converted from the Python 2-only ``raise E, msg`` syntax.
            raise AttributeError("Manager must be accessed via instance")

        through = getattr(self.field.rel, 'through', None)
        if through is not None:
            raise AttributeError("Cannot set values on a ManyToManyField which specifies an intermediary model. Use %s's Manager instead." % through)

        manager = self.__get__(instance)
        manager.clear()
        manager.add(*value)
class ManyToOneRel(object):
    """Relation metadata holder for a many-to-one (ForeignKey) relation."""

    def __init__(self, to, field_name, related_name=None,
            limit_choices_to=None, lookup_overrides=None, parent_link=False):
        # ``to`` must expose ``_meta`` (i.e. be a model class) unless it is
        # given as a lazy string reference.
        try:
            to._meta
        except AttributeError: # to._meta doesn't exist, so it must be RECURSIVE_RELATIONSHIP_CONSTANT
            assert isinstance(to, basestring), "'to' must be either a model, a model name or the string %r" % RECURSIVE_RELATIONSHIP_CONSTANT
        self.to = to
        self.field_name = field_name
        self.related_name = related_name
        self.limit_choices_to = {} if limit_choices_to is None else limit_choices_to
        self.lookup_overrides = lookup_overrides if lookup_overrides else {}
        self.multiple = True
        self.parent_link = parent_link

    def get_related_field(self):
        """Return the Field in the 'to' model to which this relation is tied."""
        info = self.to._meta.get_field_by_name(self.field_name)
        # info is (field, model, direct, m2m); only direct fields qualify.
        if not info[2]:
            raise FieldDoesNotExist("No related field named '%s'" %
                    self.field_name)
        return info[0]
class OneToOneRel(ManyToOneRel):
    """Relation metadata holder for a one-to-one relation.

    Identical to ManyToOneRel except that the reverse side holds at most
    one object, so ``multiple`` is False.
    """

    def __init__(self, to, field_name, related_name=None,
            limit_choices_to=None, lookup_overrides=None, parent_link=False):
        super(OneToOneRel, self).__init__(to, field_name,
                related_name=related_name, limit_choices_to=limit_choices_to,
                lookup_overrides=lookup_overrides, parent_link=parent_link)
        # Override the base class: only one related object exists.
        self.multiple = False
class ManyToManyRel(object):
    """Relation metadata holder for a many-to-many relation."""

    def __init__(self, to, related_name=None, limit_choices_to=None,
            symmetrical=True, through=None):
        self.to = to
        self.related_name = related_name
        # Normalize the choice-restriction mapping to a dict.
        self.limit_choices_to = {} if limit_choices_to is None else limit_choices_to
        self.symmetrical = symmetrical
        self.multiple = True
        self.through = through
class ForeignKey(RelatedField, Field):
    """A many-to-one relation field.

    Stores the primary key (or ``to_field``) of the related object in a
    ``<name>_id`` database column and exposes the related object itself via
    a ReverseSingleRelatedObjectDescriptor on the model class.
    """
    # An empty string is not a valid foreign key value; NULL is used instead.
    empty_strings_allowed = False
    def __init__(self, to, to_field=None, rel_class=ManyToOneRel, **kwargs):
        """``to`` is the target model (or lazy model-name string);
        ``to_field`` defaults to the target's primary key."""
        try:
            to_name = to._meta.object_name.lower()
        except AttributeError: # to._meta doesn't exist, so it must be RECURSIVE_RELATIONSHIP_CONSTANT
            assert isinstance(to, basestring), "%s(%r) is invalid. First parameter to ForeignKey must be either a model, a model name, or the string %r" % (self.__class__.__name__, to, RECURSIVE_RELATIONSHIP_CONSTANT)
        else:
            assert not to._meta.abstract, "%s cannot define a relation with abstract class %s" % (self.__class__.__name__, to._meta.object_name)
            to_field = to_field or to._meta.pk.name
        kwargs['verbose_name'] = kwargs.get('verbose_name', None)
        # Build the relation descriptor from the relation-specific kwargs;
        # pop() keeps them out of the generic Field constructor below.
        kwargs['rel'] = rel_class(to, to_field,
            related_name=kwargs.pop('related_name', None),
            limit_choices_to=kwargs.pop('limit_choices_to', None),
            lookup_overrides=kwargs.pop('lookup_overrides', None),
            parent_link=kwargs.pop('parent_link', False))
        Field.__init__(self, **kwargs)
        # Foreign key columns are indexed by default.
        self.db_index = True
    def get_attname(self):
        """Return the instance attribute name holding the raw key value."""
        return '%s_id' % self.name
    def get_validator_unique_lookup_type(self):
        """Return the lookup used when validating uniqueness of this field."""
        return '%s__%s__exact' % (self.name, self.rel.get_related_field().name)
    def get_default(self):
        "Here we check if the default value is an object and return the to_field if so."
        field_default = super(ForeignKey, self).get_default()
        if isinstance(field_default, self.rel.to):
            return getattr(field_default, self.rel.get_related_field().attname)
        return field_default
    def get_db_prep_save(self, value, connection=connection):
        """Prepare the key value for storage; '' and None both become NULL."""
        if value == '' or value == None:
            return None
        else:
            # Delegate to the target field so the key is saved with the
            # target column's type conversions.
            return self.rel.get_related_field().get_db_prep_save(value, connection)
    def value_to_string(self, obj):
        """Serialize this field's value on ``obj`` for dumping."""
        if not obj:
            # In required many-to-one fields with only one available choice,
            # select that one available choice. Note: For SelectFields
            # we have to check that the length of choices is *2*, not 1,
            # because SelectFields always have an initial "blank" value.
            if not self.blank and self.choices:
                choice_list = self.get_choices_default()
                if len(choice_list) == 2:
                    return smart_unicode(choice_list[1][0])
        return Field.value_to_string(self, obj)
    def contribute_to_class(self, cls, name):
        """Install the forward descriptor and record the relation target."""
        super(ForeignKey, self).contribute_to_class(cls, name)
        setattr(cls, self.name, ReverseSingleRelatedObjectDescriptor(self))
        if isinstance(self.rel.to, basestring):
            target = self.rel.to
        else:
            target = self.rel.to._meta.db_table
        # Used to detect multiple relations to the same target table.
        cls._meta.duplicate_targets[self.column] = (target, "o2m")
    def contribute_to_related_class(self, cls, related):
        """Install the reverse (related-set) descriptor on the target model."""
        setattr(cls, related.get_accessor_name(), ForeignRelatedObjectsDescriptor(related))
    def formfield(self, **kwargs):
        """Return a ModelChoiceField limited to the relation's choices."""
        defaults = {
            'form_class': forms.ModelChoiceField,
            'queryset': self.rel.to._default_manager.complex_filter(
                                    self.rel.limit_choices_to),
            'to_field_name': self.rel.field_name,
        }
        defaults.update(kwargs)
        return super(ForeignKey, self).formfield(**defaults)
    def db_type(self):
        # The database column type of a ForeignKey is the column type
        # of the field to which it points. An exception is if the ForeignKey
        # points to an AutoField/PositiveIntegerField/PositiveSmallIntegerField,
        # in which case the column type is simply that of an IntegerField.
        # If the database needs similar types for key fields however, the only
        # thing we can do is making AutoField an IntegerField.
        rel_field = self.rel.get_related_field()
        if (isinstance(rel_field, AutoField) or
                (not connection.features.related_fields_match_type and
                isinstance(rel_field, (PositiveIntegerField,
                                       PositiveSmallIntegerField)))):
            return IntegerField().db_type()
        return rel_field.db_type()
class OneToOneField(ForeignKey):
    """A ForeignKey that enforces uniqueness.

    Behaves exactly like a ForeignKey except that it always carries a
    "unique" constraint and the reverse relation yields the single object
    pointed to (there can only ever be one) instead of a list.
    """

    def __init__(self, to, to_field=None, **kwargs):
        # The unique constraint is what distinguishes this field from a
        # plain ForeignKey at the database level.
        kwargs['unique'] = True
        super(OneToOneField, self).__init__(to, to_field, OneToOneRel, **kwargs)

    def contribute_to_related_class(self, cls, related):
        accessor = related.get_accessor_name()
        setattr(cls, accessor, SingleRelatedObjectDescriptor(related))

    def formfield(self, **kwargs):
        # Parent links are managed by model inheritance, not by forms.
        if self.rel.parent_link:
            return None
        return super(OneToOneField, self).formfield(**kwargs)
class ManyToManyField(RelatedField, Field):
    """A many-to-many relation field.

    The relation is stored in a join table that is either created
    implicitly or supplied by the user as an intermediary ('through')
    model.
    """
    def __init__(self, to, **kwargs):
        try:
            assert not to._meta.abstract, "%s cannot define a relation with abstract class %s" % (self.__class__.__name__, to._meta.object_name)
        except AttributeError: # to._meta doesn't exist, so it must be RECURSIVE_RELATIONSHIP_CONSTANT
            assert isinstance(to, basestring), "%s(%r) is invalid. First parameter to ManyToManyField must be either a model, a model name, or the string %r" % (self.__class__.__name__, to, RECURSIVE_RELATIONSHIP_CONSTANT)
        kwargs['verbose_name'] = kwargs.get('verbose_name', None)
        kwargs['rel'] = ManyToManyRel(to,
            related_name=kwargs.pop('related_name', None),
            limit_choices_to=kwargs.pop('limit_choices_to', None),
            symmetrical=kwargs.pop('symmetrical', True),
            through=kwargs.pop('through', None))
        self.db_table = kwargs.pop('db_table', None)
        if kwargs['rel'].through is not None:
            # An explicit intermediary model owns the join table.
            self.creates_table = False
            assert self.db_table is None, "Cannot specify a db_table if an intermediary model is used."
        else:
            self.creates_table = True
        Field.__init__(self, **kwargs)
        msg = ugettext_lazy('Hold down "Control", or "Command" on a Mac, to select more than one.')
        self.help_text = string_concat(self.help_text, ' ', msg)
    def get_choices_default(self):
        """Return the field's choices without the leading blank entry."""
        return Field.get_choices(self, include_blank=False)
    def _get_m2m_db_table(self, opts):
        "Function that can be curried to provide the m2m table name for this relation"
        if self.rel.through is not None:
            return self.rel.through_model._meta.db_table
        elif self.db_table:
            return self.db_table
        else:
            return util.truncate_name('%s_%s' % (opts.db_table, self.name),
                                      connection.ops.max_name_length())
    def _get_m2m_column_name(self, related):
        "Function that can be curried to provide the source column name for the m2m table"
        try:
            return self._m2m_column_name_cache
        # BUG FIX: the bare ``except:`` could mask arbitrary errors; only
        # the cache-miss AttributeError should trigger recomputation.
        except AttributeError:
            if self.rel.through is not None:
                for f in self.rel.through_model._meta.fields:
                    if hasattr(f,'rel') and f.rel and f.rel.to == related.model:
                        self._m2m_column_name_cache = f.column
                        break
            # If this is an m2m relation to self, avoid the inevitable name clash
            elif related.model == related.parent_model:
                self._m2m_column_name_cache = 'from_' + related.model._meta.object_name.lower() + '_id'
            else:
                self._m2m_column_name_cache = related.model._meta.object_name.lower() + '_id'
            # Return the newly cached value
            return self._m2m_column_name_cache
    def _get_m2m_reverse_name(self, related):
        "Function that can be curried to provide the related column name for the m2m table"
        try:
            return self._m2m_reverse_name_cache
        # BUG FIX: narrowed from a bare ``except:`` (see _get_m2m_column_name).
        except AttributeError:
            if self.rel.through is not None:
                found = False
                for f in self.rel.through_model._meta.fields:
                    if hasattr(f,'rel') and f.rel and f.rel.to == related.parent_model:
                        if related.model == related.parent_model:
                            # If this is an m2m-intermediate to self,
                            # the first foreign key you find will be
                            # the source column. Keep searching for
                            # the second foreign key.
                            if found:
                                self._m2m_reverse_name_cache = f.column
                                break
                            else:
                                found = True
                        else:
                            self._m2m_reverse_name_cache = f.column
                            break
            # If this is an m2m relation to self, avoid the inevitable name clash
            elif related.model == related.parent_model:
                self._m2m_reverse_name_cache = 'to_' + related.parent_model._meta.object_name.lower() + '_id'
            else:
                self._m2m_reverse_name_cache = related.parent_model._meta.object_name.lower() + '_id'
            # Return the newly cached value
            return self._m2m_reverse_name_cache
    def isValidIDList(self, field_data, all_data):
        "Validates that the value is a valid list of foreign keys"
        mod = self.rel.to
        try:
            pks = map(int, field_data.split(','))
        except ValueError:
            # the CommaSeparatedIntegerField validator will catch this error
            return
        objects = mod._default_manager.in_bulk(pks)
        if len(objects) != len(pks):
            badkeys = [k for k in pks if k not in objects]
            raise exceptions.ValidationError(
                ungettext("Please enter valid %(self)s IDs. The value %(value)r is invalid.",
                    "Please enter valid %(self)s IDs. The values %(value)r are invalid.",
                    len(badkeys)) % {
                    'self': self.verbose_name,
                    'value': len(badkeys) == 1 and badkeys[0] or tuple(badkeys),
                })
    def value_to_string(self, obj):
        """Serialize the related objects' primary keys for dumping."""
        data = ''
        if obj:
            qs = getattr(obj, self.name).all()
            data = [instance._get_pk_val() for instance in qs]
        else:
            # In required many-to-many fields with only one available choice,
            # select that one available choice.
            if not self.blank:
                choices_list = self.get_choices_default()
                if len(choices_list) == 1:
                    data = [choices_list[0][0]]
        return smart_unicode(data)
    def contribute_to_class(self, cls, name):
        """Install the forward descriptor and the m2m table-name accessor."""
        # To support multiple relations to self, it's useful to have a non-None
        # related name on symmetrical relations for internal reasons. The
        # concept doesn't make a lot of sense externally ("you want me to
        # specify *what* on my non-reversible relation?!"), so we set it up
        # automatically. The funky name reduces the chance of an accidental
        # clash.
        if self.rel.symmetrical and self.rel.to == "self" and self.rel.related_name is None:
            self.rel.related_name = "%s_rel_+" % name
        super(ManyToManyField, self).contribute_to_class(cls, name)
        # Add the descriptor for the m2m relation
        setattr(cls, self.name, ReverseManyRelatedObjectsDescriptor(self))
        # Set up the accessor for the m2m table name for the relation
        self.m2m_db_table = curry(self._get_m2m_db_table, cls._meta)
        # Populate some necessary rel arguments so that cross-app relations
        # work correctly.
        if isinstance(self.rel.through, basestring):
            def resolve_through_model(field, model, cls):
                field.rel.through_model = model
            add_lazy_relation(cls, self, self.rel.through, resolve_through_model)
        elif self.rel.through:
            self.rel.through_model = self.rel.through
            self.rel.through = self.rel.through._meta.object_name
        if isinstance(self.rel.to, basestring):
            target = self.rel.to
        else:
            target = self.rel.to._meta.db_table
        # Used to detect multiple relations to the same target table.
        cls._meta.duplicate_targets[self.column] = (target, "m2m")
    def contribute_to_related_class(self, cls, related):
        """Install the reverse descriptor and the m2m column-name accessors."""
        # m2m relations to self do not have a ManyRelatedObjectsDescriptor,
        # as it would be redundant - unless the field is non-symmetrical.
        if related.model != related.parent_model or not self.rel.symmetrical:
            # Add the descriptor for the m2m relation
            setattr(cls, related.get_accessor_name(), ManyRelatedObjectsDescriptor(related))
        # Set up the accessors for the column names on the m2m table
        self.m2m_column_name = curry(self._get_m2m_column_name, related)
        self.m2m_reverse_name = curry(self._get_m2m_reverse_name, related)
    def set_attributes_from_rel(self):
        # Unlike ForeignKey, an m2m field does not mirror any attribute of
        # the related field.
        pass
    def value_from_object(self, obj):
        "Returns the value of this field in the given model instance."
        return getattr(obj, self.attname).all()
    def save_form_data(self, instance, data):
        """Assign the cleaned form value to the instance's m2m attribute."""
        setattr(instance, self.attname, data)
    def formfield(self, **kwargs):
        """Return a ModelMultipleChoiceField limited to the relation's choices."""
        defaults = {'form_class': forms.ModelMultipleChoiceField, 'queryset': self.rel.to._default_manager.complex_filter(self.rel.limit_choices_to)}
        defaults.update(kwargs)
        # If initial is passed in, it's a list of related objects, but the
        # MultipleChoiceField takes a list of IDs.
        if defaults.get('initial') is not None:
            defaults['initial'] = [i._get_pk_val() for i in defaults['initial']]
        return super(ManyToManyField, self).formfield(**defaults)
    def db_type(self):
        # A ManyToManyField is not represented by a single column,
        # so return None.
        return None
| |
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Attention layers that can be used in sequence DNN/CNN models.
This file follows the terminology of https://arxiv.org/abs/1706.03762 Figure 2.
Attention is formed by three tensors: Query, Key and Value.
"""
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.keras import backend
from tensorflow.python.keras.engine.base_layer import Layer
from tensorflow.python.keras.utils import control_flow_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.util.tf_export import keras_export
class BaseDenseAttention(Layer):
  """Base Attention class for Dense networks.

  This class is suitable for Dense or CNN networks, and not for RNN networks.

  Implementations of attention mechanisms should inherit from this class, and
  reuse the `apply_attention_scores()` method.

  Args:
    causal: Boolean. Set to `True` for decoder self-attention. Adds a mask such
      that position `i` cannot attend to positions `j > i`. This prevents the
      flow of information from the future towards the past.
    dropout: Float between 0 and 1. Fraction of the units to drop for the
      attention scores.

  Call Args:
    inputs: List of the following tensors:
      * query: Query `Tensor` of shape `[batch_size, Tq, dim]`.
      * value: Value `Tensor` of shape `[batch_size, Tv, dim]`.
      * key: Optional key `Tensor` of shape `[batch_size, Tv, dim]`. If not
        given, will use `value` for both `key` and `value`, which is the
        most common case.
    mask: List of the following tensors:
      * query_mask: A boolean mask `Tensor` of shape `[batch_size, Tq]`.
        If given, the output will be zero at the positions where
        `mask==False`.
      * value_mask: A boolean mask `Tensor` of shape `[batch_size, Tv]`.
        If given, will apply the mask such that values at positions where
        `mask==False` do not contribute to the result.
    training: Python boolean indicating whether the layer should behave in
      training mode (adding dropout) or in inference mode (no dropout).
    return_attention_scores: bool, it `True`, returns the attention scores
      (after masking and softmax) as an additional output argument.

  Output:
    Attention outputs of shape `[batch_size, Tq, dim]`.
    [Optional] Attention scores after masking and softmax with shape
      `[batch_size, Tq, Tv]`.
  """

  def __init__(self, causal=False, dropout=0.0,
               **kwargs):
    super(BaseDenseAttention, self).__init__(**kwargs)
    self.causal = causal
    self.dropout = dropout
    self.supports_masking = True

  def _calculate_scores(self, query, key):
    """Calculates attention scores.

    Args:
      query: Query tensor of shape `[batch_size, Tq, dim]`.
      key: Key tensor of shape `[batch_size, Tv, dim]`.

    Returns:
      Tensor of shape `[batch_size, Tq, Tv]`.
    """
    # BUG FIX: the original *returned* the NotImplementedError class instead
    # of raising it, so a subclass that forgot to override this method would
    # silently propagate a class object as its "scores" tensor.
    raise NotImplementedError

  def _apply_scores(self, scores, value, scores_mask=None, training=None):
    """Applies attention scores to the given value tensor.

    To use this method in your attention layer, follow the steps:

    * Use `query` tensor of shape `[batch_size, Tq]` and `key` tensor of shape
      `[batch_size, Tv]` to calculate the attention `scores`.
    * Pass `scores` and `value` tensors to this method. The method applies
      `scores_mask`, calculates `attention_distribution = softmax(scores)`, then
      returns `matmul(attention_distribution, value).
    * Apply `query_mask` and return the result.

    Args:
      scores: Scores float tensor of shape `[batch_size, Tq, Tv]`.
      value: Value tensor of shape `[batch_size, Tv, dim]`.
      scores_mask: A boolean mask `Tensor` of shape `[batch_size, 1, Tv]` or
        `[batch_size, Tq, Tv]`. If given, scores at positions where
        `scores_mask==False` do not contribute to the result. It must contain
        at least one `True` value in each line along the last dimension.
      training: Python boolean indicating whether the layer should behave in
        training mode (adding dropout) or in inference mode (no dropout).

    Returns:
      Tensor of shape `[batch_size, Tq, dim]`.
      Attention scores after masking and softmax with shape
        `[batch_size, Tq, Tv]`.
    """
    if scores_mask is not None:
      padding_mask = math_ops.logical_not(scores_mask)
      # Bias so padding positions do not contribute to attention distribution.
      # Note 65504. is the max float16 value.
      if scores.dtype is dtypes.float16:
        scores -= 65504. * math_ops.cast(padding_mask, dtype=scores.dtype)
      else:
        scores -= 1.e9 * math_ops.cast(padding_mask, dtype=scores.dtype)
    if training is None:
      training = backend.learning_phase()
    weights = nn.softmax(scores)

    def dropped_weights():
      return nn.dropout(weights, rate=self.dropout)

    # Dropout is applied to the attention distribution only in training mode.
    weights = control_flow_util.smart_cond(training, dropped_weights,
                                           lambda: array_ops.identity(weights))
    return math_ops.matmul(weights, value), weights

  # TODO(b/125916026): Consider exposing a __call__ method with named args.
  def call(self,
           inputs,
           mask=None,
           training=None,
           return_attention_scores=False):
    self._validate_call_args(inputs=inputs, mask=mask)
    q = inputs[0]
    v = inputs[1]
    k = inputs[2] if len(inputs) > 2 else v
    q_mask = mask[0] if mask else None
    v_mask = mask[1] if mask else None
    scores = self._calculate_scores(query=q, key=k)
    if v_mask is not None:
      # Mask of shape [batch_size, 1, Tv].
      v_mask = array_ops.expand_dims(v_mask, axis=-2)
    if self.causal:
      # Creates a lower triangular mask, so position i cannot attend to
      # positions j>i. This prevents the flow of information from the future
      # into the past.
      scores_shape = array_ops.shape(scores)
      # causal_mask_shape = [1, Tq, Tv].
      causal_mask_shape = array_ops.concat(
          [array_ops.ones_like(scores_shape[:-2]), scores_shape[-2:]],
          axis=0)
      causal_mask = _lower_triangular_mask(causal_mask_shape)
    else:
      causal_mask = None
    scores_mask = _merge_masks(v_mask, causal_mask)
    result, attention_scores = self._apply_scores(
        scores=scores, value=v, scores_mask=scores_mask, training=training)
    if q_mask is not None:
      # Mask of shape [batch_size, Tq, 1].
      q_mask = array_ops.expand_dims(q_mask, axis=-1)
      result *= math_ops.cast(q_mask, dtype=result.dtype)
    if return_attention_scores:
      return result, attention_scores
    return result

  def compute_mask(self, inputs, mask=None):
    # The output inherits the query's mask (positions, not features).
    self._validate_call_args(inputs=inputs, mask=mask)
    if mask:
      q_mask = mask[0]
      if q_mask is None:
        return None
      return ops.convert_to_tensor_v2_with_dispatch(q_mask)
    return None

  def _validate_call_args(self, inputs, mask):
    """Validates arguments of the call method."""
    class_name = self.__class__.__name__
    if not isinstance(inputs, list):
      raise ValueError(
          '{} layer must be called on a list of inputs, namely [query, value] '
          'or [query, value, key].'.format(class_name))
    if len(inputs) < 2 or len(inputs) > 3:
      raise ValueError(
          '{} layer accepts inputs list of length 2 or 3, '
          'namely [query, value] or [query, value, key]. '
          'Given length: {}'.format(class_name, len(inputs)))
    if mask:
      if not isinstance(mask, list):
        raise ValueError(
            '{} layer mask must be a list, '
            'namely [query_mask, value_mask].'.format(class_name))
      if len(mask) < 2 or len(mask) > len(inputs):
        raise ValueError(
            '{} layer mask must be a list of length 2, namely [query_mask, '
            'value_mask]. Given length: {}'.format(class_name, len(mask)))

  def get_config(self):
    config = {
        'causal': self.causal,
        'dropout': self.dropout,
    }
    base_config = super(BaseDenseAttention, self).get_config()
    return dict(list(base_config.items()) + list(config.items()))
@keras_export('keras.layers.Attention')
class Attention(BaseDenseAttention):
  """Dot-product attention layer, a.k.a. Luong-style attention.
  Inputs are `query` tensor of shape `[batch_size, Tq, dim]`, `value` tensor of
  shape `[batch_size, Tv, dim]` and `key` tensor of shape
  `[batch_size, Tv, dim]`. The calculation follows the steps:
  1. Calculate scores with shape `[batch_size, Tq, Tv]` as a `query`-`key` dot
     product: `scores = tf.matmul(query, key, transpose_b=True)`.
  2. Use scores to calculate a distribution with shape
     `[batch_size, Tq, Tv]`: `distribution = tf.nn.softmax(scores)`.
  3. Use `distribution` to create a linear combination of `value` with
     shape `[batch_size, Tq, dim]`:
     `return tf.matmul(distribution, value)`.
  Args:
    use_scale: If `True`, will create a scalar variable to scale the attention
      scores.
    causal: Boolean. Set to `True` for decoder self-attention. Adds a mask such
      that position `i` cannot attend to positions `j > i`. This prevents the
      flow of information from the future towards the past.
    dropout: Float between 0 and 1. Fraction of the units to drop for the
      attention scores.
  Call Args:
    inputs: List of the following tensors:
      * query: Query `Tensor` of shape `[batch_size, Tq, dim]`.
      * value: Value `Tensor` of shape `[batch_size, Tv, dim]`.
      * key: Optional key `Tensor` of shape `[batch_size, Tv, dim]`. If not
        given, will use `value` for both `key` and `value`, which is the
        most common case.
    mask: List of the following tensors:
      * query_mask: A boolean mask `Tensor` of shape `[batch_size, Tq]`.
        If given, the output will be zero at the positions where
        `mask==False`.
      * value_mask: A boolean mask `Tensor` of shape `[batch_size, Tv]`.
        If given, will apply the mask such that values at positions where
        `mask==False` do not contribute to the result.
    return_attention_scores: bool, if `True`, returns the attention scores
      (after masking and softmax) as an additional output argument.
    training: Python boolean indicating whether the layer should behave in
      training mode (adding dropout) or in inference mode (no dropout).
  Output:
    Attention outputs of shape `[batch_size, Tq, dim]`.
    [Optional] Attention scores after masking and softmax with shape
      `[batch_size, Tq, Tv]`.
  The meaning of `query`, `value` and `key` depend on the application. In the
  case of text similarity, for example, `query` is the sequence embeddings of
  the first piece of text and `value` is the sequence embeddings of the second
  piece of text. `key` is usually the same tensor as `value`.
  Here is a code example for using `Attention` in a CNN+Attention network:
  ```python
  # Variable-length int sequences.
  query_input = tf.keras.Input(shape=(None,), dtype='int32')
  value_input = tf.keras.Input(shape=(None,), dtype='int32')
  # Embedding lookup.
  token_embedding = tf.keras.layers.Embedding(input_dim=1000, output_dim=64)
  # Query embeddings of shape [batch_size, Tq, dimension].
  query_embeddings = token_embedding(query_input)
  # Value embeddings of shape [batch_size, Tv, dimension].
  value_embeddings = token_embedding(value_input)
  # CNN layer.
  cnn_layer = tf.keras.layers.Conv1D(
      filters=100,
      kernel_size=4,
      # Use 'same' padding so outputs have the same shape as inputs.
      padding='same')
  # Query encoding of shape [batch_size, Tq, filters].
  query_seq_encoding = cnn_layer(query_embeddings)
  # Value encoding of shape [batch_size, Tv, filters].
  value_seq_encoding = cnn_layer(value_embeddings)
  # Query-value attention of shape [batch_size, Tq, filters].
  query_value_attention_seq = tf.keras.layers.Attention()(
      [query_seq_encoding, value_seq_encoding])
  # Reduce over the sequence axis to produce encodings of shape
  # [batch_size, filters].
  query_encoding = tf.keras.layers.GlobalAveragePooling1D()(
      query_seq_encoding)
  query_value_attention = tf.keras.layers.GlobalAveragePooling1D()(
      query_value_attention_seq)
  # Concatenate query and document encodings to produce a DNN input layer.
  input_layer = tf.keras.layers.Concatenate()(
      [query_encoding, query_value_attention])
  # Add DNN layers, and create Model.
  # ...
  ```
  """

  def __init__(self, use_scale=False, **kwargs):
    super(Attention, self).__init__(**kwargs)
    self.use_scale = use_scale

  def build(self, input_shape):
    """Creates scale variable if use_scale==True."""
    if self.use_scale:
      # Scalar multiplier applied to the raw dot-product scores.
      self.scale = self.add_weight(
          name='scale',
          shape=(),
          initializer=init_ops.ones_initializer(),
          dtype=self.dtype,
          trainable=True)
    else:
      self.scale = None
    super(Attention, self).build(input_shape)

  def _calculate_scores(self, query, key):
    """Calculates attention scores as a query-key dot product.
    Args:
      query: Query tensor of shape `[batch_size, Tq, dim]`.
      key: Key tensor of shape `[batch_size, Tv, dim]`.
    Returns:
      Tensor of shape `[batch_size, Tq, Tv]`.
    """
    scores = math_ops.matmul(query, key, transpose_b=True)
    if self.scale is not None:
      scores *= self.scale
    return scores

  def get_config(self):
    """Returns the serializable config; extends the base layer's config."""
    config = {'use_scale': self.use_scale}
    base_config = super(Attention, self).get_config()
    return dict(list(base_config.items()) + list(config.items()))
@keras_export('keras.layers.AdditiveAttention')
class AdditiveAttention(BaseDenseAttention):
  """Additive attention layer, a.k.a. Bahdanau-style attention.
  Inputs are `query` tensor of shape `[batch_size, Tq, dim]`, `value` tensor of
  shape `[batch_size, Tv, dim]` and `key` tensor of shape
  `[batch_size, Tv, dim]`. The calculation follows the steps:
  1. Reshape `query` and `value` into shapes `[batch_size, Tq, 1, dim]`
     and `[batch_size, 1, Tv, dim]` respectively.
  2. Calculate scores with shape `[batch_size, Tq, Tv]` as a non-linear
     sum: `scores = tf.reduce_sum(tf.tanh(query + value), axis=-1)`
  3. Use scores to calculate a distribution with shape
     `[batch_size, Tq, Tv]`: `distribution = tf.nn.softmax(scores)`.
  4. Use `distribution` to create a linear combination of `value` with
     shape `[batch_size, Tq, dim]`:
     `return tf.matmul(distribution, value)`.
  Args:
    use_scale: If `True`, will create a variable to scale the attention scores.
    causal: Boolean. Set to `True` for decoder self-attention. Adds a mask such
      that position `i` cannot attend to positions `j > i`. This prevents the
      flow of information from the future towards the past.
    dropout: Float between 0 and 1. Fraction of the units to drop for the
      attention scores.
  Call Args:
    inputs: List of the following tensors:
      * query: Query `Tensor` of shape `[batch_size, Tq, dim]`.
      * value: Value `Tensor` of shape `[batch_size, Tv, dim]`.
      * key: Optional key `Tensor` of shape `[batch_size, Tv, dim]`. If not
        given, will use `value` for both `key` and `value`, which is the
        most common case.
    mask: List of the following tensors:
      * query_mask: A boolean mask `Tensor` of shape `[batch_size, Tq]`.
        If given, the output will be zero at the positions where
        `mask==False`.
      * value_mask: A boolean mask `Tensor` of shape `[batch_size, Tv]`.
        If given, will apply the mask such that values at positions where
        `mask==False` do not contribute to the result.
    training: Python boolean indicating whether the layer should behave in
      training mode (adding dropout) or in inference mode (no dropout).
    return_attention_scores: bool, if `True`, returns the attention scores
      (after masking and softmax) as an additional output argument.
  Output:
    Attention outputs of shape `[batch_size, Tq, dim]`.
    [Optional] Attention scores after masking and softmax with shape
      `[batch_size, Tq, Tv]`.
  The meaning of `query`, `value` and `key` depend on the application. In the
  case of text similarity, for example, `query` is the sequence embeddings of
  the first piece of text and `value` is the sequence embeddings of the second
  piece of text. `key` is usually the same tensor as `value`.
  Here is a code example for using `AdditiveAttention` in a CNN+Attention
  network:
  ```python
  # Variable-length int sequences.
  query_input = tf.keras.Input(shape=(None,), dtype='int32')
  value_input = tf.keras.Input(shape=(None,), dtype='int32')
  # Embedding lookup.
  token_embedding = tf.keras.layers.Embedding(max_tokens, dimension)
  # Query embeddings of shape [batch_size, Tq, dimension].
  query_embeddings = token_embedding(query_input)
  # Value embeddings of shape [batch_size, Tv, dimension].
  value_embeddings = token_embedding(value_input)
  # CNN layer.
  cnn_layer = tf.keras.layers.Conv1D(
      filters=100,
      kernel_size=4,
      # Use 'same' padding so outputs have the same shape as inputs.
      padding='same')
  # Query encoding of shape [batch_size, Tq, filters].
  query_seq_encoding = cnn_layer(query_embeddings)
  # Value encoding of shape [batch_size, Tv, filters].
  value_seq_encoding = cnn_layer(value_embeddings)
  # Query-value attention of shape [batch_size, Tq, filters].
  query_value_attention_seq = tf.keras.layers.AdditiveAttention()(
      [query_seq_encoding, value_seq_encoding])
  # Reduce over the sequence axis to produce encodings of shape
  # [batch_size, filters].
  query_encoding = tf.keras.layers.GlobalAveragePooling1D()(
      query_seq_encoding)
  query_value_attention = tf.keras.layers.GlobalAveragePooling1D()(
      query_value_attention_seq)
  # Concatenate query and document encodings to produce a DNN input layer.
  input_layer = tf.keras.layers.Concatenate()(
      [query_encoding, query_value_attention])
  # Add DNN layers, and create Model.
  # ...
  ```
  """

  def __init__(self, use_scale=True, **kwargs):
    super(AdditiveAttention, self).__init__(**kwargs)
    self.use_scale = use_scale

  def build(self, input_shape):
    """Creates the per-feature scale variable if use_scale==True."""
    v_shape = tensor_shape.TensorShape(input_shape[1])
    dim = v_shape[-1]
    if isinstance(dim, tensor_shape.Dimension):
      dim = dim.value  # TF1 compat: unwrap Dimension to a plain int.
    if self.use_scale:
      self.scale = self.add_weight(
          name='scale',
          shape=[dim],
          initializer=init_ops.glorot_uniform_initializer(),
          dtype=self.dtype,
          trainable=True)
    else:
      self.scale = None
    super(AdditiveAttention, self).build(input_shape)

  def _calculate_scores(self, query, key):
    """Calculates attention scores as a nonlinear sum of query and key.
    Args:
      query: Query tensor of shape `[batch_size, Tq, dim]`.
      key: Key tensor of shape `[batch_size, Tv, dim]`.
    Returns:
      Tensor of shape `[batch_size, Tq, Tv]`.
    """
    # Reshape tensors to enable broadcasting.
    # Reshape into [batch_size, Tq, 1, dim].
    q_reshaped = array_ops.expand_dims(query, axis=-2)
    # Reshape into [batch_size, 1, Tv, dim].
    k_reshaped = array_ops.expand_dims(key, axis=-3)
    if self.use_scale:
      scale = self.scale
    else:
      scale = 1.
    return math_ops.reduce_sum(
        scale * math_ops.tanh(q_reshaped + k_reshaped), axis=-1)

  def get_config(self):
    """Returns the serializable config; extends the base layer's config."""
    config = {'use_scale': self.use_scale}
    base_config = super(AdditiveAttention, self).get_config()
    return dict(list(base_config.items()) + list(config.items()))
def _lower_triangular_mask(shape):
  """Returns a boolean mask that is True on and below the main diagonal
  of the last two axes of a tensor of the given `shape`."""
  # Cumulative sums of a ones tensor give 1-based row/column indices;
  # row >= col is exactly the lower triangle (diagonal included).
  ones = array_ops.ones(shape=shape, dtype=dtypes.int32)
  row_index = math_ops.cumsum(ones, axis=-2)
  col_index = math_ops.cumsum(ones, axis=-1)
  return math_ops.greater_equal(row_index, col_index)
def _merge_masks(x, y):
  """Combines two optional boolean masks with a logical AND.

  Either argument may be None, in which case the other one (possibly also
  None) is returned unchanged.
  """
  if x is not None and y is not None:
    return math_ops.logical_and(x, y)
  return y if x is None else x
| |
# -*- coding: utf-8 -*-
"""
werkzeug.debug.tbtools
~~~~~~~~~~~~~~~~~~~~~~
This module provides various traceback related utility functions.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD.
"""
import re
import os
import sys
import json
import inspect
import traceback
import codecs
from tokenize import TokenError
from werkzeug.utils import cached_property, escape
from werkzeug.debug.console import Console
from werkzeug._compat import range_type, PY2, text_type, string_types, \
to_native, to_unicode
from werkzeug.filesystem import get_filesystem_encoding
# Matches a PEP 263 coding cookie, e.g. "# -*- coding: utf-8 -*-".
_coding_re = re.compile(br'coding[:=]\s*([-\w.]+)')
# Splits byte source into lines.  The inline MULTILINE flag must lead the
# pattern: trailing global flags like "...$(?m)" have been deprecated since
# Python 3.6 and raise re.error as of Python 3.11.
_line_re = re.compile(br'(?m)^(.*?)$')
# Matches the start of a function definition, a lambda, or a decorator line.
_funcdef_re = re.compile(r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)')
# UTF-8 byte order mark used to detect/strip BOM-prefixed source files.
UTF8_COOKIE = b'\xef\xbb\xbf'
# Exceptions that should usually be reraised rather than rendered.
system_exceptions = (SystemExit, KeyboardInterrupt)
try:
    system_exceptions += (GeneratorExit,)
except NameError:
    pass
HEADER = u'''\
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
"http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
<title>%(title)s // Werkzeug Debugger</title>
<link rel="stylesheet" href="?__debugger__=yes&cmd=resource&f=style.css"
type="text/css">
<!-- We need to make sure this has a favicon so that the debugger does not by
accident trigger a request to /favicon.ico which might change the application
state. -->
<link rel="shortcut icon"
href="?__debugger__=yes&cmd=resource&f=console.png">
<script src="?__debugger__=yes&cmd=resource&f=jquery.js"></script>
<script src="?__debugger__=yes&cmd=resource&f=debugger.js"></script>
<script type="text/javascript">
var TRACEBACK = %(traceback_id)d,
CONSOLE_MODE = %(console)s,
EVALEX = %(evalex)s,
SECRET = "%(secret)s";
</script>
</head>
<body>
<div class="debugger">
'''
FOOTER = u'''\
<div class="footer">
Brought to you by <strong class="arthur">DON'T PANIC</strong>, your
friendly Werkzeug powered traceback interpreter.
</div>
</div>
</body>
</html>
'''
PAGE_HTML = HEADER + u'''\
<h1>%(exception_type)s</h1>
<div class="detail">
<p class="errormsg">%(exception)s</p>
</div>
<h2 class="traceback">Traceback <em>(most recent call last)</em></h2>
%(summary)s
<div class="plain">
<form action="/?__debugger__=yes&cmd=paste" method="post">
<p>
<input type="hidden" name="language" value="pytb">
This is the Copy/Paste friendly version of the traceback. <span
class="pastemessage">You can also paste this traceback into
a <a href="https://gist.github.com/">gist</a>:
<input type="submit" value="create paste"></span>
</p>
<textarea cols="50" rows="10" name="code" readonly>%(plaintext)s</textarea>
</form>
</div>
<div class="explanation">
The debugger caught an exception in your WSGI application. You can now
look at the traceback which led to the error. <span class="nojavascript">
If you enable JavaScript you can also use additional features such as code
execution (if the evalex feature is enabled), automatic pasting of the
exceptions and much more.</span>
</div>
''' + FOOTER + '''
<!--
%(plaintext_cs)s
-->
'''
CONSOLE_HTML = HEADER + u'''\
<h1>Interactive Console</h1>
<div class="explanation">
In this console you can execute Python expressions in the context of the
application. The initial namespace was created by the debugger automatically.
</div>
<div class="console"><div class="inner">The Console requires JavaScript.</div></div>
''' + FOOTER
SUMMARY_HTML = u'''\
<div class="%(classes)s">
%(title)s
<ul>%(frames)s</ul>
%(description)s
</div>
'''
FRAME_HTML = u'''\
<div class="frame" id="frame-%(id)d">
<h4>File <cite class="filename">"%(filename)s"</cite>,
line <em class="line">%(lineno)s</em>,
in <code class="function">%(function_name)s</code></h4>
<pre>%(current_line)s</pre>
</div>
'''
SOURCE_TABLE_HTML = u'<table class=source>%s</table>'
SOURCE_LINE_HTML = u'''\
<tr class="%(classes)s">
<td class=lineno>%(lineno)s</td>
<td>%(code)s</td>
</tr>
'''
def render_console_html(secret):
    """Render the standalone interactive console page.

    `secret` is embedded in the page so the JavaScript side can
    authenticate its requests back to the debugger.
    """
    params = dict(
        evalex='true',
        console='true',
        title='Console',
        secret=secret,
        traceback_id=-1,
    )
    return CONSOLE_HTML % params
def get_current_traceback(ignore_system_exceptions=False,
                          show_hidden_frames=False, skip=0):
    """Get the current exception info as `Traceback` object.

    If `ignore_system_exceptions` is set to `True`, system exceptions such
    as `SystemExit`, `KeyboardInterrupt` and `GeneratorExit` are reraised
    instead of being wrapped (by default they are captured like any other
    exception).  `skip` drops up to that many frames from the top of the
    traceback, and unless `show_hidden_frames` is true, frames hidden via
    the paste `__traceback_hide__` convention are filtered out.
    """
    exc_type, exc_value, tb = sys.exc_info()
    if ignore_system_exceptions and exc_type in system_exceptions:
        raise
    # Skip the requested number of frames, but never walk past the last one.
    for x in range_type(skip):
        if tb.tb_next is None:
            break
        tb = tb.tb_next
    tb = Traceback(exc_type, exc_value, tb)
    if not show_hidden_frames:
        tb.filter_hidden_frames()
    return tb
class Line(object):
    """One rendered source line in the traceback source view."""
    __slots__ = ('lineno', 'code', 'in_frame', 'current')

    def __init__(self, lineno, code):
        self.lineno = lineno
        self.code = code
        self.in_frame = False
        self.current = False

    @property
    def classes(self):
        """CSS classes reflecting this line's highlight state."""
        css = ['line']
        if self.in_frame:
            css.append('in-frame')
        if self.current:
            css.append('current')
        return css

    def render(self):
        """Render this line as a table row of the source listing."""
        values = {
            'classes': u' '.join(self.classes),
            'lineno': self.lineno,
            'code': escape(self.code)
        }
        return SOURCE_LINE_HTML % values
class Traceback(object):
    """Wraps a traceback."""

    def __init__(self, exc_type, exc_value, tb):
        """Collects exception info and one `Frame` per traceback level."""
        self.exc_type = exc_type
        self.exc_value = exc_value
        # Builtin exceptions keep their bare name; everything else gets the
        # defining module as a dotted prefix.
        if not isinstance(exc_type, str):
            exception_type = exc_type.__name__
            if exc_type.__module__ not in ('__builtin__', 'exceptions'):
                exception_type = exc_type.__module__ + '.' + exception_type
        else:
            exception_type = exc_type
        self.exception_type = exception_type
        # we only add frames to the list that are not hidden. This follows
        # the magic variables as defined by paste.exceptions.collector
        self.frames = []
        while tb:
            self.frames.append(Frame(exc_type, exc_value, tb))
            tb = tb.tb_next

    def filter_hidden_frames(self):
        """Remove the frames according to the paste spec."""
        if not self.frames:
            return
        new_frames = []
        hidden = False
        # Each frame's `__traceback_hide__` value may hide the frame itself
        # and/or the frames before or after it.
        for frame in self.frames:
            hide = frame.hide
            if hide in ('before', 'before_and_this'):
                new_frames = []
                hidden = False
                if hide == 'before_and_this':
                    continue
            elif hide in ('reset', 'reset_and_this'):
                hidden = False
                if hide == 'reset_and_this':
                    continue
            elif hide in ('after', 'after_and_this'):
                hidden = True
                if hide == 'after_and_this':
                    continue
            elif hide or hidden:
                continue
            new_frames.append(frame)
        # if we only have one frame and that frame is from the codeop
        # module, remove it.
        if len(new_frames) == 1 and self.frames[0].module == 'codeop':
            del self.frames[:]
        # if the last frame is missing something went terribly wrong :(
        elif self.frames[-1] in new_frames:
            self.frames[:] = new_frames

    def is_syntax_error(self):
        """Is it a syntax error?"""
        return isinstance(self.exc_value, SyntaxError)
    is_syntax_error = property(is_syntax_error)

    def exception(self):
        """String representation of the exception."""
        buf = traceback.format_exception_only(self.exc_type, self.exc_value)
        rv = ''.join(buf).strip()
        # On Python 2 the formatted exception is bytes; decode so callers
        # always receive text.
        return rv.decode('utf-8', 'replace') if PY2 else rv
    exception = property(exception)

    def log(self, logfile=None):
        """Log the ASCII traceback into a file object."""
        if logfile is None:
            logfile = sys.stderr
        tb = self.plaintext.rstrip() + u'\n'
        if PY2:
            tb = tb.encode('utf-8', 'replace')
        logfile.write(tb)

    def paste(self):
        """Create a paste (a GitHub gist) and return its url and id."""
        data = json.dumps({
            'description': 'Werkzeug Internal Server Error',
            'public': False,
            'files': {
                'traceback.txt': {
                    'content': self.plaintext
                }
            }
        }).encode('utf-8')
        # urllib2 on Python 2, urllib.request on Python 3.
        try:
            from urllib2 import urlopen
        except ImportError:
            from urllib.request import urlopen
        rv = urlopen('https://api.github.com/gists', data=data)
        resp = json.loads(rv.read().decode('utf-8'))
        rv.close()
        return {
            'url': resp['html_url'],
            'id': resp['id']
        }

    def render_summary(self, include_title=True):
        """Render the traceback for the interactive console."""
        title = ''
        frames = []
        classes = ['traceback']
        if not self.frames:
            classes.append('noframe-traceback')
        if include_title:
            if self.is_syntax_error:
                title = u'Syntax Error'
            else:
                title = u'Traceback <em>(most recent call last)</em>:'
        for frame in self.frames:
            frames.append(u'<li%s>%s' % (
                frame.info and u' title="%s"' % escape(frame.info) or u'',
                frame.render()
            ))
        # Syntax errors show the caret line, hence the <pre> wrapper.
        if self.is_syntax_error:
            description_wrapper = u'<pre class=syntaxerror>%s</pre>'
        else:
            description_wrapper = u'<blockquote>%s</blockquote>'
        return SUMMARY_HTML % {
            'classes': u' '.join(classes),
            'title': title and u'<h3>%s</h3>' % title or u'',
            'frames': u'\n'.join(frames),
            'description': description_wrapper % escape(self.exception)
        }

    def render_full(self, evalex=False, secret=None):
        """Render the Full HTML page with the traceback info."""
        exc = escape(self.exception)
        return PAGE_HTML % {
            'evalex': evalex and 'true' or 'false',
            'console': 'false',
            'title': exc,
            'exception': exc,
            'exception_type': escape(self.exception_type),
            'summary': self.render_summary(include_title=False),
            'plaintext': self.plaintext,
            # Collapse dashes so the plaintext cannot break out of the
            # surrounding HTML comment ("--" ends a comment).
            'plaintext_cs': re.sub('-{2,}', '-', self.plaintext),
            'traceback_id': self.id,
            'secret': secret
        }

    def generate_plaintext_traceback(self):
        """Like the plaintext attribute but returns a generator"""
        yield u'Traceback (most recent call last):'
        for frame in self.frames:
            yield u'  File "%s", line %s, in %s' % (
                frame.filename,
                frame.lineno,
                frame.function_name
            )
            yield u'    ' + frame.current_line.strip()
        yield self.exception

    def plaintext(self):
        """The whole traceback as a plain text unicode string."""
        return u'\n'.join(self.generate_plaintext_traceback())
    plaintext = cached_property(plaintext)

    # The object id doubles as the traceback id referenced by the JS side.
    id = property(lambda x: id(x))
class Frame(object):
    """A single frame in a traceback."""

    def __init__(self, exc_type, exc_value, tb):
        """Captures location, code object and namespaces of one tb level."""
        self.lineno = tb.tb_lineno
        self.function_name = tb.tb_frame.f_code.co_name
        self.locals = tb.tb_frame.f_locals
        self.globals = tb.tb_frame.f_globals

        fn = inspect.getsourcefile(tb) or inspect.getfile(tb)
        # Map byte-compiled files (.pyc/.pyo) back to their .py source.
        if fn[-4:] in ('.pyo', '.pyc'):
            fn = fn[:-1]
        # if it's a file on the file system resolve the real filename.
        if os.path.isfile(fn):
            fn = os.path.realpath(fn)
        self.filename = to_unicode(fn, get_filesystem_encoding())
        self.module = self.globals.get('__name__')
        self.loader = self.globals.get('__loader__')
        self.code = tb.tb_frame.f_code

        # support for paste's traceback extensions
        self.hide = self.locals.get('__traceback_hide__', False)
        info = self.locals.get('__traceback_info__')
        if info is not None:
            try:
                info = text_type(info)
            except UnicodeError:
                info = str(info).decode('utf-8', 'replace')
        self.info = info

    def render(self):
        """Render a single frame in a traceback."""
        return FRAME_HTML % {
            'id': self.id,
            'filename': escape(self.filename),
            'lineno': self.lineno,
            'function_name': escape(self.function_name),
            'current_line': escape(self.current_line.strip())
        }

    def get_annotated_lines(self):
        """Helper function that returns lines with extra information."""
        lines = [Line(idx + 1, x) for idx, x in enumerate(self.sourcelines)]

        # find function definition and mark lines
        if hasattr(self.code, 'co_firstlineno'):
            lineno = self.code.co_firstlineno - 1
            # Walk upwards to the `def`/`lambda`/decorator line of the frame.
            while lineno > 0:
                if _funcdef_re.match(lines[lineno].code):
                    break
                lineno -= 1
            try:
                offset = len(inspect.getblock([x.code + '\n' for x
                                               in lines[lineno:]]))
            except TokenError:
                offset = 0
            for line in lines[lineno:lineno + offset]:
                line.in_frame = True

        # mark current line
        try:
            lines[self.lineno - 1].current = True
        except IndexError:
            pass

        return lines

    def render_source(self):
        """Render the sourcecode."""
        return SOURCE_TABLE_HTML % u'\n'.join(line.render() for line in
                                              self.get_annotated_lines())

    def eval(self, code, mode='single'):
        """Evaluate code in the context of the frame."""
        if isinstance(code, string_types):
            if PY2 and isinstance(code, unicode):  # noqa
                # Prefix with a BOM so compile() treats the bytes as UTF-8.
                code = UTF8_COOKIE + code.encode('utf-8')
            code = compile(code, '<interactive>', mode)
        return eval(code, self.globals, self.locals)

    @cached_property
    def sourcelines(self):
        """The sourcecode of the file as list of unicode strings."""
        # get sourcecode from loader or file
        source = None
        if self.loader is not None:
            try:
                if hasattr(self.loader, 'get_source'):
                    source = self.loader.get_source(self.module)
                elif hasattr(self.loader, 'get_source_by_code'):
                    source = self.loader.get_source_by_code(self.code)
            except Exception:
                # we munch the exception so that we don't cause troubles
                # if the loader is broken.
                pass
        if source is None:
            try:
                f = open(to_native(self.filename, get_filesystem_encoding()),
                         mode='rb')
            except IOError:
                return []
            try:
                source = f.read()
            finally:
                f.close()

        # already unicode?  return right away
        if isinstance(source, text_type):
            return source.splitlines()

        # yes. it should be ascii, but we don't want to reject too many
        # characters in the debugger if something breaks
        charset = 'utf-8'
        if source.startswith(UTF8_COOKIE):
            source = source[3:]
        else:
            # PEP 263: a coding cookie must be on one of the first two lines.
            for idx, match in enumerate(_line_re.finditer(source)):
                match = _coding_re.search(match.group())
                if match is not None:
                    charset = match.group(1)
                    break
                if idx > 1:
                    break

        # on broken cookies we fall back to utf-8 too
        charset = to_native(charset)
        try:
            codecs.lookup(charset)
        except LookupError:
            charset = 'utf-8'

        return source.decode(charset, 'replace').splitlines()

    @property
    def current_line(self):
        # Empty string (not an IndexError) when the line is out of range.
        try:
            return self.sourcelines[self.lineno - 1]
        except IndexError:
            return u''

    @cached_property
    def console(self):
        """An interactive console bound to this frame's namespaces."""
        return Console(self.globals, self.locals)

    # The object id doubles as the frame id referenced by the JS side.
    id = property(lambda x: id(x))
| |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Pynliner : Convert CSS to inline styles
Python CSS-to-inline-styles conversion tool for HTML using BeautifulSoup and
cssutils
Copyright (c) 2011-2013 Tanner Netterville
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
The generated output of this software shall not be used in a mass marketing
service.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
__version__ = "0.5.1"
import re
import urlparse
import urllib2
import cssutils
from BeautifulSoup import BeautifulSoup, Comment
from soupselect import select
class Pynliner(object):
"""Pynliner class"""
soup = False
style_string = False
stylesheet = False
output = False
def __init__(self, log=None, allow_conditional_comments=False):
self.log = log
cssutils.log.enabled = False if log is None else True
self.extra_style_strings = []
self.allow_conditional_comments = allow_conditional_comments
self.root_url = None
self.relative_url = None
def from_url(self, url):
"""Gets remote HTML page for conversion
Downloads HTML page from `url` as a string and passes it to the
`from_string` method. Also sets `self.root_url` and `self.relative_url`
for use in importing <link> elements.
Returns self.
>>> p = Pynliner()
>>> p.from_url('http://somewebsite.com/file.html')
<Pynliner object at 0x26ac70>
"""
self.url = url
self.relative_url = '/'.join(url.split('/')[:-1]) + '/'
self.root_url = '/'.join(url.split('/')[:3])
self.source_string = self._get_url(self.url)
return self
def from_string(self, string):
"""Generates a Pynliner object from the given HTML string.
Returns self.
>>> p = Pynliner()
>>> p.from_string('<style>h1 {color:#ffcc00;}</style><h1>Hi</h1>')
<Pynliner object at 0x26ac70>
"""
self.source_string = string
return self
def with_cssString(self, css_string):
"""Adds external CSS to the Pynliner object. Can be "chained".
Returns self.
>>> html = "<h1>Hello World!</h1>"
>>> css = "h1 { color:#ffcc00; }"
>>> p = Pynliner()
>>> p.from_string(html).with_cssString(css)
<pynliner.Pynliner object at 0x2ca810>
"""
self.extra_style_strings.append(css_string)
return self
def run(self):
"""Applies each step of the process if they have not already been
performed.
Returns Unicode output with applied styles.
>>> html = "<style>h1 { color:#ffcc00; }</style><h1>Hello World!</h1>"
>>> Pynliner().from_string(html).run()
u'<h1 style="color: #fc0">Hello World!</h1>'
"""
if not self.soup:
self._get_soup()
if not self.stylesheet:
self._get_styles()
self._apply_styles()
self._get_output()
self._clean_output()
return self.output
def _get_url(self, url):
"""Returns the response content from the given url
"""
return urllib2.urlopen(url).read()
def _get_soup(self):
"""Convert source string to BeautifulSoup object. Sets it to self.soup.
If using mod_wgsi, use html5 parsing to prevent BeautifulSoup
incompatibility.
"""
# Check if mod_wsgi is running
# - see http://code.google.com/p/modwsgi/wiki/TipsAndTricks
try:
from mod_wsgi import version
self.soup = BeautifulSoup(self.source_string, "html5lib")
except:
self.soup = BeautifulSoup(self.source_string)
def _get_styles(self):
"""Gets all CSS content from and removes all <link rel="stylesheet"> and
<style> tags concatenating into one CSS string which is then parsed with
cssutils and the resulting CSSStyleSheet object set to
`self.stylesheet`.
"""
self._get_external_styles()
self._get_internal_styles()
for style_string in self.extra_style_strings:
self.style_string += style_string
cssparser = cssutils.CSSParser(log=self.log)
self.stylesheet = cssparser.parseString(self.style_string)
def _get_external_styles(self):
"""Gets <link> element styles
"""
if not self.style_string:
self.style_string = u''
else:
self.style_string += u'\n'
link_tags = self.soup.findAll('link', {'rel': 'stylesheet'})
for tag in link_tags:
url = tag['href']
# Convert the relative URL to an absolute URL ready to pass to urllib
base_url = self.relative_url or self.root_url
url = urlparse.urljoin(base_url, url)
self.style_string += self._get_url(url)
tag.extract()
def _get_internal_styles(self):
"""Gets <style> element styles
"""
if not self.style_string:
self.style_string = u''
else:
self.style_string += u'\n'
style_tags = self.soup.findAll('style')
for tag in style_tags:
self.style_string += u'\n'.join(tag.contents) + u'\n'
tag.extract()
def _get_specificity_from_list(self, lst):
"""
Takes an array of ints and returns an integer formed
by adding all ints multiplied by the power of 10 of the current index
(1, 0, 0, 1) => (1 * 10**3) + (0 * 10**2) + (0 * 10**1) + (1 * 10**0) => 1001
"""
return int(''.join(map(str, lst)))
def _get_rule_specificity(self, rule):
"""
For a given CSSRule get its selector specificity in base 10
"""
return sum(map(self._get_specificity_from_list, (s.specificity for s in rule.selectorList)))
def _apply_styles(self):
"""Steps through CSS rules and applies each to all the proper elements
as @style attributes prepending any current @style attributes.
"""
rules = self.stylesheet.cssRules.rulesOfType(1)
elem_prop_map = {}
elem_style_map = {}
# build up a property list for every styled element
for rule in rules:
# select elements for every selector
selectors = rule.selectorText.split(',')
elements = []
for selector in selectors:
elements += select(self.soup, selector)
# build prop_list for each selected element
for elem in elements:
if elem not in elem_prop_map:
elem_prop_map[elem] = []
elem_prop_map[elem].append({
'specificity': self._get_rule_specificity(rule),
'props': rule.style.getProperties(),
})
# build up another property list using selector specificity
for elem, props in elem_prop_map.items():
if elem not in elem_style_map:
elem_style_map[elem] = cssutils.css.CSSStyleDeclaration()
# ascending sort of prop_lists based on specificity
props = sorted(props, key=lambda p: p['specificity'])
# for each prop_list, apply to CSSStyleDeclaration
for prop_list in map(lambda obj: obj['props'], props):
for prop in prop_list:
elem_style_map[elem].removeProperty(prop.name)
elem_style_map[elem].setProperty(prop.name, prop.value)
# apply rules to elements
for elem, style_declaration in elem_style_map.items():
if elem.has_key('style'):
elem['style'] = u'%s; %s' % (style_declaration.cssText.replace('\n', ' '), elem['style'])
else:
elem['style'] = style_declaration.cssText.replace('\n', ' ')
def _get_output(self):
    """Render `self.soup` to a Unicode string, cache it on
    `self.output` and return it.
    """
    # NOTE: the `unicode` builtin makes this Python 2 only.
    rendered = unicode(self.soup)
    self.output = rendered
    return rendered
def _clean_output(self):
    """Clean up after BeautifulSoup's output.

    When conditional comments are allowed, undo the HTML escaping the
    parser applied inside ``<!--[if ...]> ... <![endif]-->`` blocks so
    the markup within them is emitted verbatim.
    """
    if self.allow_conditional_comments:
        def _unescape(match):
            # Restore the angle brackets that were escaped to entities.
            comment = match.group()
            comment = comment.replace('&gt;', '>')
            comment = comment.replace('&lt;', '<')
            return comment

        # re.sub replaces every occurrence in a single pass.  The old
        # slice-and-splice loop reused match offsets from the *original*
        # string (stale once a shorter replacement was inserted), and
        # its replace('>', '>') / replace('<', '<') calls were no-ops.
        self.output = re.sub('(<!--\[if .+\].+?<!\[endif\]-->)',
                             _unescape, self.output)
def fromURL(url, log=None):
    """Shortcut Pynliner constructor. Equivalent to:

    >>> Pynliner().from_url(someURL).run()

    Returns processed HTML string.
    """
    inliner = Pynliner(log)
    return inliner.from_url(url).run()
def fromString(string, log=None):
    """Shortcut Pynliner constructor. Equivalent to:

    >>> Pynliner().from_string(someString).run()

    Returns processed HTML string.
    """
    inliner = Pynliner(log)
    return inliner.from_string(string).run()
| |
"""
mbed SDK
Copyright (c) 2011-2017 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Title: GNU ARM Eclipse (http://gnuarmeclipse.github.io) exporter.
Description: Creates a managed build project that can be imported by
the GNU ARM Eclipse plug-ins.
Author: Liviu Ionescu <ilg@livius.net>
"""
import os
import copy
import tempfile
import shutil
import copy
from subprocess import call, Popen, PIPE
from os.path import splitext, basename, relpath, dirname, exists, join, dirname
from random import randint
from json import load
from tools.export.exporters import Exporter, apply_supported_whitelist
from tools.options import list_profiles
from tools.targets import TARGET_MAP
from tools.utils import NotSupportedException
from tools.build_api import prepare_toolchain
# =============================================================================
class UID:
    """
    Helper class, used to generate unique ids required by .cproject symbols.
    """

    @property
    def id(self):
        # A fresh nine-digit, zero-padded decimal number on every access.
        value = randint(0, 999999999)
        return "%0.9u" % value
# Global UID generator instance.
# Passed to the template engine, and referred as {{u.id}}.
# Each invocation generates a new number.
u = UID()
# =============================================================================
# Post-binary hooks known to be compatible with this exporter.
POST_BINARY_WHITELIST = {
    "TEENSY3_1Code.binary_hook",
    "MCU_NRF51Code.binary_hook",
    "LPCTargetCode.lpc_patch",
    "LPC4088Code.binary_hook",
}
class GNUARMEclipse(Exporter):
NAME = 'GNU ARM Eclipse'
TOOLCHAIN = 'GCC_ARM'
@classmethod
def is_target_supported(cls, target_name):
    """Return True when *target_name* builds with GCC_ARM and any
    post-binary hook it declares is whitelisted.
    """
    return apply_supported_whitelist(
        cls.TOOLCHAIN, POST_BINARY_WHITELIST, TARGET_MAP[target_name])
# override
@property
def flags(self):
    """Returns a dictionary of toolchain flags.
    Keys of the dictionary are:
    cxx_flags - c++ flags
    c_flags - c flags
    ld_flags - linker flags
    asm_flags - assembler flags
    common_flags - common options

    The difference from the parent function is that it does not
    add macro definitions, since they are passed separately.
    """
    # Delegate to toolchain_flags(), which implements exactly this
    # logic for an arbitrary toolchain; the previous duplicated copy
    # of the same code had already started to drift cosmetically.
    return self.toolchain_flags(self.toolchain)
def toolchain_flags(self, toolchain):
    """Returns a dictionary of toolchain flags.
    Keys of the dictionary are:
    cxx_flags - c++ flags
    c_flags - c flags
    ld_flags - linker flags
    asm_flags - assembler flags
    common_flags - common options

    The difference from the above is that it takes a parameter.
    """
    # Note: use the config options from the currently selected toolchain.
    config_header = self.toolchain.get_config_header()

    # Deep-copy each flag list so later option stripping cannot mutate
    # the toolchain's own state.
    flags = {}
    for key, value in toolchain.flags.iteritems():
        flags[key + "_flags"] = copy.deepcopy(value)

    if config_header:
        config_header = relpath(config_header,
                                self.resources.file_basepath[config_header])
        header_options = self.toolchain.get_config_option(config_header)
        flags['c_flags'] += header_options
        flags['cxx_flags'] += header_options
    return flags
def validate_resources(self):
    """Abort the export early when no linker script is available."""
    if self.resources.linker_script:
        return
    raise NotSupportedException("No linker script found.")
def create_jinja_ctx(self):
    """Build the template context used to render .project/.cproject.

    Collects user/system libraries, per-profile compiler and linker
    options, include paths, preprocessor symbols and the linker script,
    and returns a dict ready for the Jinja templates.

    Raises:
        NotSupportedException: when no linker script is found.
    """
    self.validate_resources()

    self.resources.win_to_unix()

    # TODO: use some logger to display additional info if verbose

    libraries = []
    # print 'libraries'
    # print self.resources.libraries
    for lib in self.resources.libraries:
        l, _ = splitext(basename(lib))
        # drop the 'lib' prefix; the linker adds it back via -l<name>
        libraries.append(l[3:])

    self.system_libraries = [
        'stdc++', 'supc++', 'm', 'c', 'gcc', 'nosys'
    ]

    # Read in all profiles, we'll extract compiler options.
    profiles = self.get_all_profiles()

    profile_ids = [s.lower() for s in profiles]
    profile_ids.sort()

    # TODO: get the list from existing .cproject
    build_folders = [s.capitalize() for s in profile_ids]
    build_folders.append('BUILD')
    # print build_folders

    # Ignore object files already sitting in any build output folder.
    objects = [self.filter_dot(s) for s in self.resources.objects]
    for bf in build_folders:
        objects = [o for o in objects if not o.startswith(bf + '/')]
    # print 'objects'
    # print objects

    self.compute_exclusions()

    self.include_path = [
        self.filter_dot(s) for s in self.resources.inc_dirs]
    print 'Include folders: {0}'.format(len(self.include_path))

    self.as_defines = self.toolchain.get_symbols(True)
    self.c_defines = self.toolchain.get_symbols()
    # C++ shares the C preprocessor symbols.
    self.cpp_defines = self.c_defines
    print 'Symbols: {0}'.format(len(self.c_defines))

    self.ld_script = self.filter_dot(
        self.resources.linker_script)
    print 'Linker script: {0}'.format(self.ld_script)

    # One options entry per build profile (= CDT build configuration).
    self.options = {}
    for id in profile_ids:
        # There are 4 categories of options, a category common too
        # all tools and a specific category for each of the tools.
        opts = {}
        opts['common'] = {}
        opts['as'] = {}
        opts['c'] = {}
        opts['cpp'] = {}
        opts['ld'] = {}

        opts['id'] = id
        opts['name'] = opts['id'].capitalize()

        print
        print 'Build configuration: {0}'.format(opts['name'])

        profile = profiles[id]

        # A small hack, do not bother with src_path again,
        # pass an empty string to avoid crashing.
        src_paths = ['']
        target_name = self.toolchain.target.name
        toolchain = prepare_toolchain(
            src_paths, "", target_name, self.TOOLCHAIN, build_profile=[profile])

        # Hack to fill in build_dir
        toolchain.build_dir = self.toolchain.build_dir

        flags = self.toolchain_flags(toolchain)

        print 'Common flags:', ' '.join(flags['common_flags'])
        print 'C++ flags:', ' '.join(flags['cxx_flags'])
        print 'C flags:', ' '.join(flags['c_flags'])
        print 'ASM flags:', ' '.join(flags['asm_flags'])
        print 'Linker flags:', ' '.join(flags['ld_flags'])

        # Most GNU ARM Eclipse options have a parent,
        # either debug or release.
        if '-O0' in flags['common_flags'] or '-Og' in flags['common_flags']:
            opts['parent_id'] = 'debug'
        else:
            opts['parent_id'] = 'release'

        self.process_options(opts, flags)

        opts['as']['defines'] = self.as_defines
        opts['c']['defines'] = self.c_defines
        opts['cpp']['defines'] = self.cpp_defines

        opts['common']['include_paths'] = self.include_path
        opts['common']['excluded_folders'] = '|'.join(
            self.excluded_folders)

        opts['ld']['library_paths'] = [
            self.filter_dot(s) for s in self.resources.lib_dirs]

        opts['ld']['object_files'] = objects
        opts['ld']['user_libraries'] = libraries
        opts['ld']['system_libraries'] = self.system_libraries
        opts['ld']['script'] = join(id.capitalize(),
                                    "linker-script-%s.ld" % id)
        opts['cpp_cmd'] = '"{}"'.format(toolchain.preproc[0]) + " " + " ".join(toolchain.preproc[1:])

        # Unique IDs used in multiple places.
        # Those used only once are implemented with {{u.id}}.
        uid = {}
        uid['config'] = u.id
        uid['tool_c_compiler'] = u.id
        uid['tool_c_compiler_input'] = u.id
        uid['tool_cpp_compiler'] = u.id
        uid['tool_cpp_compiler_input'] = u.id
        opts['uid'] = uid

        self.options[id] = opts

    jinja_ctx = {
        'name': self.project_name,
        'ld_script': self.ld_script,

        # Compiler & linker command line options
        'options': self.options,

        # Must be an object with an `id` property, which
        # will be called repeatedly, to generate multiple UIDs.
        'u': u,
    }
    return jinja_ctx
# override
def generate(self):
    """
    Generate the .project and .cproject files.

    Renders the Eclipse project, CDT project, makefile.targets and
    .mbedignore templates using the context from create_jinja_ctx().

    Raises:
        NotSupportedException: when no linker script is found.
    """
    jinja_ctx = self.create_jinja_ctx()

    print
    print 'Create a GNU ARM Eclipse C++ managed project'
    print 'Project name: {0}'.format(self.project_name)
    print 'Target: {0}'.format(self.toolchain.target.name)
    print 'Toolchain: {0}'.format(self.TOOLCHAIN)

    self.gen_file('gnuarmeclipse/.project.tmpl', jinja_ctx,
                  '.project', trim_blocks=True, lstrip_blocks=True)
    self.gen_file('gnuarmeclipse/.cproject.tmpl', jinja_ctx,
                  '.cproject', trim_blocks=True, lstrip_blocks=True)
    self.gen_file('gnuarmeclipse/makefile.targets.tmpl', jinja_ctx,
                  'makefile.targets', trim_blocks=True, lstrip_blocks=True)
    self.gen_file('gnuarmeclipse/mbedignore.tmpl', jinja_ctx, '.mbedignore')

    print
    print 'Done. Import the \'{0}\' project in Eclipse.'.format(self.project_name)
# override
@staticmethod
def build(project_name, log_name="build_log.txt", cleanup=True):
    """
    Headless build an Eclipse project.

    The following steps are performed:
    - a temporary workspace is created,
    - the project is imported,
    - a clean build of all configurations is performed and
    - the temporary workspace is removed.

    The build results are in the Debug & Release folders.

    All executables (eclipse & toolchain) must be in the PATH.

    The general method to start a headless Eclipse build is:

    $ eclipse \
    --launcher.suppressErrors \
    -nosplash \
    -application org.eclipse.cdt.managedbuilder.core.headlessbuild \
    -data /path/to/workspace \
    -import /path/to/project \
    -cleanBuild "project[/configuration] | all"

    Returns 0 on success, -1 otherwise.
    """
    # TODO: possibly use the log file.

    # Create a temporary folder for the workspace.
    tmp_folder = tempfile.mkdtemp()
    cmd = [
        'eclipse',
        '--launcher.suppressErrors',
        '-nosplash',
        '-application org.eclipse.cdt.managedbuilder.core.headlessbuild',
        '-data', tmp_folder,
        '-import', os.getcwd(),
        '-cleanBuild', project_name
    ]
    # NOTE(review): joining the args and running with shell=True breaks
    # when any path (e.g. the cwd) contains spaces -- confirm before reuse.
    p = Popen(' '.join(cmd), shell=True, stdout=PIPE, stderr=PIPE)
    out, err = p.communicate()
    ret_code = p.returncode
    stdout_string = "=" * 10 + "STDOUT" + "=" * 10 + "\n"
    err_string = "=" * 10 + "STDERR" + "=" * 10 + "\n"
    err_string += err
    ret_string = "SUCCESS\n"
    if ret_code != 0:
        ret_string += "FAILURE\n"

    print "%s\n%s\n%s\n%s" % (stdout_string, out, err_string, ret_string)

    if log_name:
        # Write the output to the log file
        with open(log_name, 'w+') as f:
            f.write(stdout_string)
            f.write(out)
            f.write(err_string)
            f.write(ret_string)

    # Cleanup the exported and built files
    if cleanup:
        # NOTE(review): with the default cleanup=True this also deletes
        # the log file that was written just above.
        if exists(log_name):
            os.remove(log_name)
        os.remove('.project')
        os.remove('.cproject')
        if exists('Debug'):
            shutil.rmtree('Debug')
        if exists('Release'):
            shutil.rmtree('Release')
        if exists('makefile.targets'):
            os.remove('makefile.targets')

    # Always remove the temporary folder.
    if exists(tmp_folder):
        shutil.rmtree(tmp_folder)

    if ret_code == 0:
        # Return Success
        return 0
    # Seems like something went wrong.
    return -1
# -------------------------------------------------------------------------
@staticmethod
def get_all_profiles():
    """Load every build profile JSON from tools/profiles.

    Returns:
        dict mapping profile name (file name without '.json') to the
        parsed JSON content.
    """
    tools_path = dirname(dirname(dirname(__file__)))
    profiles_path = join(tools_path, "profiles")
    file_names = [join(profiles_path, fn)
                  for fn in os.listdir(profiles_path)
                  if fn.endswith(".json")]

    profiles = {}
    for fn in file_names:
        # 'with' closes the handle; the old load(open(fn)) leaked it.
        with open(fn) as f:
            content = load(f)
        profiles[basename(fn).replace(".json", "")] = content
    return profiles
# -------------------------------------------------------------------------
# Process source files/folders exclusions.
def compute_exclusions(self):
    """
    Compute the source folders to exclude from the CDT build.

    Sets `self.excluded_folders` to the ignored directories minus the
    include directories.

    NOTE(review): an earlier docstring described a tree-marking
    algorithm; the implementation is a plain set difference, and
    `source_folders` is computed only for the progress message below.
    """
    source_folders = [self.filter_dot(s) for s in set(dirname(
        src) for src in self.resources.c_sources + self.resources.cpp_sources + self.resources.s_sources)]

    self.excluded_folders = set(self.resources.ignored_dirs) - set(self.resources.inc_dirs)

    print 'Source folders: {0}, with {1} exclusions'.format(len(source_folders), len(self.excluded_folders))
# -------------------------------------------------------------------------
@staticmethod
def filter_dot(str):
"""
Remove the './' prefix, if present.
This function assumes that resources.win_to_unix()
replaced all windows backslashes with slashes.
"""
if str == None:
return None
if str[:2] == './':
return str[2:]
return str
# -------------------------------------------------------------------------
def dump_tree(self, nodes, depth=0):
    """Recursively print a folder tree (debug helper).

    :param nodes: dict of node dicts carrying 'name', 'is_used',
        'children' and optionally 'parent' keys.
    :param depth: current indentation level.
    """
    for k in nodes.keys():
        node = nodes[k]
        parent_name = node['parent'][
            'name'] if 'parent' in node.keys() else ''
        print ' ' * depth, node['name'], node['is_used'], parent_name
        if len(node['children'].keys()) != 0:
            self.dump_tree(node['children'], depth + 1)
def dump_paths(self, nodes, depth=0):
    """Recursively print each node's full path and 'is_used' flag
    (debug helper).

    :param nodes: dict of node dicts (same shape as in dump_tree).
    :param depth: recursion depth; not used in the output.
    """
    for k in nodes.keys():
        node = nodes[k]
        parts = []
        # Walk the 'parent' links up to the root to rebuild the path.
        while True:
            parts.insert(0, node['name'])
            if 'parent' not in node:
                break
            node = node['parent']
        path = '/'.join(parts)
        print path, nodes[k]['is_used']
        self.dump_paths(nodes[k]['children'], depth + 1)
# -------------------------------------------------------------------------
def process_options(self, opts, flags_in):
    """
    CDT managed projects store lots of build options in separate
    variables, with separate IDs in the .cproject file.
    When the CDT build is started, all these options are brought
    together to compose the compiler and linker command lines.

    Here the process is reversed, from the compiler and linker
    command lines, the options are identified and various flags are
    set to control the template generation process.

    Once identified, the options are removed from the command lines.

    The options that were not identified are options that do not
    have CDT equivalents and will be passed in the 'Other options'
    categories.

    Although this process does not have a very complicated logic,
    given the large number of explicit configuration options
    used by the GNU ARM Eclipse managed build plug-in, it is tedious...

    :param opts: dict with pre-created 'common', 'as', 'c', 'cpp' and
        'ld' sub-dicts; filled in place.
    :param flags_in: dict of flag lists as returned by toolchain_flags();
        left untouched (a deep copy is consumed).
    """
    # Make a copy of the flags, to be one by one removed after processing.
    # (The dead 'if False' debug dumps that used to live here were removed.)
    flags = copy.deepcopy(flags_in)

    # Initialise the 'last resort' options where all unrecognised
    # options will be collected.
    opts['as']['other'] = ''
    opts['c']['other'] = ''
    opts['cpp']['other'] = ''
    opts['ld']['other'] = ''

    # Fallback per-core CPU/FPU data, used when no -mcpu=/-mfpu= is given.
    MCPUS = {
        'Cortex-M0': {'mcpu': 'cortex-m0', 'fpu_unit': None},
        'Cortex-M0+': {'mcpu': 'cortex-m0plus', 'fpu_unit': None},
        'Cortex-M1': {'mcpu': 'cortex-m1', 'fpu_unit': None},
        'Cortex-M3': {'mcpu': 'cortex-m3', 'fpu_unit': None},
        'Cortex-M4': {'mcpu': 'cortex-m4', 'fpu_unit': None},
        'Cortex-M4F': {'mcpu': 'cortex-m4', 'fpu_unit': 'fpv4spd16'},
        'Cortex-M7': {'mcpu': 'cortex-m7', 'fpu_unit': None},
        'Cortex-M7F': {'mcpu': 'cortex-m7', 'fpu_unit': 'fpv4spd16'},
        'Cortex-M7FD': {'mcpu': 'cortex-m7', 'fpu_unit': 'fpv5d16'},
        'Cortex-A9': {'mcpu': 'cortex-a9', 'fpu_unit': 'vfpv3'}
    }

    # Remove options that are supplied by CDT
    self.remove_option(flags['common_flags'], '-c')
    self.remove_option(flags['common_flags'], '-MMD')

    # As 'plan B', get the CPU from the target definition.
    core = self.toolchain.target.core

    opts['common']['arm.target.family'] = None

    # cortex-m0, cortex-m0-small-multiply, cortex-m0plus,
    # cortex-m0plus-small-multiply, cortex-m1, cortex-m1-small-multiply,
    # cortex-m3, cortex-m4, cortex-m7.
    str = self.find_options(flags['common_flags'], '-mcpu=')
    # FIX: find_options() returns '' (never None) when nothing matches,
    # so the previous 'str != None' test was always true and the MCPUS
    # fallback below was unreachable.  Same for the other guards below.
    if str:
        opts['common']['arm.target.family'] = str[len('-mcpu='):]
        self.remove_option(flags['common_flags'], str)
        self.remove_option(flags['ld_flags'], str)
    else:
        if core not in MCPUS:
            raise NotSupportedException(
                'Target core {0} not supported.'.format(core))
        opts['common']['arm.target.family'] = MCPUS[core]['mcpu']

    opts['common']['arm.target.arch'] = 'none'
    str = self.find_options(flags['common_flags'], '-march=')
    arch = str[len('-march='):]
    archs = {'armv6-m': 'armv6-m', 'armv7-m': 'armv7-m', 'armv7-a': 'armv7-a'}
    if arch in archs:
        opts['common']['arm.target.arch'] = archs[arch]
        self.remove_option(flags['common_flags'], str)

    opts['common']['arm.target.instructionset'] = 'thumb'
    if '-mthumb' in flags['common_flags']:
        self.remove_option(flags['common_flags'], '-mthumb')
        self.remove_option(flags['ld_flags'], '-mthumb')
    elif '-marm' in flags['common_flags']:
        opts['common']['arm.target.instructionset'] = 'arm'
        self.remove_option(flags['common_flags'], '-marm')
        self.remove_option(flags['ld_flags'], '-marm')

    opts['common']['arm.target.thumbinterwork'] = False
    if '-mthumb-interwork' in flags['common_flags']:
        opts['common']['arm.target.thumbinterwork'] = True
        self.remove_option(flags['common_flags'], '-mthumb-interwork')

    opts['common']['arm.target.endianness'] = None
    if '-mlittle-endian' in flags['common_flags']:
        opts['common']['arm.target.endianness'] = 'little'
        self.remove_option(flags['common_flags'], '-mlittle-endian')
    elif '-mbig-endian' in flags['common_flags']:
        opts['common']['arm.target.endianness'] = 'big'
        self.remove_option(flags['common_flags'], '-mbig-endian')

    opts['common']['arm.target.fpu.unit'] = None
    # default, fpv4spd16, fpv5d16, fpv5spd16
    str = self.find_options(flags['common_flags'], '-mfpu=')
    if str:
        fpu = str[len('-mfpu='):]
        fpus = {
            'fpv4-sp-d16': 'fpv4spd16',
            'fpv5-d16': 'fpv5d16',
            'fpv5-sp-d16': 'fpv5spd16'
        }
        if fpu in fpus:
            opts['common']['arm.target.fpu.unit'] = fpus[fpu]
            self.remove_option(flags['common_flags'], str)
            self.remove_option(flags['ld_flags'], str)
    if opts['common']['arm.target.fpu.unit'] is None:
        if core not in MCPUS:
            raise NotSupportedException(
                'Target core {0} not supported.'.format(core))
        if MCPUS[core]['fpu_unit']:
            opts['common'][
                'arm.target.fpu.unit'] = MCPUS[core]['fpu_unit']

    # soft, softfp, hard.
    str = self.find_options(flags['common_flags'], '-mfloat-abi=')
    if str:
        opts['common']['arm.target.fpu.abi'] = str[
            len('-mfloat-abi='):]
        self.remove_option(flags['common_flags'], str)
        self.remove_option(flags['ld_flags'], str)

    opts['common']['arm.target.unalignedaccess'] = None
    if '-munaligned-access' in flags['common_flags']:
        opts['common']['arm.target.unalignedaccess'] = 'enabled'
        self.remove_option(flags['common_flags'], '-munaligned-access')
    elif '-mno-unaligned-access' in flags['common_flags']:
        opts['common']['arm.target.unalignedaccess'] = 'disabled'
        self.remove_option(flags['common_flags'], '-mno-unaligned-access')

    # Default optimisation level for Release.
    opts['common']['optimization.level'] = '-Os'

    # If the project defines an optimisation level, it is used
    # only for the Release configuration, the Debug one used '-Og'.
    str = self.find_options(flags['common_flags'], '-O')
    if str:
        levels = {
            '-O0': 'none', '-O1': 'optimize', '-O2': 'more',
            '-O3': 'most', '-Os': 'size', '-Og': 'debug'
        }
        if str in levels:
            opts['common']['optimization.level'] = levels[str]
            self.remove_option(flags['common_flags'], str)

    include_files = []
    for all_flags in [flags['common_flags'], flags['c_flags'], flags['cxx_flags']]:
        while '-include' in all_flags:
            ix = all_flags.index('-include')
            str = all_flags[ix + 1]
            if str not in include_files:
                include_files.append(str)
            self.remove_option(all_flags, '-include')
            self.remove_option(all_flags, str)

    opts['common']['include_files'] = include_files

    if '-ansi' in flags['c_flags']:
        opts['c']['compiler.std'] = '-ansi'
        # FIX: remove the '-ansi' flag itself; 'str' held a stale value here.
        self.remove_option(flags['c_flags'], '-ansi')
    else:
        str = self.find_options(flags['c_flags'], '-std')
        std = str[len('-std='):]
        c_std = {
            'c90': 'c90', 'c89': 'c90', 'gnu90': 'gnu90', 'gnu89': 'gnu90',
            'c99': 'c99', 'c9x': 'c99', 'gnu99': 'gnu99', 'gnu9x': 'gnu98',
            'c11': 'c11', 'c1x': 'c11', 'gnu11': 'gnu11', 'gnu1x': 'gnu11'
        }
        if std in c_std:
            opts['c']['compiler.std'] = c_std[std]
            self.remove_option(flags['c_flags'], str)

    if '-ansi' in flags['cxx_flags']:
        opts['cpp']['compiler.std'] = '-ansi'
        # FIX: same stale-'str' defect as on the C side above.
        self.remove_option(flags['cxx_flags'], '-ansi')
    else:
        str = self.find_options(flags['cxx_flags'], '-std')
        std = str[len('-std='):]
        cpp_std = {
            'c++98': 'cpp98', 'c++03': 'cpp98',
            'gnu++98': 'gnucpp98', 'gnu++03': 'gnucpp98',
            'c++0x': 'cpp0x', 'gnu++0x': 'gnucpp0x',
            'c++11': 'cpp11', 'gnu++11': 'gnucpp11',
            'c++1y': 'cpp1y', 'gnu++1y': 'gnucpp1y',
            'c++14': 'cpp14', 'gnu++14': 'gnucpp14',
            'c++1z': 'cpp1z', 'gnu++1z': 'gnucpp1z',
        }
        if std in cpp_std:
            opts['cpp']['compiler.std'] = cpp_std[std]
            self.remove_option(flags['cxx_flags'], str)

    # Common optimisation options.
    optimization_options = {
        '-fmessage-length=0': 'optimization.messagelength',
        '-fsigned-char': 'optimization.signedchar',
        '-ffunction-sections': 'optimization.functionsections',
        '-fdata-sections': 'optimization.datasections',
        '-fno-common': 'optimization.nocommon',
        '-fno-inline-functions': 'optimization.noinlinefunctions',
        '-ffreestanding': 'optimization.freestanding',
        '-fno-builtin': 'optimization.nobuiltin',
        '-fsingle-precision-constant': 'optimization.spconstant',
        '-fPIC': 'optimization.PIC',
        '-fno-move-loop-invariants': 'optimization.nomoveloopinvariants',
    }

    for option in optimization_options:
        opts['common'][optimization_options[option]] = False
        if option in flags['common_flags']:
            opts['common'][optimization_options[option]] = True
            self.remove_option(flags['common_flags'], option)

    # Common warning options.
    warning_options = {
        '-fsyntax-only': 'warnings.syntaxonly',
        '-pedantic': 'warnings.pedantic',
        '-pedantic-errors': 'warnings.pedanticerrors',
        '-w': 'warnings.nowarn',
        '-Wunused': 'warnings.unused',
        '-Wuninitialized': 'warnings.uninitialized',
        '-Wall': 'warnings.allwarn',
        '-Wextra': 'warnings.extrawarn',
        '-Wmissing-declarations': 'warnings.missingdeclaration',
        '-Wconversion': 'warnings.conversion',
        '-Wpointer-arith': 'warnings.pointerarith',
        '-Wpadded': 'warnings.padded',
        '-Wshadow': 'warnings.shadow',
        '-Wlogical-op': 'warnings.logicalop',
        '-Waggregate-return': 'warnings.agreggatereturn',
        '-Wfloat-equal': 'warnings.floatequal',
        '-Werror': 'warnings.toerrors',
    }

    for option in warning_options:
        opts['common'][warning_options[option]] = False
        if option in flags['common_flags']:
            opts['common'][warning_options[option]] = True
            self.remove_option(flags['common_flags'], option)

    # Common debug options.
    debug_levels = {
        '-g': 'default',
        '-g1': 'minimal',
        '-g3': 'max',
    }
    opts['common']['debugging.level'] = 'none'
    for option in debug_levels:
        if option in flags['common_flags']:
            opts['common'][
                'debugging.level'] = debug_levels[option]
            self.remove_option(flags['common_flags'], option)

    debug_formats = {
        '-ggdb': 'gdb',
        '-gstabs': 'stabs',
        '-gstabs+': 'stabsplus',
        '-gdwarf-2': 'dwarf2',
        '-gdwarf-3': 'dwarf3',
        '-gdwarf-4': 'dwarf4',
        '-gdwarf-5': 'dwarf5',
    }

    opts['common']['debugging.format'] = ''
    # FIX: iterate debug_formats; this loop previously iterated
    # debug_levels, so a debug *format* flag was never recognised.
    for option in debug_formats:
        if option in flags['common_flags']:
            opts['common'][
                'debugging.format'] = debug_formats[option]
            self.remove_option(flags['common_flags'], option)

    opts['common']['debugging.prof'] = False
    if '-p' in flags['common_flags']:
        opts['common']['debugging.prof'] = True
        self.remove_option(flags['common_flags'], '-p')

    opts['common']['debugging.gprof'] = False
    if '-pg' in flags['common_flags']:
        opts['common']['debugging.gprof'] = True
        # FIX: remove '-pg' (was misspelled '-gp', leaving the flag in
        # the list and leaking it into the 'other' options).
        self.remove_option(flags['common_flags'], '-pg')

    # Assembler options.
    opts['as']['usepreprocessor'] = False
    while '-x' in flags['asm_flags']:
        ix = flags['asm_flags'].index('-x')
        str = flags['asm_flags'][ix + 1]

        if str == 'assembler-with-cpp':
            opts['as']['usepreprocessor'] = True
        else:
            # Collect all other assembler options.
            opts['as']['other'] += ' -x ' + str

        self.remove_option(flags['asm_flags'], '-x')
        self.remove_option(flags['asm_flags'], 'assembler-with-cpp')

    opts['as']['nostdinc'] = False
    if '-nostdinc' in flags['asm_flags']:
        opts['as']['nostdinc'] = True
        self.remove_option(flags['asm_flags'], '-nostdinc')

    opts['as']['verbose'] = False
    if '-v' in flags['asm_flags']:
        opts['as']['verbose'] = True
        self.remove_option(flags['asm_flags'], '-v')

    # C options.
    opts['c']['nostdinc'] = False
    if '-nostdinc' in flags['c_flags']:
        opts['c']['nostdinc'] = True
        self.remove_option(flags['c_flags'], '-nostdinc')

    opts['c']['verbose'] = False
    if '-v' in flags['c_flags']:
        opts['c']['verbose'] = True
        self.remove_option(flags['c_flags'], '-v')

    warning_options = {
        '-Wmissing-prototypes': 'warnings.missingprototypes',
        '-Wstrict-prototypes': 'warnings.strictprototypes',
        '-Wbad-function-cast': 'warnings.badfunctioncast',
    }

    for option in warning_options:
        opts['c'][warning_options[option]] = False
        # NOTE(review): these C-only warnings are looked up in
        # common_flags, not c_flags -- confirm this matches how the
        # build profiles emit them.
        if option in flags['common_flags']:
            opts['c'][warning_options[option]] = True
            self.remove_option(flags['common_flags'], option)

    # C++ options.
    opts['cpp']['nostdinc'] = False
    if '-nostdinc' in flags['cxx_flags']:
        opts['cpp']['nostdinc'] = True
        self.remove_option(flags['cxx_flags'], '-nostdinc')

    opts['cpp']['nostdincpp'] = False
    if '-nostdinc++' in flags['cxx_flags']:
        opts['cpp']['nostdincpp'] = True
        self.remove_option(flags['cxx_flags'], '-nostdinc++')

    optimization_options = {
        '-fno-exceptions': 'optimization.noexceptions',
        '-fno-rtti': 'optimization.nortti',
        '-fno-use-cxa-atexit': 'optimization.nousecxaatexit',
        '-fno-threadsafe-statics': 'optimization.nothreadsafestatics',
    }

    for option in optimization_options:
        opts['cpp'][optimization_options[option]] = False
        if option in flags['cxx_flags']:
            opts['cpp'][optimization_options[option]] = True
            self.remove_option(flags['cxx_flags'], option)
        if option in flags['common_flags']:
            opts['cpp'][optimization_options[option]] = True
            self.remove_option(flags['common_flags'], option)

    warning_options = {
        '-Wabi': 'warnabi',
        '-Wctor-dtor-privacy': 'warnings.ctordtorprivacy',
        '-Wnoexcept': 'warnings.noexcept',
        '-Wnon-virtual-dtor': 'warnings.nonvirtualdtor',
        '-Wstrict-null-sentinel': 'warnings.strictnullsentinel',
        '-Wsign-promo': 'warnings.signpromo',
        '-Weffc++': 'warneffc',
    }

    for option in warning_options:
        opts['cpp'][warning_options[option]] = False
        if option in flags['cxx_flags']:
            opts['cpp'][warning_options[option]] = True
            self.remove_option(flags['cxx_flags'], option)
        if option in flags['common_flags']:
            opts['cpp'][warning_options[option]] = True
            self.remove_option(flags['common_flags'], option)

    opts['cpp']['verbose'] = False
    if '-v' in flags['cxx_flags']:
        opts['cpp']['verbose'] = True
        self.remove_option(flags['cxx_flags'], '-v')

    # Linker options.
    linker_options = {
        '-nostartfiles': 'nostart',
        '-nodefaultlibs': 'nodeflibs',
        '-nostdlib': 'nostdlibs',
    }

    for option in linker_options:
        opts['ld'][linker_options[option]] = False
        if option in flags['ld_flags']:
            opts['ld'][linker_options[option]] = True
            self.remove_option(flags['ld_flags'], option)

    opts['ld']['gcsections'] = False
    if '-Wl,--gc-sections' in flags['ld_flags']:
        opts['ld']['gcsections'] = True
        self.remove_option(flags['ld_flags'], '-Wl,--gc-sections')

    opts['ld']['flags'] = []
    to_remove = []
    for opt in flags['ld_flags']:
        if opt.startswith('-Wl,--wrap,'):
            opts['ld']['flags'].append(
                '--wrap=' + opt[len('-Wl,--wrap,'):])
            to_remove.append(opt)
    for opt in to_remove:
        self.remove_option(flags['ld_flags'], opt)

    # Other tool remaining options are separated by category.
    opts['as']['otherwarnings'] = self.find_options(
        flags['asm_flags'], '-W')

    opts['c']['otherwarnings'] = self.find_options(
        flags['c_flags'], '-W')
    opts['c']['otheroptimizations'] = self.find_options(flags[
        'c_flags'], '-f')

    opts['cpp']['otherwarnings'] = self.find_options(
        flags['cxx_flags'], '-W')
    opts['cpp']['otheroptimizations'] = self.find_options(
        flags['cxx_flags'], '-f')

    # Other common remaining options are separated by category.
    opts['common']['optimization.other'] = self.find_options(
        flags['common_flags'], '-f')
    opts['common']['warnings.other'] = self.find_options(
        flags['common_flags'], '-W')

    # Remaining common flags are added to each tool.
    opts['as']['other'] += ' ' + \
        ' '.join(flags['common_flags']) + ' ' + \
        ' '.join(flags['asm_flags'])
    opts['c']['other'] += ' ' + \
        ' '.join(flags['common_flags']) + ' ' + ' '.join(flags['c_flags'])
    opts['cpp']['other'] += ' ' + \
        ' '.join(flags['common_flags']) + ' ' + \
        ' '.join(flags['cxx_flags'])
    opts['ld']['other'] += ' ' + \
        ' '.join(flags['common_flags']) + ' ' + ' '.join(flags['ld_flags'])

    if len(self.system_libraries) > 0:
        opts['ld']['other'] += ' -Wl,--start-group '
        opts['ld'][
            'other'] += ' '.join('-l' + s for s in self.system_libraries)
        opts['ld']['other'] += ' -Wl,--end-group '

    # Strip all 'other' flags, since they might have leading spaces.
    opts['as']['other'] = opts['as']['other'].strip()
    opts['c']['other'] = opts['c']['other'].strip()
    opts['cpp']['other'] = opts['cpp']['other'].strip()
    opts['ld']['other'] = opts['ld']['other'].strip()
@staticmethod
def find_options(lst, option):
    # NOTE(review): dead code -- this definition is immediately shadowed
    # by the second `find_options` below (the last definition in a class
    # body wins), which returns '' instead of None and also removes the
    # matched options from `lst`.  Kept byte-identical pending removal.
    """Return the first string in *lst* starting with *option*, else None."""
    tmp = [str for str in lst if str.startswith(option)]
    if len(tmp) > 0:
        return tmp[0]
    else:
        return None
@staticmethod
def find_options(lst, prefix):
other = ''
opts = [str for str in lst if str.startswith(prefix)]
if len(opts) > 0:
for opt in opts:
other += ' ' + opt
GNUARMEclipse.remove_option(lst, opt)
return other.strip()
@staticmethod
def remove_option(lst, option):
if option in lst:
lst.remove(option)
# =============================================================================
| |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._sql_pool_vulnerability_assessment_rule_baselines_operations import build_create_or_update_request, build_delete_request, build_get_request
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class SqlPoolVulnerabilityAssessmentRuleBaselinesOperations:
    """SqlPoolVulnerabilityAssessmentRuleBaselinesOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.synapse.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    @distributed_trace_async
    async def create_or_update(
        self,
        resource_group_name: str,
        workspace_name: str,
        sql_pool_name: str,
        vulnerability_assessment_name: Union[str, "_models.VulnerabilityAssessmentName"],
        rule_id: str,
        baseline_name: Union[str, "_models.VulnerabilityAssessmentPolicyBaselineName"],
        parameters: "_models.SqlPoolVulnerabilityAssessmentRuleBaseline",
        **kwargs: Any
    ) -> "_models.SqlPoolVulnerabilityAssessmentRuleBaseline":
        """Creates or updates a Sql pool's vulnerability assessment rule baseline.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
        :type workspace_name: str
        :param sql_pool_name: SQL pool name.
        :type sql_pool_name: str
        :param vulnerability_assessment_name: The name of the vulnerability assessment.
        :type vulnerability_assessment_name: str or
         ~azure.mgmt.synapse.models.VulnerabilityAssessmentName
        :param rule_id: The vulnerability assessment rule ID.
        :type rule_id: str
        :param baseline_name: The name of the vulnerability assessment rule baseline (default implies a
         baseline on a Sql pool level rule and master for workspace level rule).
        :type baseline_name: str or
         ~azure.mgmt.synapse.models.VulnerabilityAssessmentPolicyBaselineName
        :param parameters: The requested rule baseline resource.
        :type parameters: ~azure.mgmt.synapse.models.SqlPoolVulnerabilityAssessmentRuleBaseline
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SqlPoolVulnerabilityAssessmentRuleBaseline, or the result of cls(response)
        :rtype: ~azure.mgmt.synapse.models.SqlPoolVulnerabilityAssessmentRuleBaseline
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SqlPoolVulnerabilityAssessmentRuleBaseline"]
        # 401/404/409 map to typed azure-core exceptions; callers may extend or
        # override the mapping via the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
        # Serialize the model into the JSON request body.
        _json = self._serialize.body(parameters, 'SqlPoolVulnerabilityAssessmentRuleBaseline')
        request = build_create_or_update_request(
            subscription_id=self._config.subscription_id,
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            sql_pool_name=sql_pool_name,
            vulnerability_assessment_name=vulnerability_assessment_name,
            rule_id=rule_id,
            baseline_name=baseline_name,
            content_type=content_type,
            json=_json,
            template_url=self.create_or_update.metadata['url'],
        )
        # Convert the azure.core.rest request to the pipeline transport form and
        # expand the URL template against the client's base URL.
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('SqlPoolVulnerabilityAssessmentRuleBaseline', pipeline_response)
        if cls:
            # Hand the raw response and deserialized model to the caller's hook.
            return cls(pipeline_response, deserialized, {})
        return deserialized

    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/vulnerabilityAssessments/{vulnerabilityAssessmentName}/rules/{ruleId}/baselines/{baselineName}'}  # type: ignore

    @distributed_trace_async
    async def delete(
        self,
        resource_group_name: str,
        workspace_name: str,
        sql_pool_name: str,
        vulnerability_assessment_name: Union[str, "_models.VulnerabilityAssessmentName"],
        rule_id: str,
        baseline_name: Union[str, "_models.VulnerabilityAssessmentPolicyBaselineName"],
        **kwargs: Any
    ) -> None:
        """Removes the database's vulnerability assessment rule baseline.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
        :type workspace_name: str
        :param sql_pool_name: SQL pool name.
        :type sql_pool_name: str
        :param vulnerability_assessment_name: The name of the vulnerability assessment.
        :type vulnerability_assessment_name: str or
         ~azure.mgmt.synapse.models.VulnerabilityAssessmentName
        :param rule_id: The vulnerability assessment rule ID.
        :type rule_id: str
        :param baseline_name: The name of the vulnerability assessment rule baseline (default implies a
         baseline on a Sql pool level rule and master for workspace level rule).
        :type baseline_name: str or
         ~azure.mgmt.synapse.models.VulnerabilityAssessmentPolicyBaselineName
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_delete_request(
            subscription_id=self._config.subscription_id,
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            sql_pool_name=sql_pool_name,
            vulnerability_assessment_name=vulnerability_assessment_name,
            rule_id=rule_id,
            baseline_name=baseline_name,
            template_url=self.delete.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # 204 is returned when the baseline was already absent.
        if response.status_code not in [200, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        if cls:
            return cls(pipeline_response, None, {})

    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/vulnerabilityAssessments/{vulnerabilityAssessmentName}/rules/{ruleId}/baselines/{baselineName}'}  # type: ignore

    @distributed_trace_async
    async def get(
        self,
        resource_group_name: str,
        workspace_name: str,
        sql_pool_name: str,
        vulnerability_assessment_name: Union[str, "_models.VulnerabilityAssessmentName"],
        rule_id: str,
        baseline_name: Union[str, "_models.VulnerabilityAssessmentPolicyBaselineName"],
        **kwargs: Any
    ) -> "_models.SqlPoolVulnerabilityAssessmentRuleBaseline":
        """Gets a SqlPool's vulnerability assessment rule baseline.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
        :type workspace_name: str
        :param sql_pool_name: SQL pool name.
        :type sql_pool_name: str
        :param vulnerability_assessment_name: The name of the vulnerability assessment.
        :type vulnerability_assessment_name: str or
         ~azure.mgmt.synapse.models.VulnerabilityAssessmentName
        :param rule_id: The vulnerability assessment rule ID.
        :type rule_id: str
        :param baseline_name: The name of the vulnerability assessment rule baseline (default implies a
         baseline on a Sql pool level rule and master for server level rule).
        :type baseline_name: str or
         ~azure.mgmt.synapse.models.VulnerabilityAssessmentPolicyBaselineName
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SqlPoolVulnerabilityAssessmentRuleBaseline, or the result of cls(response)
        :rtype: ~azure.mgmt.synapse.models.SqlPoolVulnerabilityAssessmentRuleBaseline
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SqlPoolVulnerabilityAssessmentRuleBaseline"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_get_request(
            subscription_id=self._config.subscription_id,
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            sql_pool_name=sql_pool_name,
            vulnerability_assessment_name=vulnerability_assessment_name,
            rule_id=rule_id,
            baseline_name=baseline_name,
            template_url=self.get.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('SqlPoolVulnerabilityAssessmentRuleBaseline', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/vulnerabilityAssessments/{vulnerabilityAssessmentName}/rules/{ruleId}/baselines/{baselineName}'}  # type: ignore
| |
#!/usr/bin/env python
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A class to keep track of devices across builds and report state."""
import logging
import optparse
import os
import psutil
import re
import signal
import smtplib
import subprocess
import sys
import time
import urllib
import bb_annotations
import bb_utils
sys.path.append(os.path.join(os.path.dirname(__file__),
os.pardir, os.pardir, 'util', 'lib',
'common'))
import perf_tests_results_helper # pylint: disable=F0401
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from pylib import android_commands
from pylib import constants
from pylib.cmd_helper import GetCmdOutput
from pylib.device import device_blacklist
from pylib.device import device_errors
from pylib.device import device_list
from pylib.device import device_utils
def DeviceInfo(serial, options):
  """Gathers info on a device via various adb calls.

  Args:
    serial: The serial of the attached device to construct info about.
    options: Parsed command-line options; only no_provisioning_check is
        consulted here.

  Returns:
    Tuple of device type, build id, battery level, report as a string,
    error messages, and boolean indicating whether or not device can be
    used for testing.
  """
  device_adb = device_utils.DeviceUtils(serial)
  device_type = device_adb.old_interface.GetBuildProduct()
  device_build = device_adb.old_interface.GetBuildId()
  device_build_type = device_adb.old_interface.GetBuildType()
  device_product_name = device_adb.old_interface.GetProductName()

  try:
    battery_info = device_adb.old_interface.GetBatteryInfo()
  except Exception as e:
    # Default to "full" below so a flaky battery query does not power off
    # the device.
    battery_info = {}
    logging.error('Unable to obtain battery info for %s, %s', serial, e)

  def _GetData(re_expression, line, lambda_function=lambda x:x):
    # Return the first regex match in |line| (optionally transformed), or
    # 'Unknown' when |line| is empty or nothing matches.
    if not line:
      return 'Unknown'
    found = re.findall(re_expression, line)
    if found and len(found):
      return lambda_function(found[0])
    return 'Unknown'

  battery_level = int(battery_info.get('level', 100))
  # Raw string: avoids the invalid '\d' escape in a plain string literal.
  # Only the last six IMEI digits are kept to limit what ends up in logs.
  imei_slice = _GetData(r'Device ID = (\d+)',
                        device_adb.old_interface.GetSubscriberInfo(),
                        lambda x: x[-6:])
  report = ['Device %s (%s)' % (serial, device_type),
            ' Build: %s (%s)' %
            (device_build, device_adb.old_interface.GetBuildFingerprint()),
            ' Current Battery Service state: ',
            '\n'.join([' %s: %s' % (k, v)
                       for k, v in battery_info.iteritems()]),
            ' IMEI slice: %s' % imei_slice,
            ' Wifi IP: %s' % device_adb.old_interface.GetWifiIP(),
            '']

  errors = []
  if battery_level < 15:
    errors += ['Device critically low in battery. Turning off device.']
  if not options.no_provisioning_check:
    setup_wizard_disabled = (
        device_adb.old_interface.GetSetupWizardStatus() == 'DISABLED')
    if not setup_wizard_disabled and device_build_type != 'user':
      errors += ['Setup wizard not disabled. Was it provisioned correctly?']
  if (device_product_name == 'mantaray' and
      battery_info.get('AC powered', None) != 'true'):
    errors += ['Mantaray device not connected to AC power.']

  # Turn off devices with low battery.
  if battery_level < 15:
    try:
      device_adb.EnableRoot()
    except device_errors.CommandFailedError as e:
      # Attempt shutdown anyway.
      # TODO(jbudorick) Handle this exception appropriately after interface
      # conversions are finished.
      logging.error(str(e))
    device_adb.old_interface.Shutdown()
  full_report = '\n'.join(report)
  return device_type, device_build, battery_level, full_report, errors, True
def CheckForMissingDevices(options, adb_online_devs):
  """Uses file of previous online devices to detect broken phones.

  Args:
    options: out_dir parameter of options argument is used as the base
             directory to load and update the cache file.
    adb_online_devs: A list of serial numbers of the currently visible
                     and online attached devices.

  Returns:
    A list of error-message strings when devices are missing or none have
    ever been seen; None otherwise (falls off the end of the function).
  """
  # TODO(navabi): remove this once the bug that causes different number
  # of devices to be detected between calls is fixed.
  logger = logging.getLogger()
  logger.setLevel(logging.INFO)
  out_dir = os.path.abspath(options.out_dir)
  # last_devices denotes all known devices prior to this run
  last_devices_path = os.path.join(out_dir, device_list.LAST_DEVICES_FILENAME)
  last_missing_devices_path = os.path.join(out_dir,
      device_list.LAST_MISSING_DEVICES_FILENAME)
  try:
    last_devices = device_list.GetPersistentDeviceList(last_devices_path)
  except IOError:
    # Ignore error, file might not exist
    last_devices = []
  try:
    last_missing_devices = device_list.GetPersistentDeviceList(
        last_missing_devices_path)
  except IOError:
    last_missing_devices = []

  missing_devs = list(set(last_devices) - set(adb_online_devs))
  # Alert only on devices that went missing since the previous run, so a
  # persistently-missing device does not re-email the lab on every build.
  new_missing_devs = list(set(missing_devs) - set(last_missing_devices))

  if new_missing_devs:
    logging.info('new_missing_devs %s' % new_missing_devs)
    devices_missing_msg = '%d devices not detected.' % len(missing_devs)
    bb_annotations.PrintSummaryText(devices_missing_msg)
    from_address = 'chrome-bot@chromium.org'
    to_address = 'chrome-labs-tech-ticket@google.com'
    subject = 'Devices offline on %s' % os.environ.get('BUILDBOT_SLAVENAME')
    msg = ('Please reboot the following devices:\n%s' %
           '\n'.join(map(str,new_missing_devs)))
    SendEmail(from_address, to_address, subject, msg)

  # Persist the union of everything ever seen plus today's missing set for
  # the next run's comparison.
  all_known_devices = list(set(adb_online_devs) | set(last_devices))
  device_list.WritePersistentDeviceList(last_devices_path, all_known_devices)
  device_list.WritePersistentDeviceList(last_missing_devices_path, missing_devs)

  if not all_known_devices:
    # This can happen if for some reason the .last_devices file is not
    # present or if it was empty.
    return ['No online devices. Have any devices been plugged in?']
  if missing_devs:
    devices_missing_msg = '%d devices not detected.' % len(missing_devs)
    bb_annotations.PrintSummaryText(devices_missing_msg)
    # TODO(navabi): Debug by printing both output from GetCmdOutput and
    # GetAttachedDevices to compare results.
    crbug_link = ('https://code.google.com/p/chromium/issues/entry?summary='
                  '%s&comment=%s&labels=Restrict-View-Google,OS-Android,Infra' %
                  (urllib.quote('Device Offline'),
                   urllib.quote('Buildbot: %s %s\n'
                                'Build: %s\n'
                                '(please don\'t change any labels)' %
                                (os.environ.get('BUILDBOT_BUILDERNAME'),
                                 os.environ.get('BUILDBOT_SLAVENAME'),
                                 os.environ.get('BUILDBOT_BUILDNUMBER')))))
    return ['Current online devices: %s' % adb_online_devs,
            '%s are no longer visible. Were they removed?\n' % missing_devs,
            'SHERIFF:\n',
            '@@@STEP_LINK@Click here to file a bug@%s@@@\n' % crbug_link,
            'Cache file: %s\n\n' % last_devices_path,
            'adb devices: %s' % GetCmdOutput(['adb', 'devices']),
            'adb devices(GetAttachedDevices): %s' %
            android_commands.GetAttachedDevices()]
  else:
    new_devs = set(adb_online_devs) - set(last_devices)
    if new_devs and os.path.exists(last_devices_path):
      # Only warn about new devices once the cache file exists, i.e. not on
      # the very first run on a fresh checkout.
      bb_annotations.PrintWarning()
      bb_annotations.PrintSummaryText(
          '%d new devices detected' % len(new_devs))
      print ('New devices detected %s. And now back to your '
             'regularly scheduled program.' % list(new_devs))
def SendEmail(from_address, to_address, subject, msg):
msg_body = '\r\n'.join(['From: %s' % from_address, 'To: %s' % to_address,
'Subject: %s' % subject, '', msg])
try:
server = smtplib.SMTP('localhost')
server.sendmail(from_address, [to_address], msg_body)
server.quit()
except Exception as e:
print 'Failed to send alert email. Error: %s' % e
def RestartUsb():
if not os.path.isfile('/usr/bin/restart_usb'):
print ('ERROR: Could not restart usb. /usr/bin/restart_usb not installed '
'on host (see BUG=305769).')
return False
lsusb_proc = bb_utils.SpawnCmd(['lsusb'], stdout=subprocess.PIPE)
lsusb_output, _ = lsusb_proc.communicate()
if lsusb_proc.returncode:
print ('Error: Could not get list of USB ports (i.e. lsusb).')
return lsusb_proc.returncode
usb_devices = [re.findall('Bus (\d\d\d) Device (\d\d\d)', lsusb_line)[0]
for lsusb_line in lsusb_output.strip().split('\n')]
all_restarted = True
# Walk USB devices from leaves up (i.e reverse sorted) restarting the
# connection. If a parent node (e.g. usb hub) is restarted before the
# devices connected to it, the (bus, dev) for the hub can change, making the
# output we have wrong. This way we restart the devices before the hub.
for (bus, dev) in reversed(sorted(usb_devices)):
# Can not restart root usb connections
if dev != '001':
return_code = bb_utils.RunCmd(['/usr/bin/restart_usb', bus, dev])
if return_code:
print 'Error restarting USB device /dev/bus/usb/%s/%s' % (bus, dev)
all_restarted = False
else:
print 'Restarted USB device /dev/bus/usb/%s/%s' % (bus, dev)
return all_restarted
def KillAllAdb():
  """Kill every adb process on the host, escalating signals as needed."""
  def GetAllAdb():
    # Yield every live process whose name contains 'adb'.
    # NOTE(review): 'p.name' as an attribute and 'psutil.error.NoSuchProcess'
    # are legacy psutil APIs (modern psutil uses p.name() and
    # psutil.NoSuchProcess) -- confirm the psutil version pinned for the bots.
    for p in psutil.process_iter():
      try:
        if 'adb' in p.name:
          yield p
      except psutil.error.NoSuchProcess:
        pass

  # Escalate: polite SIGTERM first, then SIGQUIT, finally uncatchable SIGKILL.
  # A process that died between signals is simply skipped.
  for sig in [signal.SIGTERM, signal.SIGQUIT, signal.SIGKILL]:
    for p in GetAllAdb():
      try:
        print 'kill %d %d (%s [%s])' % (sig, p.pid, p.name,
                                        ' '.join(p.cmdline))
        p.send_signal(sig)
      except psutil.error.NoSuchProcess:
        pass
  # Anything still alive survived even SIGKILL; report it for the logs.
  for p in GetAllAdb():
    try:
      print 'Unable to kill %d (%s [%s])' % (p.pid, p.name, ' '.join(p.cmdline))
    except psutil.error.NoSuchProcess:
      pass
def main():
  """Check attached Android devices and report/alert on problems.

  Returns:
    2 when any device is unusable (fail flag False), 1 when no devices are
    attached at all, None (exit code 0) otherwise.
  """
  parser = optparse.OptionParser()
  parser.add_option('', '--out-dir',
                    help='Directory where the device path is stored',
                    default=os.path.join(constants.DIR_SOURCE_ROOT, 'out'))
  parser.add_option('--no-provisioning-check', action='store_true',
                    help='Will not check if devices are provisioned properly.')
  parser.add_option('--device-status-dashboard', action='store_true',
                    help='Output device status data for dashboard.')
  parser.add_option('--restart-usb', action='store_true',
                    help='Restart USB ports before running device check.')
  options, args = parser.parse_args()
  if args:
    parser.error('Unknown options %s' % args)

  # Remove the last build's "bad devices" before checking device statuses.
  device_blacklist.ResetBlacklist()

  if options.restart_usb:
    try:
      expected_devices = device_list.GetPersistentDeviceList(
          os.path.join(options.out_dir, device_list.LAST_DEVICES_FILENAME))
    except IOError:
      expected_devices = []
    devices = android_commands.GetAttachedDevices()
    # Only restart usb if devices are missing.
    if set(expected_devices) != set(devices):
      KillAllAdb()
      retries = 5
      usb_restarted = True
      if not RestartUsb():
        usb_restarted = False
        bb_annotations.PrintWarning()
        print 'USB reset stage failed, wait for any device to come back.'
      # Poll for up to |retries| seconds for devices to reattach.
      while retries:
        time.sleep(1)
        devices = android_commands.GetAttachedDevices()
        if set(expected_devices) == set(devices):
          # All devices are online, keep going.
          break
        if not usb_restarted and devices:
          # The USB wasn't restarted, but there's at least one device online.
          # No point in trying to wait for all devices.
          break
        retries -= 1

  devices = android_commands.GetAttachedDevices()
  # TODO(navabi): Test to make sure this fails and then fix call
  offline_devices = android_commands.GetAttachedDevices(
      hardware=False, emulator=False, offline=True)

  types, builds, batteries, reports, errors = [], [], [], [], []
  fail_step_lst = []
  if devices:
    # zip(*...) transposes the per-device 6-tuples into parallel lists.
    types, builds, batteries, reports, errors, fail_step_lst = (
        zip(*[DeviceInfo(dev, options) for dev in devices]))

  err_msg = CheckForMissingDevices(options, devices) or []

  unique_types = list(set(types))
  unique_builds = list(set(builds))

  bb_annotations.PrintMsg('Online devices: %d. Device types %s, builds %s'
                          % (len(devices), unique_types, unique_builds))
  print '\n'.join(reports)

  for serial, dev_errors in zip(devices, errors):
    if dev_errors:
      err_msg += ['%s errors:' % serial]
      err_msg += [' %s' % error for error in dev_errors]

  if err_msg:
    # Surface all collected problems on the build page and alert the lab.
    bb_annotations.PrintWarning()
    msg = '\n'.join(err_msg)
    print msg
    from_address = 'buildbot@chromium.org'
    to_address = 'chromium-android-device-alerts@google.com'
    bot_name = os.environ.get('BUILDBOT_BUILDERNAME')
    slave_name = os.environ.get('BUILDBOT_SLAVENAME')
    subject = 'Device status check errors on %s, %s.' % (slave_name, bot_name)
    SendEmail(from_address, to_address, subject, msg)

  if options.device_status_dashboard:
    # Emit perf-dashboard points: online/offline counts plus per-device
    # battery levels.
    perf_tests_results_helper.PrintPerfResult('BotDevices', 'OnlineDevices',
                                              [len(devices)], 'devices')
    perf_tests_results_helper.PrintPerfResult('BotDevices', 'OfflineDevices',
                                              [len(offline_devices)], 'devices',
                                              'unimportant')
    for serial, battery in zip(devices, batteries):
      perf_tests_results_helper.PrintPerfResult('DeviceBattery', serial,
                                                [battery], '%',
                                                'unimportant')

  if False in fail_step_lst:
    # TODO(navabi): Build fails on device status check step if there exists any
    # devices with critically low battery. Remove those devices from testing,
    # allowing build to continue with good devices.
    return 2

  if not devices:
    return 1
if __name__ == '__main__':
  # Propagate the status code (None maps to a 0 / success exit).
  sys.exit(main())
| |
# Licensed under the MIT license
# http://opensource.org/licenses/mit-license.php
# Copyright 2007, Frank Scholz <coherence@beebits.net>
# Copyright 2008, Jean-Michel Sizun <jm.sizun@free.fr>
from twisted.internet import defer
from coherence.upnp.core import utils
from coherence.upnp.core.utils import ReverseProxyUriResource
from coherence.upnp.core.DIDLLite import classChooser, Container, Resource, DIDLElement
from coherence.backend import BackendStore
from coherence.backend import BackendItem
from urlparse import urlsplit
from coherence.extern.galleryremote import Gallery
class ProxyGallery2Image(ReverseProxyUriResource):
    """Proxies a remote Gallery2 image URL through this media server."""
    def __init__(self, uri):
        ReverseProxyUriResource.__init__(self, uri)
    def render(self, request):
        # Drop the referer before forwarding upstream.
        # NOTE(review): on modern Twisted, request.responseHeaders is a
        # Headers object that does not support 'del ...[...]' (and the
        # referer would normally live on the *request* headers) -- confirm
        # this works on the Twisted version Coherence targets.
        del request.responseHeaders['referer']
        return ReverseProxyUriResource.render(self, request)
class Gallery2Item(BackendItem):
    """A single Gallery2 album or image exposed as a UPnP DIDL item."""
    logCategory = 'gallery2_item'

    def __init__(self, id, obj, parent, mimetype, urlbase, UPnPClass,update=False):
        # id: local UPnP object id; obj: dict with 'title'/'name'/'gallery2_id';
        # mimetype: 'directory' for albums, an image mimetype for photos.
        self.id = id
        # Prefer the human-readable title, then the Gallery2 name, then the id.
        self.name = obj.get('title')#.encode('utf-8')
        if self.name == None:
            self.name = obj.get('name')
        if self.name == None:
            self.name = id
        self.mimetype = mimetype
        self.gallery2_id = obj.get('gallery2_id')
        self.parent = parent
        if parent:
            parent.add_child(self,update=update)
        if parent == None:
            parent_id = -1
        else:
            parent_id = parent.get_id()
        self.item = UPnPClass(id, parent_id, self.name)
        if isinstance(self.item, Container):
            self.item.childCount = 0
        self.child_count = 0
        # None (not []) marks "children never fetched"; see get_children.
        self.children = None
        if( len(urlbase) and urlbase[-1] != '/'):
            urlbase += '/'
        if parent == None:
            # Root container: nothing to proxy.
            self.gallery2_url = None
            self.url = urlbase + str(self.id)
        elif self.mimetype == 'directory':
            # NOTE(review): gallery2_url is left unset on this branch (the
            # assignment is commented out); nothing visible here reads it for
            # directories, but confirm before relying on the attribute.
            #self.gallery2_url = parent.store.get_url_for_album(self.gallery2_id)
            self.url = urlbase + str(self.id)
        else:
            # Image: serve through the local proxy resource.
            self.gallery2_url = parent.store.get_url_for_image(self.gallery2_id)
            self.url = urlbase + str(self.id)
            self.location = ProxyGallery2Image(self.gallery2_url)
        if self.mimetype == 'directory':
            self.update_id = 0
        else:
            res = Resource(self.gallery2_url, 'http-get:*:%s:*' % self.mimetype)
            res.size = None
            self.item.res.append(res)

    def remove(self):
        """Detach this item from its parent and drop its DIDL object."""
        if self.parent:
            self.parent.remove_child(self)
        del self.item

    def add_child(self, child, update=False):
        """Register a child item; bumps update_id when update is True."""
        if self.children == None:
            self.children = []
        self.children.append(child)
        self.child_count += 1
        if isinstance(self.item, Container):
            self.item.childCount += 1
        if update == True:
            self.update_id += 1

    def remove_child(self, child):
        #self.info("remove_from %d (%s) child %d (%s)" % (self.id, self.get_name(), child.id, child.get_name()))
        if child in self.children:
            self.child_count -= 1
            if isinstance(self.item, Container):
                self.item.childCount -= 1
            self.children.remove(child)
            self.update_id += 1

    def get_children(self,start=0,request_count=0):
        """Return (or lazily fetch, via a Deferred) this album's children."""
        def process_items(result = None):
            if self.children == None:
                return []
            if request_count == 0:
                return self.children[start:]
            else:
                # NOTE(review): slices with request_count as the *end index*
                # rather than start+request_count -- confirm against other
                # Coherence backends before changing.
                return self.children[start:request_count]
        if (self.children == None):
            # First access: fetch sub-albums and images, then slice.
            d = self.store.retrieveItemsForAlbum(self.gallery2_id, self)
            d.addCallback(process_items)
            return d
        else:
            return process_items()

    def get_child_count(self):
        return self.child_count

    def get_id(self):
        return self.id

    def get_update_id(self):
        # Only containers carry an update_id; images return None.
        if hasattr(self, 'update_id'):
            return self.update_id
        else:
            return None

    def get_path(self):
        return self.url

    def get_name(self):
        return self.name

    def get_parent(self):
        return self.parent

    def get_item(self):
        return self.item

    def get_xml(self):
        return self.item.toString()

    def __repr__(self):
        return 'id: ' + str(self.id)
class Gallery2Store(BackendStore):
    """Coherence MediaServer backend exposing a Gallery2 photo repository.

    Albums become UPnP containers and images become UPnP image items;
    image content is proxied through ProxyGallery2Image. Items are fetched
    lazily from the remote gallery via the galleryremote protocol.
    """
    logCategory = 'gallery2_store'
    implements = ['MediaServer']
    description = ('Gallery2', 'exposes the photos from a Gallery2 photo repository.', None)
    options = [{'option':'name', 'text':'Server Name:', 'type':'string','default':'my media','help': 'the name under this MediaServer shall show up with on other UPnP clients'},
       {'option':'version','text':'UPnP Version:','type':'int','default':2,'enum': (2,1),'help': 'the highest UPnP version this MediaServer shall support','level':'advance'},
       {'option':'uuid','text':'UUID Identifier:','type':'string','help':'the unique (UPnP) identifier for this MediaServer, usually automatically set','level':'advance'},
       {'option':'server_url','text':'Server URL:','type':'string'},
       {'option':'username','text':'User ID:','type':'string','group':'User Account'},
       {'option':'password','text':'Password:','type':'string','group':'User Account'},
    ]

    def __init__(self, server, **kwargs):
        BackendStore.__init__(self,server,**kwargs)
        self.next_id = 1000
        self.config = kwargs
        self.name = kwargs.get('name','gallery2Store')
        # Map the WMC 'Pictures' root (id 16) onto our root container.
        self.wmc_mapping = {'16': 1000}
        self.update_id = 0
        self.store = {}
        self.gallery2_server_url = self.config.get('server_url', 'http://localhost/gallery2')
        self.gallery2_username = self.config.get('username',None)
        self.gallery2_password = self.config.get('password',None)
        # Root container (local id 1000) representing the whole gallery.
        self.store[1000] = Gallery2Item( 1000, {'title':'Gallery2','gallery2_id':'0','mimetype':'directory'}, None,
                                        'directory', self.urlbase, Container, update=True)
        self.store[1000].store = self
        self.gallery2_remote = Gallery(self.gallery2_server_url, 2)
        # Log in first when both credentials are configured, then load albums.
        if not None in [self.gallery2_username, self.gallery2_password]:
            d = self.gallery2_remote.login(self.gallery2_username, self.gallery2_password)
            d.addCallback(lambda x : self.retrieveAlbums('0', self.store[1000]))
            d.addCallback(self.init_completed)
        else:
            d = self.retrieveAlbums('0', self.store[1000])
            d.addCallback(self.init_completed)

    def __repr__(self):
        return self.__class__.__name__

    def append( self, obj, parent):
        """Create a Gallery2Item for obj under parent and register it.

        Returns the new item for directories (so children can be appended),
        None for leaf items.
        """
        if isinstance(obj, basestring):
            mimetype = 'directory'
        else:
            mimetype = obj['mimetype']
        UPnPClass = classChooser(mimetype)
        id = self.getnextID()
        update = False
        item = Gallery2Item( id, obj, parent, mimetype, self.urlbase,
                             UPnPClass, update=update)
        self.store[id] = item
        self.store[id].store = self
        if hasattr(self, 'update_id'):
            self.update_id += 1
            if self.server:
                self.server.content_directory_server.set_variable(0, 'SystemUpdateID', self.update_id)
        if mimetype == 'directory':
            return self.store[id]
        return None

    def len(self):
        return len(self.store)

    def get_by_id(self,id):
        """Resolve a UPnP object id ('NNN' or 'NNN@...') to the stored item.

        Unknown or non-numeric ids fall back to the root container; a
        numeric id not in the store yields None.
        """
        if isinstance(id, basestring):
            id = id.split('@',1)
            id = id[0]
        try:
            id = int(id)
        except ValueError:
            id = 1000
        if id == 0:
            id = 1000
        try:
            return self.store[id]
        # BUG FIX: was a bare 'except:'; only a missing key is expected here.
        except KeyError:
            return None

    def getnextID(self):
        """Hand out the next free local object id."""
        self.next_id += 1
        return self.next_id

    def get_url_for_image(self, gallery2_id):
        """Return the remote Gallery2 download URL for an image id."""
        url = self.gallery2_remote.get_URL_for_image(gallery2_id)
        return url

    def upnp_init(self):
        # Advertise the image protocols (with DLNA profiles) we can serve.
        self.current_connection_id = None
        if self.server:
            self.server.connection_manager_server.set_variable(0, 'SourceProtocolInfo',
                'http-get:*:image/jpeg:DLNA.ORG_PN=JPEG_TN;DLNA.ORG_OP=01;DLNA.ORG_FLAGS=00f00000000000000000000000000000,'
                'http-get:*:image/jpeg:DLNA.ORG_PN=JPEG_SM;DLNA.ORG_OP=01;DLNA.ORG_FLAGS=00f00000000000000000000000000000,'
                'http-get:*:image/jpeg:DLNA.ORG_PN=JPEG_MED;DLNA.ORG_OP=01;DLNA.ORG_FLAGS=00f00000000000000000000000000000,'
                'http-get:*:image/jpeg:DLNA.ORG_PN=JPEG_LRG;DLNA.ORG_OP=01;DLNA.ORG_FLAGS=00f00000000000000000000000000000,'
                'http-get:*:image/jpeg:*,'
                'http-get:*:image/gif:*,'
                'http-get:*:image/png:*',
                default=True)

    def retrieveAlbums(self, album_gallery2_id, parent):
        """Fetch all albums and append those whose parent is
        album_gallery2_id as children of parent.

        Returns a Deferred firing once the albums have been processed.
        """
        d = self.gallery2_remote.fetch_albums()
        def gotAlbums (albums):
            if albums :
                albums = [album for album in albums.values() if album.get('parent') == album_gallery2_id]
                if album_gallery2_id == '0' and len(albums) == 1:
                    # A single top-level album: merge it into the root
                    # container instead of adding a redundant level.
                    album = albums[0]
                    self.store[1000].gallery2_id = album.get('name')
                    self.store[1000].name = album.get('title')
                    self.store[1000].description = album.get('summary')
                else:
                    for album in albums:
                        gallery2_id = album.get('name')
                        parent_gallery2_id = album.get('parent')
                        title = album.get('title')
                        description = album.get('summary')
                        store_item = {
                            # BUG FIX: 'name' used to be the *builtin* id
                            # function; use the album's Gallery2 name so the
                            # Gallery2Item title fallback is meaningful.
                            'name': gallery2_id,
                            'gallery2_id':gallery2_id,
                            'parent_id':parent_gallery2_id,
                            'title':title,
                            'description':description,
                            'mimetype':'directory',
                            }
                        self.append(store_item, parent)
        d.addCallback(gotAlbums)
        return d

    def retrieveItemsForAlbum (self, album_id, parent):
        """Fetch both the sub-albums and the images of album_id under parent.

        Returns a DeferredList combining both fetches.
        """
        # retrieve subalbums
        d1 = self.retrieveAlbums(album_id, parent)
        # retrieve images
        d2 = self.gallery2_remote.fetch_album_images(album_id)
        def gotImages(images):
            if images :
                for image in images:
                    image_gallery2_id = image.get('name')
                    parent_gallery2_id = image.get('parent')
                    thumbnail_gallery2_id = image.get('thumbName')
                    resized_gallery2_id = image.get('resizedName')
                    title = image.get('title')
                    description = image.get('description')
                    # Prefer the resized rendition; fall back to the original.
                    gallery2_id = resized_gallery2_id
                    if gallery2_id == '':
                        gallery2_id = image_gallery2_id
                    store_item = {
                        # BUG FIX: 'name' used to be the builtin id function;
                        # use the image's Gallery2 name instead.
                        'name': image_gallery2_id,
                        'gallery2_id':gallery2_id,
                        'parent_id':parent_gallery2_id,
                        'thumbnail_gallery2_id':thumbnail_gallery2_id,
                        'title':title,
                        'description':description,
                        'mimetype':'image/jpeg',
                        }
                    self.append(store_item, parent)
        d2.addCallback(gotImages)
        dl = defer.DeferredList([d1,d2])
        return dl
def main():
f = Gallery2Store(None)
def got_upnp_result(result):
print "upnp", result
f.upnp_init()
if __name__ == '__main__':
    from twisted.internet import reactor
    # Defer construction until the reactor runs, then loop forever.
    reactor.callWhenRunning(main)
    reactor.run()
| |
# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Classes for collecting results of our BuildStages as they run."""
import datetime
import math
import os
from chromite.lib import cros_build_lib
def _GetCheckpointFile(buildroot):
return os.path.join(buildroot, '.completed_stages')
def WriteCheckpoint(buildroot):
  """Drops a completed stages file with current state."""
  with open(_GetCheckpointFile(buildroot), 'w+') as save_file:
    Results.SaveCompletedStages(save_file)
def LoadCheckpoint(buildroot):
  """Restore completed stage info from checkpoint file."""
  checkpoint_path = _GetCheckpointFile(buildroot)
  if not os.path.exists(checkpoint_path):
    # A missing checkpoint is not fatal; we simply start from scratch.
    cros_build_lib.Warning('Checkpoint file not found in buildroot %s'
                           % buildroot)
    return
  with open(checkpoint_path, 'r') as load_file:
    Results.RestoreCompletedStages(load_file)
class StepFailure(Exception):
  """StepFailure exceptions indicate that a cbuildbot step failed.

  Exceptions that derive from StepFailure should meet the following
  criteria:
    1) The failure indicates that a cbuildbot step failed.
    2) The necessary information to debug the problem has already been
       printed in the logs for the stage that failed.
    3) __str__() should be brief enough to include in a Commit Queue
       failure message.
  """
class BuildScriptFailure(StepFailure):
  """Raised when a build command fails.

  Carries a short summary of the failing command so that Commit Queue
  failure messages stay readable -- developers should not be spammed
  with giant error output when common commands (e.g. build_packages)
  fail.
  """

  def __init__(self, exception, shortname):
    """Construct a BuildScriptFailure object.

    Args:
      exception: A RunCommandError object.
      shortname: Short name for the command we're running.
    """
    StepFailure.__init__(self)
    assert isinstance(exception, cros_build_lib.RunCommandError)
    self.exception = exception
    self.shortname = shortname
    self.args = (exception, shortname)

  def __str__(self):
    """Summarize a build command failure briefly."""
    result = self.exception.result
    if not result.returncode:
      return self.exception.msg
    return '%s failed (code=%s)' % (self.shortname, result.returncode)
class PackageBuildFailure(BuildScriptFailure):
  """Raised when one or more packages fail to build."""

  def __init__(self, exception, shortname, failed_packages):
    """Construct a PackageBuildFailure object.

    Args:
      exception: The underlying exception.
      shortname: Short name for the command we're running.
      failed_packages: List of packages that failed to build.
    """
    BuildScriptFailure.__init__(self, exception, shortname)
    self.failed_packages = set(failed_packages)
    self.args += (failed_packages,)

  def __str__(self):
    """List the failed packages in a brief, sorted summary."""
    pkgs = ' '.join(sorted(self.failed_packages))
    return 'Packages failed in %s: %s' % (self.shortname, pkgs)
class RecordedTraceback(object):
  """A single failure captured from the results log."""

  def __init__(self, failed_stage, exception, traceback):
    """Construct a RecordedTraceback object.

    Args:
      failed_stage: The stage that failed during the build.
      exception: The raw exception object.
      traceback: The full stack trace for the failure, as a string.
    """
    (self.failed_stage, self.exception, self.traceback) = (
        failed_stage, exception, traceback)
class _Results(object):
  """Static class that collects the results of our BuildStages as they run."""

  # Stored in the results log for a stage skipped because it was previously
  # completed successfully.
  SUCCESS = "Stage was successful"
  FORGIVEN = "Stage failed but was optional"
  # Separator used to serialize one log entry per line in the checkpoint file.
  SPLIT_TOKEN = "\_O_/"

  def __init__(self):
    # List of results for all stages that's built up as we run. Members are of
    # the form ('name', SUCCESS | FORGIVEN | Exception, None | description)
    self._results_log = []
    # Stages run in a previous run and restored. Stored as a dictionary of
    # names to previous records.
    self._previous = {}

  def Clear(self):
    """Clear existing stage results."""
    # Re-running __init__ resets both the log and the restored records.
    self.__init__()

  def PreviouslyCompletedRecord(self, name):
    """Check to see if this stage was previously completed.

    Returns:
      The previous record for |name| (truthy) if it completed in a prior
      run, else None.
    """
    return self._previous.get(name)

  def BuildSucceededSoFar(self):
    """Return true if all stages so far have passing states.

    This method returns true if all was successful or forgiven.
    """
    for entry in self._results_log:
      _, result, _, _ = entry
      if not result in (self.SUCCESS, self.FORGIVEN):
        return False
    return True

  def WasStageSuccessful(self, name):
    """Return true stage passed."""
    cros_build_lib.Info('Checking for %s' % name)
    for entry in self._results_log:
      # NOTE: |entry| is rebound from the whole tuple to just the stage name.
      entry, result, _, _ = entry
      if entry == name:
        cros_build_lib.Info('Found %s' % result)
        return result == self.SUCCESS
    return False

  def Record(self, name, result, description=None, time=0):
    """Store off an additional stage result.

    Args:
      name: The name of the stage
      result:
        Result should be one of:
          Results.SUCCESS if the stage was successful.
          The exception the stage errored with.
      description:
        The textual backtrace of the exception, or None
      time: Elapsed run time of the stage, in seconds.
    """
    self._results_log.append((name, result, description, time))

  def UpdateResult(self, name, result, description=None):
    """Updates a stage result with a different result.

    Args:
      name: The name of the stage
      result:
        Result should be Results.SUCCESS if the stage was successful
        otherwise the exception the stage errored with.
      description:
        The textual backtrace of the exception, or None
    """
    # Only the first matching entry is updated; the recorded run time is kept.
    for index in range(len(self._results_log)):
      if self._results_log[index][0] == name:
        _, _, _, run_time = self._results_log[index]
        self._results_log[index] = name, result, description, run_time
        break

  def Get(self):
    """Fetch stage results.

    Returns:
      A list with one entry per stage run with a result.
    """
    return self._results_log

  def GetPrevious(self):
    """Fetch stage results.

    Returns:
      A dictionary of stage names to records completed in a previous run.
    """
    return self._previous

  def SaveCompletedStages(self, out):
    """Save the successfully completed stages to the provided file |out|."""
    # Only the leading run of successful stages is persisted: stop at the
    # first non-success so a restart re-runs everything from there onward.
    for name, result, description, time in self._results_log:
      if result != self.SUCCESS: break
      out.write(self.SPLIT_TOKEN.join([name, str(description), str(time)]))
      out.write('\n')

  def RestoreCompletedStages(self, out):
    """Load the successfully completed stages from the provided file |out|."""
    # Read the file, and strip off the newlines.
    for line in out:
      record = line.strip().split(self.SPLIT_TOKEN)
      if len(record) != 3:
        cros_build_lib.Warning(
            'State file does not match expected format, ignoring.')
        # Wipe any partial state.
        self._previous = {}
        break
      self._previous[record[0]] = record

  def GetTracebacks(self):
    """Get a list of the exceptions that failed the build.

    Note: this is a generator; it yields RecordedTraceback objects.

    Returns:
      A list of RecordedTraceback objects.
    """
    for name, result, description, _ in self._results_log:
      # If result is not SUCCESS or FORGIVEN, then the stage failed, and
      # result is the exception object and description is a string containing
      # the full traceback.
      if result not in (self.SUCCESS, self.FORGIVEN):
        yield RecordedTraceback(name, result, description)

  def Report(self, out, archive_urls=None, current_version=None):
    """Generate a user friendly text display of the results data."""
    results = self._results_log

    line = '*' * 60 + '\n'
    edge = '*' * 2

    if current_version:
      out.write(line)
      out.write(edge +
                ' RELEASE VERSION: ' +
                current_version +
                '\n')

    out.write(line)
    out.write(edge + ' Stage Results\n')

    for name, result, _, run_time in results:
      # Round elapsed seconds up so even sub-second stages show a duration.
      timestr = datetime.timedelta(seconds=math.ceil(run_time))

      out.write(line)
      details = ''
      if result == self.SUCCESS:
        status = 'PASS'
      elif result == self.FORGIVEN:
        status = 'FAILED BUT FORGIVEN'
      else:
        status = 'FAIL'
        if isinstance(result, cros_build_lib.RunCommandError):
          # If there was a RunCommand error, give just the command that
          # failed, not its full argument list, since those are usually
          # too long.
          details = ' in %s' % result.result.cmd[0]
        elif isinstance(result, BuildScriptFailure):
          # BuildScriptFailure errors publish a 'short' name of the
          # command that failed.
          details = ' in %s' % result.shortname
        else:
          # There was a normal error. Give the type of exception.
          details = ' with %s' % type(result).__name__

      out.write('%s %s %s (%s)%s\n' % (edge, status, name, timestr, details))

    out.write(line)

    if archive_urls:
      out.write('%s BUILD ARTIFACTS FOR THIS BUILD CAN BE FOUND AT:\n' % edge)
      for board, url in sorted(archive_urls.iteritems()):
        out.write('%s %s: %s\n' % (edge, board, url))
        # Buildbot annotation so the waterfall links to the artifacts.
        out.write('@@@STEP_LINK@Artifacts[%s]@%s@@@\n' % (board, url))
      out.write(line)

    for x in self.GetTracebacks():
      if x.failed_stage and x.traceback:
        out.write('\nFailed in stage %s:\n\n' % x.failed_stage)
        out.write(x.traceback)
        out.write('\n')
Results = _Results()
| |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import grpc_helpers
from google.api_core import operations_v1
from google.api_core import gapic_v1
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin
from google.iam.v1 import iam_policy_pb2 # type: ignore
from google.iam.v1 import policy_pb2 # type: ignore
from google.longrunning import operations_pb2 # type: ignore
from google.protobuf import empty_pb2 # type: ignore
from .base import InstanceAdminTransport, DEFAULT_CLIENT_INFO
class InstanceAdminGrpcTransport(InstanceAdminTransport):
"""gRPC backend transport for InstanceAdmin.
Cloud Spanner Instance Admin API
The Cloud Spanner Instance Admin API can be used to create,
delete, modify and list instances. Instances are dedicated Cloud
Spanner serving and storage resources to be used by Cloud
Spanner databases.
Each instance has a "configuration", which dictates where the
serving resources for the Cloud Spanner instance are located
(e.g., US-central, Europe). Configurations are created by Google
based on resource availability.
Cloud Spanner billing is based on the instances that exist and
their sizes. After an instance exists, there are no additional
per-database or per-operation charges for use of the instance
(though there may be additional network bandwidth charges).
Instances offer isolation: problems with databases in one
instance will not affect other instances. However, within an
instance databases can affect each other. For example, if one
database in an instance receives a lot of requests and consumes
most of the instance resources, fewer resources are available
for other databases in that instance, and their performance may
suffer.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_stubs: Dict[str, Callable]
def __init__(
self,
*,
host: str = "spanner.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Sequence[str] = None,
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional(Sequence[str])): A list of scopes. This argument is
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for the grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure a mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
self._operations_client: Optional[operations_v1.OperationsClient] = None
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
# use the credentials which are saved
credentials=self._credentials,
# Set ``credentials_file`` to ``None`` here as
# the credentials that we saved earlier should be used.
credentials_file=None,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Wrap messages. This must be done after self._grpc_channel exists
self._prep_wrapped_messages(client_info)
@classmethod
def create_channel(
cls,
host: str = "spanner.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs,
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
Raises:
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
return grpc_helpers.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs,
)
@property
def grpc_channel(self) -> grpc.Channel:
"""Return the channel designed to connect to this service.
"""
return self._grpc_channel
@property
def operations_client(self) -> operations_v1.OperationsClient:
"""Create the client designed to process long-running operations.
This property caches on the instance; repeated calls return the same
client.
"""
# Quick check: Only create a new client if we do not already have one.
if self._operations_client is None:
self._operations_client = operations_v1.OperationsClient(self.grpc_channel)
# Return the client from cache.
return self._operations_client
@property
def list_instance_configs(
self,
) -> Callable[
[spanner_instance_admin.ListInstanceConfigsRequest],
spanner_instance_admin.ListInstanceConfigsResponse,
]:
r"""Return a callable for the list instance configs method over gRPC.
Lists the supported instance configurations for a
given project.
Returns:
Callable[[~.ListInstanceConfigsRequest],
~.ListInstanceConfigsResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_instance_configs" not in self._stubs:
self._stubs["list_instance_configs"] = self.grpc_channel.unary_unary(
"/google.spanner.admin.instance.v1.InstanceAdmin/ListInstanceConfigs",
request_serializer=spanner_instance_admin.ListInstanceConfigsRequest.serialize,
response_deserializer=spanner_instance_admin.ListInstanceConfigsResponse.deserialize,
)
return self._stubs["list_instance_configs"]
@property
def get_instance_config(
self,
) -> Callable[
[spanner_instance_admin.GetInstanceConfigRequest],
spanner_instance_admin.InstanceConfig,
]:
r"""Return a callable for the get instance config method over gRPC.
Gets information about a particular instance
configuration.
Returns:
Callable[[~.GetInstanceConfigRequest],
~.InstanceConfig]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_instance_config" not in self._stubs:
self._stubs["get_instance_config"] = self.grpc_channel.unary_unary(
"/google.spanner.admin.instance.v1.InstanceAdmin/GetInstanceConfig",
request_serializer=spanner_instance_admin.GetInstanceConfigRequest.serialize,
response_deserializer=spanner_instance_admin.InstanceConfig.deserialize,
)
return self._stubs["get_instance_config"]
@property
def list_instances(
self,
) -> Callable[
[spanner_instance_admin.ListInstancesRequest],
spanner_instance_admin.ListInstancesResponse,
]:
r"""Return a callable for the list instances method over gRPC.
Lists all instances in the given project.
Returns:
Callable[[~.ListInstancesRequest],
~.ListInstancesResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_instances" not in self._stubs:
self._stubs["list_instances"] = self.grpc_channel.unary_unary(
"/google.spanner.admin.instance.v1.InstanceAdmin/ListInstances",
request_serializer=spanner_instance_admin.ListInstancesRequest.serialize,
response_deserializer=spanner_instance_admin.ListInstancesResponse.deserialize,
)
return self._stubs["list_instances"]
@property
def get_instance(
self,
) -> Callable[
[spanner_instance_admin.GetInstanceRequest], spanner_instance_admin.Instance
]:
r"""Return a callable for the get instance method over gRPC.
Gets information about a particular instance.
Returns:
Callable[[~.GetInstanceRequest],
~.Instance]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_instance" not in self._stubs:
self._stubs["get_instance"] = self.grpc_channel.unary_unary(
"/google.spanner.admin.instance.v1.InstanceAdmin/GetInstance",
request_serializer=spanner_instance_admin.GetInstanceRequest.serialize,
response_deserializer=spanner_instance_admin.Instance.deserialize,
)
return self._stubs["get_instance"]
@property
def create_instance(
self,
) -> Callable[
[spanner_instance_admin.CreateInstanceRequest], operations_pb2.Operation
]:
r"""Return a callable for the create instance method over gRPC.
Creates an instance and begins preparing it to begin serving.
The returned [long-running
operation][google.longrunning.Operation] can be used to track
the progress of preparing the new instance. The instance name is
assigned by the caller. If the named instance already exists,
``CreateInstance`` returns ``ALREADY_EXISTS``.
Immediately upon completion of this request:
- The instance is readable via the API, with all requested
attributes but no allocated resources. Its state is
``CREATING``.
Until completion of the returned operation:
- Cancelling the operation renders the instance immediately
unreadable via the API.
- The instance can be deleted.
- All other attempts to modify the instance are rejected.
Upon completion of the returned operation:
- Billing for all successfully-allocated resources begins (some
types may have lower than the requested levels).
- Databases can be created in the instance.
- The instance's allocated resource levels are readable via the
API.
- The instance's state becomes ``READY``.
The returned [long-running
operation][google.longrunning.Operation] will have a name of the
format ``<instance_name>/operations/<operation_id>`` and can be
used to track creation of the instance. The
[metadata][google.longrunning.Operation.metadata] field type is
[CreateInstanceMetadata][google.spanner.admin.instance.v1.CreateInstanceMetadata].
The [response][google.longrunning.Operation.response] field type
is [Instance][google.spanner.admin.instance.v1.Instance], if
successful.
Returns:
Callable[[~.CreateInstanceRequest],
~.Operation]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_instance" not in self._stubs:
self._stubs["create_instance"] = self.grpc_channel.unary_unary(
"/google.spanner.admin.instance.v1.InstanceAdmin/CreateInstance",
request_serializer=spanner_instance_admin.CreateInstanceRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["create_instance"]
@property
def update_instance(
self,
) -> Callable[
[spanner_instance_admin.UpdateInstanceRequest], operations_pb2.Operation
]:
r"""Return a callable for the update instance method over gRPC.
Updates an instance, and begins allocating or releasing
resources as requested. The returned [long-running
operation][google.longrunning.Operation] can be used to track
the progress of updating the instance. If the named instance
does not exist, returns ``NOT_FOUND``.
Immediately upon completion of this request:
- For resource types for which a decrease in the instance's
allocation has been requested, billing is based on the
newly-requested level.
Until completion of the returned operation:
- Cancelling the operation sets its metadata's
[cancel_time][google.spanner.admin.instance.v1.UpdateInstanceMetadata.cancel_time],
and begins restoring resources to their pre-request values.
The operation is guaranteed to succeed at undoing all
resource changes, after which point it terminates with a
``CANCELLED`` status.
- All other attempts to modify the instance are rejected.
- Reading the instance via the API continues to give the
pre-request resource levels.
Upon completion of the returned operation:
- Billing begins for all successfully-allocated resources (some
types may have lower than the requested levels).
- All newly-reserved resources are available for serving the
instance's tables.
- The instance's new resource levels are readable via the API.
The returned [long-running
operation][google.longrunning.Operation] will have a name of the
format ``<instance_name>/operations/<operation_id>`` and can be
used to track the instance modification. The
[metadata][google.longrunning.Operation.metadata] field type is
[UpdateInstanceMetadata][google.spanner.admin.instance.v1.UpdateInstanceMetadata].
The [response][google.longrunning.Operation.response] field type
is [Instance][google.spanner.admin.instance.v1.Instance], if
successful.
Authorization requires ``spanner.instances.update`` permission
on resource
[name][google.spanner.admin.instance.v1.Instance.name].
Returns:
Callable[[~.UpdateInstanceRequest],
~.Operation]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_instance" not in self._stubs:
self._stubs["update_instance"] = self.grpc_channel.unary_unary(
"/google.spanner.admin.instance.v1.InstanceAdmin/UpdateInstance",
request_serializer=spanner_instance_admin.UpdateInstanceRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["update_instance"]
@property
def delete_instance(
self,
) -> Callable[[spanner_instance_admin.DeleteInstanceRequest], empty_pb2.Empty]:
r"""Return a callable for the delete instance method over gRPC.
Deletes an instance.
Immediately upon completion of the request:
- Billing ceases for all of the instance's reserved resources.
Soon afterward:
- The instance and *all of its databases* immediately and
irrevocably disappear from the API. All data in the databases
is permanently deleted.
Returns:
Callable[[~.DeleteInstanceRequest],
~.Empty]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_instance" not in self._stubs:
self._stubs["delete_instance"] = self.grpc_channel.unary_unary(
"/google.spanner.admin.instance.v1.InstanceAdmin/DeleteInstance",
request_serializer=spanner_instance_admin.DeleteInstanceRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs["delete_instance"]
@property
def set_iam_policy(
self,
) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:
r"""Return a callable for the set iam policy method over gRPC.
Sets the access control policy on an instance resource. Replaces
any existing policy.
Authorization requires ``spanner.instances.setIamPolicy`` on
[resource][google.iam.v1.SetIamPolicyRequest.resource].
Returns:
Callable[[~.SetIamPolicyRequest],
~.Policy]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "set_iam_policy" not in self._stubs:
self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary(
"/google.spanner.admin.instance.v1.InstanceAdmin/SetIamPolicy",
request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,
response_deserializer=policy_pb2.Policy.FromString,
)
return self._stubs["set_iam_policy"]
@property
def get_iam_policy(
self,
) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]:
r"""Return a callable for the get iam policy method over gRPC.
Gets the access control policy for an instance resource. Returns
an empty policy if an instance exists but does not have a policy
set.
Authorization requires ``spanner.instances.getIamPolicy`` on
[resource][google.iam.v1.GetIamPolicyRequest.resource].
Returns:
Callable[[~.GetIamPolicyRequest],
~.Policy]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_iam_policy" not in self._stubs:
self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary(
"/google.spanner.admin.instance.v1.InstanceAdmin/GetIamPolicy",
request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString,
response_deserializer=policy_pb2.Policy.FromString,
)
return self._stubs["get_iam_policy"]
@property
def test_iam_permissions(
self,
) -> Callable[
[iam_policy_pb2.TestIamPermissionsRequest],
iam_policy_pb2.TestIamPermissionsResponse,
]:
r"""Return a callable for the test iam permissions method over gRPC.
Returns permissions that the caller has on the specified
instance resource.
Attempting this RPC on a non-existent Cloud Spanner instance
resource will result in a NOT_FOUND error if the user has
``spanner.instances.list`` permission on the containing Google
Cloud Project. Otherwise returns an empty set of permissions.
Returns:
Callable[[~.TestIamPermissionsRequest],
~.TestIamPermissionsResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "test_iam_permissions" not in self._stubs:
self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary(
"/google.spanner.admin.instance.v1.InstanceAdmin/TestIamPermissions",
request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString,
response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString,
)
return self._stubs["test_iam_permissions"]
    def close(self):
        """Close the underlying gRPC channel, releasing its resources."""
        self.grpc_channel.close()
__all__ = ("InstanceAdminGrpcTransport",)
| |
"""
A collection of functions to find the weights and abscissas for
Gaussian Quadrature.
These calculations are done by finding the eigenvalues of a
tridiagonal matrix whose entries are dependent on the coefficients
in the recursion formula for the orthogonal polynomials with the
corresponding weighting function over the interval.
Many recursion relations for orthogonal polynomials are given:
.. math::
a1n f_{n+1} (x) = (a2n + a3n x ) f_n (x) - a4n f_{n-1} (x)
The recursion relation of interest is
.. math::
P_{n+1} (x) = (x - A_n) P_n (x) - B_n P_{n-1} (x)
where :math:`P` has a different normalization than :math:`f`.
The coefficients can be found as:
.. math::
A_n = -a2n / a3n
\\qquad
    B_n = ( a4n / a3n \\sqrt{h_{n-1} / h_n})^2
where
.. math::
    h_n = \\int_a^b w(x) f_n(x)^2 dx
assume:
.. math::
P_0 (x) = 1
\\qquad
P_{-1} (x) == 0
For the mathematical background, see [golub.welsch-1969-mathcomp]_ and
[abramowitz.stegun-1965]_.
Functions::
gen_roots_and_weights -- Generic roots and weights.
j_roots -- Jacobi
js_roots -- Shifted Jacobi
la_roots -- Generalized Laguerre
h_roots -- Hermite
he_roots -- Hermite (unit-variance)
cg_roots -- Ultraspherical (Gegenbauer)
t_roots -- Chebyshev of the first kind
u_roots -- Chebyshev of the second kind
c_roots -- Chebyshev of the first kind ([-2,2] interval)
s_roots -- Chebyshev of the second kind ([-2,2] interval)
ts_roots -- Shifted Chebyshev of the first kind.
us_roots -- Shifted Chebyshev of the second kind.
p_roots -- Legendre
ps_roots -- Shifted Legendre
l_roots -- Laguerre
.. [golub.welsch-1969-mathcomp]
Golub, Gene H, and John H Welsch. 1969. Calculation of Gauss
Quadrature Rules. *Mathematics of Computation* 23, 221-230+s1--s10.
.. [abramowitz.stegun-1965]
Abramowitz, Milton, and Irene A Stegun. (1965) *Handbook of
Mathematical Functions: with Formulas, Graphs, and Mathematical
Tables*. Gaithersburg, MD: National Bureau of Standards.
http://www.math.sfu.ca/~cbm/aands/
"""
#
# Author: Travis Oliphant 2000
# Updated Sep. 2003 (fixed bugs --- tested to be accurate)
from __future__ import division, print_function, absolute_import
# Scipy imports.
import numpy as np
from numpy import all, any, exp, inf, pi, sqrt
from numpy.dual import eig
from scipy import linalg
# Local imports.
from . import _ufuncs as cephes
# Short alias for the gamma function; used throughout the normalization
# formulas (hn, kn) below.
_gam = cephes.gamma

# Public API: polynomial constructors, the *_roots quadrature helpers, and
# the vectorized eval_* functions re-exported at the bottom of the module.
__all__ = ['legendre', 'chebyt', 'chebyu', 'chebyc', 'chebys',
           'jacobi', 'laguerre', 'genlaguerre', 'hermite', 'hermitenorm',
           'gegenbauer', 'sh_legendre', 'sh_chebyt', 'sh_chebyu', 'sh_jacobi',
           'p_roots', 'ps_roots', 'j_roots', 'js_roots', 'l_roots', 'la_roots',
           'he_roots', 'ts_roots', 'us_roots', 's_roots', 't_roots', 'u_roots',
           'c_roots', 'cg_roots', 'h_roots',
           'eval_legendre', 'eval_chebyt', 'eval_chebyu', 'eval_chebyc',
           'eval_chebys', 'eval_jacobi', 'eval_laguerre', 'eval_genlaguerre',
           'eval_hermite', 'eval_hermitenorm', 'eval_gegenbauer',
           'eval_sh_legendre', 'eval_sh_chebyt', 'eval_sh_chebyu',
           'eval_sh_jacobi', 'poch', 'binom']

# For backward compatibility: poch historically lived in this namespace.
poch = cephes.poch
class orthopoly1d(np.poly1d):
    """A 1-D orthogonal polynomial with attached quadrature metadata.

    Extends ``np.poly1d`` with the Gauss quadrature nodes/weights, the
    weight function and orthogonality interval, and an optional fast
    ``eval_func`` used instead of coefficient-based evaluation.
    """

    def __init__(self, roots, weights=None, hn=1.0, kn=1.0, wfunc=None,
                 limits=None, monic=0, eval_func=None):
        np.poly1d.__init__(self, roots, r=1)
        # Pair each node x_k with its weight w_k and the "equivalent" weight
        # w_k / w(x_k).  NOTE(review): assumes `weights` and `wfunc` are both
        # provided; the default weights=None would fail here -- confirm all
        # callers pass them.
        equiv_weights = [weights[k] / wfunc(roots[k]) for k in range(len(roots))]
        # Direct __dict__ writes bypass np.poly1d.__setattr__, which would
        # otherwise intercept these attribute assignments.
        self.__dict__['weights'] = np.array(list(zip(roots, weights, equiv_weights)))
        self.__dict__['weight_func'] = wfunc
        self.__dict__['limits'] = limits
        mu = sqrt(hn)
        if monic:
            # Monic form: divide the leading coefficient kn out of the
            # evaluator and fold it into the normalization constant.
            evf = eval_func
            if evf:
                eval_func = lambda x: evf(x) / kn
            mu = mu / abs(kn)
            kn = 1.0
        self.__dict__['normcoef'] = mu
        # Scale the coefficients by the leading coefficient.
        self.__dict__['coeffs'] *= kn
        # Note: eval_func will be discarded on arithmetic
        self.__dict__['_eval_func'] = eval_func

    def __call__(self, v):
        # Prefer the fast evaluator for scalar/array input; fall back to
        # coefficient evaluation for poly1d arguments.
        if self._eval_func and not isinstance(v, np.poly1d):
            return self._eval_func(v)
        else:
            return np.poly1d.__call__(self, v)

    def _scale(self, p):
        # Multiply the polynomial (and its evaluator and normalization
        # constant) by the scalar p, in place.
        if p == 1.0:
            return
        self.__dict__['coeffs'] *= p
        evf = self.__dict__['_eval_func']
        if evf:
            self.__dict__['_eval_func'] = lambda x: evf(x) * p
        self.__dict__['normcoef'] *= p
def gen_roots_and_weights(n, an_func, sqrt_bn_func, mu):
    """Return [x, w]: nodes and weights of an n-point Gaussian rule.

    The orthogonal polynomials satisfy the recurrence

        P_{n+1}(x) = (x - A_n) P_n(x) - B_n P_{n-1}(x)

    where ``an_func(k)`` gives A_k, ``sqrt_bn_func(k)`` gives sqrt(B_k),
    and ``mu`` ( = h_0 ) is the integral of the weight function over the
    orthogonality interval.  The nodes are the eigenvalues of the symmetric
    tridiagonal Jacobi matrix; the weights come from the first components
    of its eigenvectors.
    """
    k = np.arange(1.0, n)
    off_diag = sqrt_bn_func(k)
    diag = an_func(np.concatenate(([0], k)))
    # Symmetric tridiagonal Jacobi matrix of the recurrence.
    jacobi_matrix = (np.diagflat(diag)
                     + np.diagflat(off_diag, 1)
                     + np.diagflat(off_diag, -1))
    x, v = eig(jacobi_matrix)
    # Sort nodes ascending; w_i = mu * (first eigenvector component)**2.
    order = x.real.argsort()
    return [x[order], (mu * v[0] ** 2)[order]]
# Jacobi Polynomials 1    P^(alpha,beta)_n(x)
def j_roots(n, alpha, beta, mu=0):
    """[x,w] = j_roots(n,alpha,beta)

    Returns the roots (x) of the nth order Jacobi polynomial, P^(alpha,beta)_n(x)
    and weights (w) to use in Gaussian Quadrature over [-1,1] with weighting
    function (1-x)**alpha (1+x)**beta with alpha,beta > -1.

    If `mu` is true, the integral of the weight (mu0) is appended to the
    returned list.
    """
    if any(alpha <= -1) or any(beta <= -1):
        raise ValueError("alpha and beta must be greater than -1.")
    if n < 1:
        raise ValueError("n must be positive.")
    # The recurrence-coefficient lambdas below can hit 0/0 at the k == 0/1
    # special cases (handled via np.where); silence numpy warnings while
    # the rule is built, and always restore the old error state.
    olderr = np.seterr(all='ignore')
    try:
        (p, q) = (alpha, beta)
        # from recurrence relations
        sbn_J = lambda k: 2.0/(2.0*k+p+q)*sqrt((k+p)*(k+q)/(2*k+q+p+1)) * \
            (np.where(k == 1, 1.0, sqrt(k*(k+p+q)/(2.0*k+p+q-1))))
        if any(p == q):  # XXX any or all???
            an_J = lambda k: 0.0*k
        else:
            an_J = lambda k: np.where(k == 0, (q-p)/(p+q+2.0),
                                      (q*q - p*p)/((2.0*k+p+q)*(2.0*k+p+q+2)))
        g = cephes.gamma
        # mu0: integral of the weight (1-x)**p (1+x)**q over [-1, 1].
        mu0 = 2.0**(p+q+1)*g(p+1)*g(q+1)/(g(p+q+2))
        val = gen_roots_and_weights(n, an_J, sbn_J, mu0)
    finally:
        np.seterr(**olderr)
    if mu:
        return val + [mu0]
    else:
        return val
def jacobi(n, alpha, beta, monic=0):
    """Returns the nth order Jacobi polynomial, P^(alpha,beta)_n(x)
    orthogonal over [-1,1] with weighting function
    (1-x)**alpha (1+x)**beta with alpha,beta > -1.
    """
    if n < 0:
        raise ValueError("n must be nonnegative.")
    wfunc = lambda x: (1-x)**alpha * (1+x)**beta
    if n == 0:
        # Degree 0 is the constant polynomial 1; no quadrature nodes needed.
        return orthopoly1d([], [], 1.0, 1.0, wfunc, (-1, 1), monic,
                           eval_func=np.ones_like)
    x, w, mu = j_roots(n, alpha, beta, mu=1)
    ab1 = alpha + beta + 1.0
    # hn: squared norm of P_n under the weight; kn: leading coefficient.
    hn = 2**ab1/(2*n+ab1)*_gam(n+alpha+1)
    hn *= _gam(n+beta+1.0) / _gam(n+1) / _gam(n+ab1)
    kn = _gam(2*n+ab1)/2.0**n / _gam(n+1) / _gam(n+ab1)
    # here kn = coefficient on x^n term
    p = orthopoly1d(x, w, hn, kn, wfunc, (-1, 1), monic,
                    lambda x: eval_jacobi(n, alpha, beta, x))
    return p
# Jacobi Polynomials shifted    G_n(p,q,x)
def js_roots(n, p1, q1, mu=0):
    """[x,w] = js_roots(n,p,q)

    Returns the roots (x) of the nth order shifted Jacobi polynomial, G_n(p,q,x),
    and weights (w) to use in Gaussian Quadrature over [0,1] with weighting
    function (1-x)**(p-q) x**(q-1) with p-q > -1 and q > 0.

    If `mu` is true, the integral of the weight (mu0) is appended.
    """
    # from recurrence relation
    if not (any((p1 - q1) > -1) and any(q1 > 0)):
        raise ValueError("(p - q) > -1 and q > 0 please.")
    if n <= 0:
        raise ValueError("n must be positive.")
    p, q = p1, q1
    # sqrt(B_k) and A_k of the three-term recurrence; the k == 0 / k == 1
    # special cases are selected via np.where.
    sbn_Js = lambda k: sqrt(np.where(k == 1, q*(p-q+1.0)/(p+2.0),
                                     k*(k+q-1.0)*(k+p-1.0)*(k+p-q)
                                     / ((2.0*k+p-2) * (2.0*k+p))))/(2*k+p-1.0)
    an_Js = lambda k: np.where(k == 0, q/(p+1.0), (2.0*k*(k+p)+q*(p-1.0)) / ((2.0*k+p+1.0)*(2*k+p-1.0)))
    # could also use definition
    # Gn(p,q,x) = constant_n * P^(p-q,q-1)_n(2x-1)
    # so roots of Gn(p,q,x) are (roots of P^(p-q,q-1)_n + 1) / 2.0
    g = _gam
    # integral of weight over interval
    mu0 = g(q)*g(p-q+1)/g(p+1)
    val = gen_roots_and_weights(n, an_Js, sbn_Js, mu0)
    if mu:
        return val + [mu0]
    else:
        return val
    # What code would look like using jacobi polynomial roots
    #if mu:
    #    [x,w,mut] = j_roots(n,p-q,q-1,mu=1)
    #    return [(x+1)/2.0,w,mu0]
    #else:
    #    [x,w] = j_roots(n,p-q,q-1,mu=0)
    #    return [(x+1)/2.0,w]
def sh_jacobi(n, p, q, monic=0):
    """Returns the nth order Jacobi polynomial, G_n(p,q,x)
    orthogonal over [0,1] with weighting function
    (1-x)**(p-q) (x)**(q-1) with p>q-1 and q > 0.
    """
    if n < 0:
        raise ValueError("n must be nonnegative.")
    wfunc = lambda x: (1.0-x)**(p-q) * (x)**(q-1.)
    if n == 0:
        # Degree 0: constant polynomial.  NOTE(review): the (-1, 1) limits
        # here look inconsistent with the (0, 1) interval used below --
        # kept as-is.
        return orthopoly1d([], [], 1.0, 1.0, wfunc, (-1, 1), monic,
                           eval_func=np.ones_like)
    n1 = n
    x, w, mu0 = js_roots(n1, p, q, mu=1)
    # hn: squared norm of G_n under the weight.
    hn = _gam(n+1)*_gam(n+q)*_gam(n+p)*_gam(n+p-q+1)
    hn /= (2*n+p)*(_gam(2*n+p)**2)
    # kn = 1.0 in standard form so monic is redundant.  Kept for compatibility.
    kn = 1.0
    pp = orthopoly1d(x, w, hn, kn, wfunc=wfunc, limits=(0, 1), monic=monic,
                     eval_func=lambda x: eval_sh_jacobi(n, p, q, x))
    return pp
# Generalized Laguerre    L^(alpha)_n(x)
def la_roots(n, alpha, mu=0):
    """[x,w] = la_roots(n,alpha)

    Returns the roots (x) of the nth order generalized (associated) Laguerre
    polynomial, L^(alpha)_n(x), and weights (w) to use in Gaussian quadrature over
    [0,inf] with weighting function exp(-x) x**alpha with alpha > -1.

    If `mu` is true, the integral of the weight (mu0) is appended.
    """
    if not all(alpha > -1):
        raise ValueError("alpha > -1")
    if n < 1:
        raise ValueError("n must be positive.")
    (p, q) = (alpha, 0.0)  # q is unused; kept for symmetry with j_roots
    sbn_La = lambda k: -sqrt(k*(k + p))  # from recurrence relation
    an_La = lambda k: 2*k + p + 1
    mu0 = cephes.gamma(alpha+1)  # integral of weight over interval
    val = gen_roots_and_weights(n, an_La, sbn_La, mu0)
    if mu:
        return val + [mu0]
    else:
        return val
def genlaguerre(n, alpha, monic=0):
    """Returns the nth order generalized (associated) Laguerre polynomial,
    L^(alpha)_n(x), orthogonal over [0,inf) with weighting function
    exp(-x) x**alpha with alpha > -1
    """
    if any(alpha <= -1):
        raise ValueError("alpha must be > -1")
    if n < 0:
        raise ValueError("n must be nonnegative.")
    # la_roots requires n >= 1; for n == 0 a 1-point rule is computed and
    # then discarded below.
    if n == 0:
        n1 = n+1
    else:
        n1 = n
    x, w, mu0 = la_roots(n1, alpha, mu=1)
    wfunc = lambda x: exp(-x) * x**alpha
    if n == 0:
        x, w = [], []
    # hn: squared norm; kn: leading coefficient (-1)**n / n!.
    hn = _gam(n+alpha+1)/_gam(n+1)
    kn = (-1)**n / _gam(n+1)
    p = orthopoly1d(x, w, hn, kn, wfunc, (0, inf), monic,
                    lambda x: eval_genlaguerre(n, alpha, x))
    return p
# Laguerre    L_n(x)
def l_roots(n, mu=0):
    """Gauss-Laguerre quadrature on [0, inf].

    Nodes and weights for integrating against exp(-x); when `mu` is true
    the weight integral is appended to the result.
    """
    # L_n is the alpha = 0 special case of the generalized Laguerre family.
    return la_roots(n, 0.0, mu=mu)
def laguerre(n, monic=0):
    """Return the nth order Laguerre polynoimal, L_n(x), orthogonal over
    [0,inf) with weighting function exp(-x)
    """
    if n < 0:
        raise ValueError("n must be nonnegative.")
    # l_roots requires n >= 1; compute a dummy 1-point rule for n == 0.
    if n == 0:
        n1 = n+1
    else:
        n1 = n
    x, w, mu0 = l_roots(n1, mu=1)
    if n == 0:
        x, w = [], []
    # hn = 1 (Laguerre polynomials are orthonormal under exp(-x));
    # kn = (-1)**n / n! is the leading coefficient.
    hn = 1.0
    kn = (-1)**n / _gam(n+1)
    p = orthopoly1d(x, w, hn, kn, lambda x: exp(-x), (0, inf), monic,
                    lambda x: eval_laguerre(n, x))
    return p
# Hermite 1 H_n(x)
def _h_gen_roots_and_weights(n, mu, factor, func):
"""Compute the roots and weights for Gaussian-Hermite quadrature.
Internal function.
"""
if n < 1:
raise ValueError("n must be positive.")
bn = np.sqrt(np.arange(1,n, dtype=np.float64)/factor)
c = np.diag(bn, -1)
x = linalg.eigvalsh(c, overwrite_a=True)
# improve roots by one application of Newton's method
dy = func(n, x)
df = factor*n*func(n-1, x)
x -= dy/df
df /= df.max()
w = 1 / (df * df)
# symmetrize
w = (w + w[::-1])/2
x = (x - x[::-1])/2
# scale w correctly
w *= np.sqrt(2.0*np.pi/factor) / w.sum()
if mu:
return [x, w, mu]
else:
return x, w
def h_roots(n, mu=0):
    """Gauss-Hermite quadrature on (-inf, inf).

    Nodes and weights for integrating against exp(-x**2); when `mu` is
    true a third element is appended to the result.
    """
    # factor = 2.0 selects the physicists' Hermite recurrence H_n.
    return _h_gen_roots_and_weights(n, mu, 2.0, cephes.eval_hermite)
def hermite(n, monic=0):
    """Return the nth order Hermite polynomial, H_n(x), orthogonal over
    (-inf,inf) with weighting function exp(-x**2)
    """
    if n < 0:
        raise ValueError("n must be nonnegative.")
    # h_roots requires n >= 1; compute a dummy 1-point rule for n == 0.
    if n == 0:
        n1 = n+1
    else:
        n1 = n
    x, w, mu0 = h_roots(n1, mu=1)
    wfunc = lambda x: exp(-x*x)
    if n == 0:
        x, w = [], []
    # hn: squared norm 2**n n! sqrt(pi); kn: leading coefficient 2**n.
    hn = 2**n * _gam(n+1)*sqrt(pi)
    kn = 2**n
    p = orthopoly1d(x, w, hn, kn, wfunc, (-inf, inf), monic,
                    lambda x: eval_hermite(n, x))
    return p
# Hermite 2    He_n(x)
def he_roots(n, mu=0):
    """Gauss quadrature for the normalized Hermite polynomials He_n.

    Nodes and weights on (-inf, inf); when `mu` is true a third element
    is appended to the result.
    """
    # factor = 1.0 selects the He_n (unit-variance) recurrence.
    return _h_gen_roots_and_weights(n, mu, 1.0, cephes.eval_hermitenorm)
def hermitenorm(n, monic=0):
    """Return the nth order normalized Hermite polynomial, He_n(x), orthogonal
    over (-inf,inf) with weighting function exp(-(x/2)**2)
    """
    if n < 0:
        raise ValueError("n must be nonnegative.")
    # he_roots requires n >= 1; compute a dummy 1-point rule for n == 0.
    if n == 0:
        n1 = n+1
    else:
        n1 = n
    x, w, mu0 = he_roots(n1, mu=1)
    wfunc = lambda x: exp(-x*x/4.0)
    if n == 0:
        x, w = [], []
    # NOTE(review): hn = sqrt(2*pi)*n! matches the exp(-x**2/2) convention
    # for He_n, while wfunc/docstring use exp(-x**2/4) -- confirm which
    # weight convention is intended.
    hn = sqrt(2*pi)*_gam(n+1)
    kn = 1.0
    p = orthopoly1d(x, w, hn, kn, wfunc=wfunc, limits=(-inf, inf), monic=monic,
                    eval_func=lambda x: eval_hermitenorm(n, x))
    return p
## The remainder of the polynomials can be derived from the ones above.

# Ultraspherical (Gegenbauer)    C^(alpha)_n(x)
def cg_roots(n, alpha, mu=0):
    """Gauss-Gegenbauer quadrature on [-1, 1].

    Nodes and weights for integrating against (1-x**2)**(alpha-1/2) with
    alpha > -1/2; when `mu` is true the weight integral is appended.
    """
    # The Gegenbauer weight is the Jacobi weight with both exponents
    # equal to alpha - 1/2.
    return j_roots(n, alpha - 0.5, alpha - 0.5, mu=mu)
def gegenbauer(n, alpha, monic=0):
    """Return the nth order Gegenbauer (ultraspherical) polynomial,
    C^(alpha)_n(x), orthogonal over [-1,1] with weighting function
    (1-x**2)**(alpha-1/2) with alpha > -1/2
    """
    base = jacobi(n, alpha-0.5, alpha-0.5, monic=monic)
    if monic:
        return base
    # Rescale from the Jacobi to the Gegenbauer normalization;
    # Abramowitz and Stegun 22.5.20
    factor = _gam(2*alpha+n)*_gam(alpha+0.5) / _gam(2*alpha) / _gam(alpha+0.5+n)
    base._scale(factor)
    base.__dict__['_eval_func'] = lambda x: eval_gegenbauer(float(n), alpha, x)
    return base
# Chebyshev of the first kind: T_n(x) = n! sqrt(pi) / _gam(n+1./2)* P^(-1/2,-1/2)_n(x)
# Computed anew.
def t_roots(n, mu=0):
    """[x,w] = t_roots(n)

    Returns the roots (x) of the nth order Chebyshev (of the first kind)
    polynomial, T_n(x), and weights (w) to use in Gaussian Quadrature
    over [-1,1] with weighting function (1-x**2)**(-1/2).

    If `mu` is true, the integral of the weight (pi) is appended.
    """
    if n < 1:
        raise ValueError("n must be positive.")
    # from recurrence relation: A_k = 0 and sqrt(B_k) = 1/2, except
    # sqrt(B_1) = sqrt(2)/2.
    sbn_J = lambda k: np.where(k == 1, sqrt(2)/2.0, 0.5)
    an_J = lambda k: 0.0*k
    # (removed an unused local alias of cephes.gamma that was never read)
    mu0 = pi  # integral of (1-x**2)**(-1/2) over [-1, 1]
    val = gen_roots_and_weights(n, an_J, sbn_J, mu0)
    if mu:
        return val + [mu0]
    else:
        return val
def chebyt(n, monic=0):
    """Return nth order Chebyshev polynomial of first kind, Tn(x).  Orthogonal
    over [-1,1] with weight function (1-x**2)**(-1/2).
    """
    if n < 0:
        raise ValueError("n must be nonnegative.")
    wfunc = lambda x: 1.0/sqrt(1-x*x)
    if n == 0:
        # T_0 = 1; its squared norm under the weight is pi.
        return orthopoly1d([], [], pi, 1.0, wfunc, (-1, 1), monic,
                           lambda x: eval_chebyt(n, x))
    n1 = n
    x, w, mu = t_roots(n1, mu=1)
    # hn: squared norm pi/2 (n >= 1); kn: leading coefficient 2**(n-1).
    hn = pi/2
    kn = 2**(n-1)
    p = orthopoly1d(x, w, hn, kn, wfunc, (-1, 1), monic,
                    lambda x: eval_chebyt(n, x))
    return p
# Chebyshev of the second kind
# U_n(x) = (n+1)! sqrt(pi) / (2*_gam(n+3./2)) * P^(1/2,1/2)_n(x)
def u_roots(n, mu=0):
    """Gauss-Chebyshev (second kind) quadrature on [-1, 1].

    Nodes and weights for integrating against (1-x**2)**(1/2); when `mu`
    is true the weight integral is appended to the result.
    """
    # U_n corresponds to the Jacobi weight with alpha = beta = 1/2.
    return j_roots(n, 0.5, 0.5, mu=mu)
def chebyu(n, monic=0):
    """Return nth order Chebyshev polynomial of second kind, Un(x).
    Orthogonal over [-1,1] with weight function (1-x**2)**(1/2).
    """
    poly = jacobi(n, 0.5, 0.5, monic=monic)
    if monic:
        return poly
    # Convert from the Jacobi normalization to the U_n normalization.
    rescale = sqrt(pi)/2.0*_gam(n+2) / _gam(n+1.5)
    poly._scale(rescale)
    return poly
# Chebyshev of the first kind    C_n(x)
def c_roots(n, mu=0):
    """Gauss quadrature for C_n, Chebyshev of the first kind on [-2, 2].

    Nodes and weights for the weight (1-(x/2)**2)**(-1/2); when `mu` is
    true the weight integral is appended.
    """
    # Build the Jacobi(-1/2, -1/2) rule on [-1, 1] and stretch the nodes
    # onto [-2, 2]; the weights are unchanged by the stretch.
    if mu:
        x, w, mu0 = j_roots(n, -0.5, -0.5, mu=1)
        return [2*x, w, mu0]
    x, w = j_roots(n, -0.5, -0.5, mu=0)
    return [2*x, w]
def chebyc(n, monic=0):
    """Return nth order Chebyshev polynomial of first kind, Cn(x).  Orthogonal
    over [-2,2] with weight function (1-(x/2)**2)**(-1/2).
    """
    if n < 0:
        raise ValueError("n must be nonnegative.")
    # c_roots requires n >= 1; compute a dummy 1-point rule for n == 0.
    if n == 0:
        n1 = n+1
    else:
        n1 = n
    x, w, mu0 = c_roots(n1, mu=1)
    if n == 0:
        x, w = [], []
    # hn: 8*pi when n == 0, otherwise 4*pi.
    hn = 4*pi * ((n == 0)+1)
    kn = 1.0
    p = orthopoly1d(x, w, hn, kn, wfunc=lambda x: 1.0/sqrt(1-x*x/4.0), limits=(-2, 2), monic=monic)
    if not monic:
        # Normalize so that C_n(2) == 2.
        p._scale(2.0/p(2))
        p.__dict__['_eval_func'] = lambda x: eval_chebyc(n, x)
    return p
# Chebyshev of the second kind    S_n(x)
def s_roots(n, mu=0):
    """Gauss quadrature for S_n, Chebyshev of the second kind on [-2, 2].

    Nodes and weights for the weight (1-(x/2)**2)**(1/2); when `mu` is
    true the weight integral is appended.
    """
    # Build the Jacobi(1/2, 1/2) rule on [-1, 1] and stretch the nodes
    # onto [-2, 2]; the weights are unchanged by the stretch.
    if mu:
        x, w, mu0 = j_roots(n, 0.5, 0.5, mu=1)
        return [2*x, w, mu0]
    x, w = j_roots(n, 0.5, 0.5, mu=0)
    return [2*x, w]
def chebys(n, monic=0):
    """Return nth order Chebyshev polynomial of second kind, Sn(x).  Orthogonal
    over [-2,2] with weight function (1-(x/2)**2)**(1/2).
    """
    if n < 0:
        raise ValueError("n must be nonnegative.")
    # s_roots requires n >= 1; compute a dummy 1-point rule for n == 0.
    if n == 0:
        n1 = n+1
    else:
        n1 = n
    x, w, mu0 = s_roots(n1, mu=1)
    if n == 0:
        x, w = [], []
    hn = pi
    kn = 1.0
    p = orthopoly1d(x, w, hn, kn, wfunc=lambda x: sqrt(1-x*x/4.0), limits=(-2, 2), monic=monic)
    if not monic:
        # Normalize so that S_n(2) == n + 1.
        factor = (n+1.0)/p(2)
        p._scale(factor)
        p.__dict__['_eval_func'] = lambda x: eval_chebys(n, x)
    return p
# Shifted Chebyshev of the first kind    T^*_n(x)
def ts_roots(n, mu=0):
    """Gauss quadrature for the shifted Chebyshev T^*_n on [0, 1].

    Nodes and weights for the weight (x-x**2)**(-1/2); when `mu` is true
    the weight integral is appended.
    """
    # T^*_n corresponds to the shifted Jacobi parameters p = 0, q = 1/2.
    return js_roots(n, 0.0, 0.5, mu=mu)
def sh_chebyt(n, monic=0):
    """Return nth order shifted Chebyshev polynomial of first kind, Tn(x).
    Orthogonal over [0,1] with weight function (x-x**2)**(-1/2).
    """
    poly = sh_jacobi(n, 0.0, 0.5, monic=monic)
    if monic:
        return poly
    # Convert from the shifted-Jacobi normalization; T^*_0 needs no scaling.
    scale = 4**n / 2.0 if n > 0 else 1.0
    poly._scale(scale)
    return poly
# Shifted Chebyshev of the second kind    U^*_n(x)
def us_roots(n, mu=0):
    """Gauss quadrature for the shifted Chebyshev U^*_n on [0, 1].

    Nodes and weights for the weight (x-x**2)**(1/2); when `mu` is true
    the weight integral is appended.
    """
    # U^*_n corresponds to the shifted Jacobi parameters p = 2, q = 3/2.
    return js_roots(n, 2.0, 1.5, mu=mu)
def sh_chebyu(n, monic=0):
    """Return nth order shifted Chebyshev polynomial of second kind, Un(x).
    Orthogonal over [0,1] with weight function (x-x**2)**(1/2).
    """
    poly = sh_jacobi(n, 2.0, 1.5, monic=monic)
    if not monic:
        # Convert from the shifted-Jacobi normalization.
        poly._scale(4**n)
    return poly
# Legendre
def p_roots(n, mu=0):
    """Gauss-Legendre quadrature on [-1, 1].

    Nodes and weights for the constant weight function 1; when `mu` is
    true the weight integral is appended.
    """
    # Legendre is the alpha = beta = 0 member of the Jacobi family.
    return j_roots(n, 0.0, 0.0, mu=mu)
def legendre(n, monic=0):
    """Returns the nth order Legendre polynomial, P_n(x), orthogonal over
    [-1,1] with weight function 1.
    """
    if n < 0:
        raise ValueError("n must be nonnegative.")
    # p_roots requires n >= 1; compute a dummy 1-point rule for n == 0.
    if n == 0:
        n1 = n+1
    else:
        n1 = n
    x, w, mu0 = p_roots(n1, mu=1)
    if n == 0:
        x, w = [], []
    # hn = 2/(2n+1): squared norm; kn = (2n)!/(n!**2 * 2**n): leading coefficient.
    hn = 2.0/(2*n+1)
    kn = _gam(2*n+1)/_gam(n+1)**2 / 2.0**n
    p = orthopoly1d(x, w, hn, kn, wfunc=lambda x: 1.0, limits=(-1, 1), monic=monic,
                    eval_func=lambda x: eval_legendre(n, x))
    return p
# Shifted Legendre    P^*_n(x)
def ps_roots(n, mu=0):
    """Gauss quadrature for the shifted Legendre P^*_n on [0, 1].

    Nodes and weights for the constant weight function 1; when `mu` is
    true the weight integral is appended.
    """
    # P^*_n corresponds to the shifted Jacobi parameters p = q = 1.
    return js_roots(n, 1.0, 1.0, mu=mu)
def sh_legendre(n, monic=0):
    """Returns the nth order shifted Legendre polynomial, P^*_n(x), orthogonal
    over [0,1] with weighting function 1.
    """
    if n < 0:
        raise ValueError("n must be nonnegative.")
    wfunc = lambda x: 0.0*x + 1.0
    if n == 0:
        # Degree 0: the constant polynomial 1.
        return orthopoly1d([], [], 1.0, 1.0, wfunc, (0, 1), monic,
                           lambda x: eval_sh_legendre(n, x))
    x, w, mu0 = ps_roots(n, mu=1)
    # hn = 1/(2n+1): squared norm; kn = (2n)!/n!**2: leading coefficient.
    hn = 1.0/(2*n+1.0)
    kn = _gam(2*n+1)/_gam(n+1)**2
    p = orthopoly1d(x, w, hn, kn, wfunc, limits=(0, 1), monic=monic,
                    eval_func=lambda x: eval_sh_legendre(n, x))
    return p
#------------------------------------------------------------------------------
# Vectorized functions for evaluation
#------------------------------------------------------------------------------
from ._ufuncs import \
binom, eval_jacobi, eval_sh_jacobi, eval_gegenbauer, eval_chebyt, \
eval_chebyu, eval_chebys, eval_chebyc, eval_sh_chebyt, eval_sh_chebyu, \
eval_legendre, eval_sh_legendre, eval_genlaguerre, eval_laguerre, \
eval_hermite, eval_hermitenorm
| |
from __future__ import print_function
import os
import re
import sys
from talib import abstract
# FIXME: initialize once, then shutdown at the end, rather than each call?
# FIXME: should we pass startIdx and endIdx into function?
# FIXME: don't return number of elements since it always equals allocation?

# Raw C prototypes collected from ta_func.h, one whitespace-normalized
# string per function.
functions = []

# Candidate locations of the TA-Lib C headers on Unix-like systems;
# Windows installs to a fixed path.
include_paths = ['/usr/include', '/usr/local/include', '/opt/include', '/opt/local/include']
if sys.platform == 'win32':
    include_paths = [r'c:\ta-lib\c\include']

# Locate ta_func.h; abort with a message on stderr if TA-Lib is not installed.
header_found = False
for path in include_paths:
    ta_func_header = os.path.join(path, 'ta-lib', 'ta_func.h')
    if os.path.exists(ta_func_header):
        header_found = True
        break
if not header_found:
    print('Error: ta-lib/ta_func.h not found', file=sys.stderr)
    sys.exit(1)
# Accumulate each (possibly multi-line) C prototype into a single
# whitespace-normalized string in `functions`.  A blank line marks the end
# of a prototype.
with open(ta_func_header) as f:
    tmp = []
    for line in f:
        line = line.strip()
        if tmp or \
                line.startswith('TA_RetCode TA_') or \
                line.startswith('int TA_'):
            # Raw strings: '\*' and '\s' are invalid escape sequences in a
            # plain string literal (SyntaxWarning on modern Python).
            line = re.sub(r'/\*[^\*]+\*/', '', line)  # strip comments
            tmp.append(line)
            if not line:
                s = ' '.join(tmp)
                s = re.sub(r'\s+', ' ', s)
                functions.append(s)
                tmp = []

# strip "float" functions
functions = [s for s in functions if not s.startswith('TA_RetCode TA_S_')]

# strip non-indicators
functions = [s for s in functions if not s.startswith('TA_RetCode TA_Set')]
functions = [s for s in functions if not s.startswith('TA_RetCode TA_Restore')]

# print headers
print("""\
cimport numpy as np
from cython import boundscheck, wraparound
cimport _ta_lib as lib
from _ta_lib cimport TA_RetCode
# NOTE: _ta_check_success, NaN are defined in common.pxi
# NumPy C API is initialize in _func.pxi
""")
# cleanup variable names to make them more pythonic
def cleanup(name):
    """Strip TA-Lib's 'in'/'optIn' Hungarian prefixes and lowercase."""
    for prefix in ('in', 'optIn'):
        if name.startswith(prefix):
            return name[len(prefix):].lower()
    return name.lower()
# print functions: for each TA function emit a Cython `stream_*` wrapper
# (signature, docstring, declarations, input checks, the lib call and the
# return statement) to stdout.
names = []
for f in functions:
    if 'Lookback' in f:  # skip lookback functions
        continue
    i = f.index('(')
    name = f[:i].split()[1]
    args = f[i:].split(',')
    # Raw string: '\(' / '\)' are invalid escape sequences in a plain
    # string literal (SyntaxWarning on modern Python).
    args = [re.sub(r'[\(\);]', '', s).strip() for s in args]

    shortname = name[3:]
    names.append(shortname)
    func_info = abstract.Function(shortname).info
    defaults, documentation = abstract._get_defaults_and_docs(func_info)

    # --- function signature ---------------------------------------------
    print('@wraparound(False) # turn off relative indexing from end of lists')
    print('@boundscheck(False) # turn off bounds-checking for entire function')
    print('def stream_%s(' % shortname, end=' ')
    docs = ['    %s(' % shortname]
    i = 0
    for arg in args:
        var = arg.split()[-1]

        if var in ('startIdx', 'endIdx'):
            continue
        elif 'out' in var:
            break

        if i > 0:
            print(',', end=' ')
        i += 1

        if var.endswith('[]'):
            var = cleanup(var[:-2])
            assert arg.startswith('const double'), arg
            print('np.ndarray %s not None' % var, end=' ')
            docs.append(var)
            docs.append(', ')
        elif var.startswith('opt'):
            var = cleanup(var)
            default_arg = arg.split()[-1][len('optIn'):]  # chop off typedef and 'optIn'
            default_arg = default_arg[0].lower() + default_arg[1:]  # lowercase first letter

            if arg.startswith('double'):
                if default_arg in defaults:
                    print('double %s=%s' % (var, defaults[default_arg]), end=' ')
                else:
                    print('double %s=-4e37' % var, end=' ')  # TA_REAL_DEFAULT
            elif arg.startswith('int'):
                if default_arg in defaults:
                    print('int %s=%s' % (var, defaults[default_arg]), end=' ')
                else:
                    print('int %s=-2**31' % var, end=' ')  # TA_INTEGER_DEFAULT
            elif arg.startswith('TA_MAType'):
                print('int %s=0' % var, end=' ')  # TA_MAType_SMA
            else:
                assert False, arg

            if '[, ' not in docs:
                docs[-1] = ('[, ')
            docs.append('%s=?' % var)
            docs.append(', ')

    docs[-1] = '])' if '[, ' in docs else ')'

    # --- docstring body from the abstract interface ---------------------
    if documentation:
        tmp_docs = []
        lower_case = False
        documentation = documentation.split('\n')[2:]  # discard abstract calling definition
        for line in documentation:
            # Rename 'price' to 'real' except in the 'prices' (OHLCV) case.
            if 'prices' not in line and 'price' in line:
                line = line.replace('price', 'real')
            if not line or line.isspace():
                tmp_docs.append('')
            else:
                tmp_docs.append('    %s' % line)  # add an indent of 4 spaces
        docs.append('\n\n')
        docs.append('\n'.join(tmp_docs))
        docs.append('\n    ')

    print('):')
    print('    """%s"""' % ''.join(docs))

    # --- cdef declarations ----------------------------------------------
    print('    cdef:')
    print('        np.npy_intp length')
    print('        double val')
    print('        int begidx, endidx, lookback')
    print('        TA_RetCode retCode')
    for arg in args:
        var = arg.split()[-1]
        if 'out' in var:
            break
        if var.endswith('[]'):
            var = cleanup(var[:-2])
            if 'double' in arg:
                print('        double* %s_data' % var)
            elif 'int' in arg:
                print('        int* %s_data' % var)
            else:
                assert False, args
    for arg in args:
        var = arg.split()[-1]
        if 'out' not in var:
            continue
        if var.endswith('[]'):
            var = cleanup(var[:-2])
            if 'double' in arg:
                print('        double %s' % var)
            elif 'int' in arg:
                print('        int %s' % var)
            else:
                assert False, args
        elif var.startswith('*'):
            var = cleanup(var[1:])
            print('        int %s' % var)
        else:
            assert False, arg

    # --- input array validation -----------------------------------------
    for arg in args:
        var = arg.split()[-1]
        if 'out' in var:
            break
        if var.endswith('[]'):
            var = cleanup(var[:-2])
            if 'double' in arg:
                cast = '<double*>'
            elif 'int' in arg:
                cast = '<int*>'
            else:
                assert False, arg
            print('    if PyArray_TYPE(%s) != np.NPY_DOUBLE:' % var)
            print('        raise Exception("%s is not double")' % var)
            print('    if %s.ndim != 1:' % var)
            print('        raise Exception("%s has wrong dimensions")' % var)
            print('    if not (PyArray_FLAGS(%s) & np.NPY_C_CONTIGUOUS):' % var)
            print('        %s = PyArray_GETCONTIGUOUS(%s)' % (var, var))
            print('    %s_data = %s%s.data' % (var, cast, var))

    # check all input array lengths are the same
    seen = False
    for arg in args:
        var = arg.split()[-1]
        if 'out' in var:
            break
        if var.endswith('[]'):
            var = cleanup(var[:-2])
            if not seen:
                print('    length = %s.shape[0]' % var)
                seen = True
            else:
                print('    if length != %s.shape[0]:' % var)
                print('        raise Exception("input lengths are different")')

    # check for all input values are non-NaN
    # NOTE(review): this loop only sets `seen` and emits nothing --
    # apparently an unfinished feature; behavior left unchanged.
    seen = False
    for arg in args:
        var = arg.split()[-1]
        if 'out' in var:
            break
        if var.endswith('[]') and 'double' in arg:
            seen = True
            break

    # --- initialize outputs ---------------------------------------------
    for arg in args:
        var = arg.split()[-1]
        if 'out' not in var:
            continue
        if var.endswith('[]'):
            var = cleanup(var[:-2])
            if 'double' in arg:
                print('    %s = NaN' % var)
            elif 'int' in arg:
                print('    %s = 0' % var)
            else:
                assert False, args

    # --- the library call ------------------------------------------------
    print('    retCode = lib.%s(' % name, end=' ')
    for i, arg in enumerate(args):
        if i > 0:
            print(',', end=' ')
        var = arg.split()[-1]

        if var.endswith('[]'):
            var = cleanup(var[:-2])
            if 'out' in var:
                print('&%s' % var, end=' ')
            else:
                print('%s_data' % var, end=' ')
        elif var.startswith('*'):
            var = cleanup(var[1:])
            print('&%s' % var, end=' ')
        elif var in ('startIdx', 'endIdx'):
            # Streaming mode: both index arguments are the last element.
            print('length - 1', end=' ')
        else:
            cleaned = cleanup(var)
            print(cleaned, end=' ')
    print(')')

    # --- check and return -------------------------------------------------
    print('    _ta_check_success("%s", retCode)' % name)
    print('    return ', end='')
    i = 0
    for arg in args:
        var = arg.split()[-1]
        if var.endswith('[]'):
            var = var[:-2]
        elif var.startswith('*'):
            var = var[1:]
        if var.startswith('out'):
            if var not in ("outNBElement", "outBegIdx"):
                if i > 0:
                    print(',', end=' ')
                i += 1
                print(cleanup(var), end=' ')
        else:
            assert re.match('.*(void|startIdx|endIdx|opt|in)/*', arg), arg
    print('')
    print('')
| |
"""
Matplotlib Exporter
===================
This submodule contains tools for crawling a matplotlib figure and exporting
relevant pieces to a renderer.
"""
import warnings
import io
from . import utils
import matplotlib
from matplotlib import transforms
from matplotlib.backends.backend_agg import FigureCanvasAgg
class Exporter(object):
"""Matplotlib Exporter
Parameters
----------
renderer : Renderer object
The renderer object called by the exporter to create a figure
visualization. See mplexporter.Renderer for information on the
methods which should be defined within the renderer.
close_mpl : bool
If True (default), close the matplotlib figure as it is rendered. This
is useful for when the exporter is used within the notebook, or with
an interactive matplotlib backend.
"""
def __init__(self, renderer, close_mpl=True):
self.close_mpl = close_mpl
self.renderer = renderer
def run(self, fig):
"""
Run the exporter on the given figure
Parameters
---------
fig : matplotlib.Figure instance
The figure to export
"""
# Calling savefig executes the draw() command, putting elements
# in the correct place.
if fig.canvas is None:
fig.canvas = FigureCanvasAgg(fig)
fig.savefig(io.BytesIO(), format='png', dpi=fig.dpi)
if self.close_mpl:
import matplotlib.pyplot as plt
plt.close(fig)
self.crawl_fig(fig)
@staticmethod
def process_transform(transform, ax=None, data=None, return_trans=False,
force_trans=None):
"""Process the transform and convert data to figure or data coordinates
Parameters
----------
transform : matplotlib Transform object
The transform applied to the data
ax : matplotlib Axes object (optional)
The axes the data is associated with
data : ndarray (optional)
The array of data to be transformed.
return_trans : bool (optional)
If true, return the final transform of the data
force_trans : matplotlib.transform instance (optional)
If supplied, first force the data to this transform
Returns
-------
code : string
Code is either "data", "axes", "figure", or "display", indicating
the type of coordinates output.
transform : matplotlib transform
the transform used to map input data to output data.
Returned only if return_trans is True
new_data : ndarray
Data transformed to match the given coordinate code.
Returned only if data is specified
"""
if isinstance(transform, transforms.BlendedGenericTransform):
warnings.warn("Blended transforms not yet supported. "
"Zoom behavior may not work as expected.")
if force_trans is not None:
if data is not None:
data = (transform - force_trans).transform(data)
transform = force_trans
code = "display"
if ax is not None:
for (c, trans) in [("data", ax.transData),
("axes", ax.transAxes),
("figure", ax.figure.transFigure),
("display", transforms.IdentityTransform())]:
if transform.contains_branch(trans):
code, transform = (c, transform - trans)
break
if data is not None:
if return_trans:
return code, transform.transform(data), transform
else:
return code, transform.transform(data)
else:
if return_trans:
return code, transform
else:
return code
def crawl_fig(self, fig):
"""Crawl the figure and process all axes"""
with self.renderer.draw_figure(fig=fig,
props=utils.get_figure_properties(fig)):
for ax in fig.axes:
self.crawl_ax(ax)
def crawl_ax(self, ax):
"""Crawl the axes and process all elements within"""
with self.renderer.draw_axes(ax=ax,
props=utils.get_axes_properties(ax)):
for line in ax.lines:
self.draw_line(ax, line)
for text in ax.texts:
self.draw_text(ax, text)
for (text, ttp) in zip([ax.xaxis.label, ax.yaxis.label, ax.title],
["xlabel", "ylabel", "title"]):
if(hasattr(text, 'get_text') and text.get_text()):
self.draw_text(ax, text, force_trans=ax.transAxes,
text_type=ttp)
for artist in ax.artists:
# TODO: process other artists
if isinstance(artist, matplotlib.text.Text):
self.draw_text(ax, artist)
for patch in ax.patches:
self.draw_patch(ax, patch)
for collection in ax.collections:
self.draw_collection(ax, collection)
for image in ax.images:
self.draw_image(ax, image)
legend = ax.get_legend()
if legend is not None:
props = utils.get_legend_properties(ax, legend)
with self.renderer.draw_legend(legend=legend, props=props):
if props['visible']:
self.crawl_legend(ax, legend)
def crawl_legend(self, ax, legend):
"""
Recursively look through objects in legend children
"""
legendElements = list(utils.iter_all_children(legend._legend_box,
skipContainers=True))
legendElements.append(legend.legendPatch)
for child in legendElements:
# force a large zorder so it appears on top
child.set_zorder(1E6 + child.get_zorder())
try:
# What kind of object...
if isinstance(child, matplotlib.patches.Patch):
self.draw_patch(ax, child, force_trans=ax.transAxes)
elif isinstance(child, matplotlib.text.Text):
if not (child is legend.get_children()[-1]
and child.get_text() == 'None'):
self.draw_text(ax, child, force_trans=ax.transAxes)
elif isinstance(child, matplotlib.lines.Line2D):
warnings.warn("Legend element %s not implemented" % child)
elif isinstance(child, matplotlib.collections.Collection):
self.draw_collection(ax, child,
force_pathtrans=ax.transAxes)
else:
warnings.warn("Legend element %s not implemented" % child)
except NotImplementedError:
warnings.warn("Legend element %s not implemented" % child)
def draw_line(self, ax, line, force_trans=None):
"""Process a matplotlib line and call renderer.draw_line"""
coordinates, data = self.process_transform(line.get_transform(),
ax, line.get_xydata(),
force_trans=force_trans)
linestyle = utils.get_line_style(line)
if linestyle['dasharray'] is None:
linestyle = None
markerstyle = utils.get_marker_style(line)
if (markerstyle['marker'] in ['None', 'none', None]
or markerstyle['markerpath'][0].size == 0):
markerstyle = None
label = line.get_label()
if markerstyle or linestyle:
self.renderer.draw_marked_line(data=data, coordinates=coordinates,
linestyle=linestyle,
markerstyle=markerstyle,
label=label,
mplobj=line)
def draw_text(self, ax, text, force_trans=None, text_type=None):
"""Process a matplotlib text object and call renderer.draw_text"""
content = text.get_text()
if content:
transform = text.get_transform()
position = text.get_position()
coords, position = self.process_transform(transform, ax,
position,
force_trans=force_trans)
style = utils.get_text_style(text)
self.renderer.draw_text(text=content, position=position,
coordinates=coords,
text_type=text_type,
style=style, mplobj=text)
def draw_patch(self, ax, patch, force_trans=None):
"""Process a matplotlib patch object and call renderer.draw_path"""
vertices, pathcodes = utils.SVG_path(patch.get_path())
transform = patch.get_transform()
coordinates, vertices = self.process_transform(transform,
ax, vertices,
force_trans=force_trans)
linestyle = utils.get_path_style(patch, fill=patch.get_fill())
self.renderer.draw_path(data=vertices,
coordinates=coordinates,
pathcodes=pathcodes,
style=linestyle,
mplobj=patch)
    def draw_collection(self, ax, collection,
                        force_pathtrans=None,
                        force_offsettrans=None):
        """Process a matplotlib collection and call renderer.draw_collection"""
        # Let matplotlib resolve the master transform, offset transform,
        # offsets and paths for this collection in a single call.
        # NOTE(review): _prepare_points() is a private matplotlib API —
        # its return signature may change between versions.
        (transform, transOffset,
         offsets, paths) = collection._prepare_points()
        offset_coords, offsets = self.process_transform(
            transOffset, ax, offsets, force_trans=force_offsettrans)
        # NOTE(review): no data argument is passed here, so presumably only
        # the coordinate-system code is returned (not a (coords, data)
        # pair) — confirm against process_transform.
        path_coords = self.process_transform(
            transform, ax, force_trans=force_pathtrans)
        # Convert each path to SVG vertices/codes, then transform the
        # vertices; keep the (vertices, pathcodes) pairing intact.
        processed_paths = [utils.SVG_path(path) for path in paths]
        processed_paths = [(self.process_transform(
            transform, ax, path[0], force_trans=force_pathtrans)[1], path[1])
            for path in processed_paths]
        path_transforms = collection.get_transforms()
        try:
            # matplotlib 1.3: path_transforms are transform objects.
            # Convert them to numpy arrays.
            path_transforms = [t.get_matrix() for t in path_transforms]
        except AttributeError:
            # matplotlib 1.4: path transforms are already numpy arrays.
            pass
        styles = {'linewidth': collection.get_linewidths(),
                  'facecolor': collection.get_facecolors(),
                  'edgecolor': collection.get_edgecolors(),
                  'alpha': collection._alpha,
                  'zorder': collection.get_zorder()}
        # "data"-space offsets are applied before the path transform;
        # "screen"-space offsets after it.
        offset_dict = {"data": "before",
                       "screen": "after"}
        offset_order = offset_dict[collection.get_offset_position()]
        self.renderer.draw_path_collection(paths=processed_paths,
                                           path_coordinates=path_coords,
                                           path_transforms=path_transforms,
                                           offsets=offsets,
                                           offset_coordinates=offset_coords,
                                           offset_order=offset_order,
                                           styles=styles,
                                           mplobj=collection)
def draw_image(self, ax, image):
"""Process a matplotlib image object and call renderer.draw_image"""
self.renderer.draw_image(imdata=utils.image_to_base64(image),
extent=image.get_extent(),
coordinates="data",
style={"alpha": image.get_alpha(),
"zorder": image.get_zorder()},
mplobj=image)
| |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
    # pylint: disable=unused-import,ungrouped-imports
    from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
    T = TypeVar('T')
    # Signature of the optional `cls` response hook every operation accepts:
    # (pipeline_response, deserialized_value, response_headers) -> Any.
    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class RouteFilterRulesOperations(object):
    """RouteFilterRulesOperations operations.
    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.
    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.network.v2019_08_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    # Alias so callers can reach the model classes through the operation group.
    models = _models
    def __init__(self, client, config, serializer, deserializer):
        # Pipeline client and (de)serializers are injected by the service client.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    # Internal: issues the single DELETE request that starts the long-running
    # deletion; polling to completion is handled by begin_delete.
    def _delete_initial(
        self,
        resource_group_name, # type: str
        route_filter_name, # type: str
        rule_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> None
        cls = kwargs.pop('cls', None) # type: ClsType[None]
        # Map well-known failure codes to typed exceptions; callers may
        # extend/override via an 'error_map' keyword.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-08-01"
        # Construct URL
        url = self._delete_initial.metadata['url'] # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
            'ruleName': self._serialize.url("rule_name", rule_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # 200/202 = deletion accepted (async); 204 = rule already absent.
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        if cls:
            return cls(pipeline_response, None, {})
    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}'} # type: ignore
    def begin_delete(
        self,
        resource_group_name, # type: str
        route_filter_name, # type: str
        rule_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> LROPoller[None]
        """Deletes the specified rule from a route filter.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param route_filter_name: The name of the route filter.
        :type route_filter_name: str
        :param rule_name: The name of the rule.
        :type rule_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None) # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
        # Only fire the initial request when not resuming from a saved state.
        if cont_token is None:
            raw_result = self._delete_initial(
                resource_group_name=resource_group_name,
                route_filter_name=route_filter_name,
                rule_name=rule_name,
                cls=lambda x,y,z: x,  # hand the raw PipelineResponse to the poller
                **kwargs
            )
        # These kwargs were consumed by the initial call; drop them so the
        # polling method does not receive them again.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # Delete returns no body; only invoke the optional response hook.
            if cls:
                return cls(pipeline_response, None, {})
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
            'ruleName': self._serialize.url("rule_name", rule_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        # polling=True -> default ARM poller; False -> no polling; otherwise a
        # caller-supplied PollingMethod instance is used as-is.
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}'} # type: ignore
    def get(
        self,
        resource_group_name, # type: str
        route_filter_name, # type: str
        rule_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> "_models.RouteFilterRule"
        """Gets the specified rule from a route filter.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param route_filter_name: The name of the route filter.
        :type route_filter_name: str
        :param rule_name: The name of the rule.
        :type rule_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: RouteFilterRule, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2019_08_01.models.RouteFilterRule
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilterRule"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-08-01"
        accept = "application/json"
        # Construct URL
        url = self.get.metadata['url'] # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
            'ruleName': self._serialize.url("rule_name", rule_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('RouteFilterRule', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}'} # type: ignore
    # Internal: issues the single PUT request that starts the long-running
    # create/update; polling is handled by begin_create_or_update.
    def _create_or_update_initial(
        self,
        resource_group_name, # type: str
        route_filter_name, # type: str
        rule_name, # type: str
        route_filter_rule_parameters, # type: "_models.RouteFilterRule"
        **kwargs # type: Any
    ):
        # type: (...) -> "_models.RouteFilterRule"
        cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilterRule"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-08-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self._create_or_update_initial.metadata['url'] # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
            'ruleName': self._serialize.url("rule_name", rule_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {} # type: Dict[str, Any]
        body_content = self._serialize.body(route_filter_rule_parameters, 'RouteFilterRule')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # 200 = updated existing rule; 201 = created a new rule.
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        if response.status_code == 200:
            deserialized = self._deserialize('RouteFilterRule', pipeline_response)
        if response.status_code == 201:
            deserialized = self._deserialize('RouteFilterRule', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}'} # type: ignore
    def begin_create_or_update(
        self,
        resource_group_name, # type: str
        route_filter_name, # type: str
        rule_name, # type: str
        route_filter_rule_parameters, # type: "_models.RouteFilterRule"
        **kwargs # type: Any
    ):
        # type: (...) -> LROPoller["_models.RouteFilterRule"]
        """Creates or updates a route in the specified route filter.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param route_filter_name: The name of the route filter.
        :type route_filter_name: str
        :param rule_name: The name of the route filter rule.
        :type rule_name: str
        :param route_filter_rule_parameters: Parameters supplied to the create or update route filter
         rule operation.
        :type route_filter_rule_parameters: ~azure.mgmt.network.v2019_08_01.models.RouteFilterRule
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either RouteFilterRule or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_08_01.models.RouteFilterRule]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilterRule"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
        # Only fire the initial request when not resuming from a saved state.
        if cont_token is None:
            raw_result = self._create_or_update_initial(
                resource_group_name=resource_group_name,
                route_filter_name=route_filter_name,
                rule_name=rule_name,
                route_filter_rule_parameters=route_filter_rule_parameters,
                cls=lambda x,y,z: x,  # hand the raw PipelineResponse to the poller
                **kwargs
            )
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # Deserialize the final response body into a RouteFilterRule model.
            deserialized = self._deserialize('RouteFilterRule', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
            'ruleName': self._serialize.url("rule_name", rule_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}'} # type: ignore
    # Internal: issues the single PATCH request that starts the long-running
    # partial update; polling is handled by begin_update.
    def _update_initial(
        self,
        resource_group_name, # type: str
        route_filter_name, # type: str
        rule_name, # type: str
        route_filter_rule_parameters, # type: "_models.PatchRouteFilterRule"
        **kwargs # type: Any
    ):
        # type: (...) -> "_models.RouteFilterRule"
        cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilterRule"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-08-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self._update_initial.metadata['url'] # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
            'ruleName': self._serialize.url("rule_name", rule_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {} # type: Dict[str, Any]
        body_content = self._serialize.body(route_filter_rule_parameters, 'PatchRouteFilterRule')
        body_content_kwargs['content'] = body_content
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('RouteFilterRule', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}'} # type: ignore
    def begin_update(
        self,
        resource_group_name, # type: str
        route_filter_name, # type: str
        rule_name, # type: str
        route_filter_rule_parameters, # type: "_models.PatchRouteFilterRule"
        **kwargs # type: Any
    ):
        # type: (...) -> LROPoller["_models.RouteFilterRule"]
        """Updates a route in the specified route filter.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param route_filter_name: The name of the route filter.
        :type route_filter_name: str
        :param rule_name: The name of the route filter rule.
        :type rule_name: str
        :param route_filter_rule_parameters: Parameters supplied to the update route filter rule
         operation.
        :type route_filter_rule_parameters: ~azure.mgmt.network.v2019_08_01.models.PatchRouteFilterRule
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either RouteFilterRule or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_08_01.models.RouteFilterRule]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilterRule"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
        # Only fire the initial request when not resuming from a saved state.
        if cont_token is None:
            raw_result = self._update_initial(
                resource_group_name=resource_group_name,
                route_filter_name=route_filter_name,
                rule_name=rule_name,
                route_filter_rule_parameters=route_filter_rule_parameters,
                cls=lambda x,y,z: x,  # hand the raw PipelineResponse to the poller
                **kwargs
            )
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # Deserialize the final response body into a RouteFilterRule model.
            deserialized = self._deserialize('RouteFilterRule', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
            'ruleName': self._serialize.url("rule_name", rule_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}'} # type: ignore
    def list_by_route_filter(
        self,
        resource_group_name, # type: str
        route_filter_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> Iterable["_models.RouteFilterRuleListResult"]
        """Gets all RouteFilterRules in a route filter.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param route_filter_name: The name of the route filter.
        :type route_filter_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either RouteFilterRuleListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_08_01.models.RouteFilterRuleListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilterRuleListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-08-01"
        accept = "application/json"
        def prepare_request(next_link=None):
            # Build the request for the first page (operation URL) or a
            # follow-up page (server-supplied next_link, used verbatim).
            # Construct headers
            header_parameters = {} # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list_by_route_filter.metadata['url'] # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {} # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {} # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        def extract_data(pipeline_response):
            # Deserialize one page: return (next page link or None, items iterator).
            deserialized = self._deserialize('RouteFilterRuleListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        # ItemPaged lazily drives get_next/extract_data as the caller iterates.
        return ItemPaged(
            get_next, extract_data
        )
    list_by_route_filter.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules'} # type: ignore
| |
"""
MagPy
LaTeX Table output filter
Written by Roman Leonhardt December 2012
- contains write function for hour data
ToDo: use longer files (analyze full year data if only monthly files are available)
ToDo: add table footer
"""
from __future__ import print_function
from magpy.stream import *
from magpy.opt.Table import Table
def writeLATEX(datastream, filename, **kwargs):
    """
    Write *datastream* to *filename* as a LaTeX table.

    kwargs:
        mode -- 'wdc' produces WDC-style tables (one side-labelled block per
                component with a trailing mean column); any other value
                produces a plain table with one column per key.
        keys -- stream keys to export (default ['x','y','z','f']).

    Table layout options (caption, label, column justification, rotation,
    width, number, font size) are read from TEX* entries in the stream
    header. Returns True on success, False when x/y/z are missing.
    """
    mode = kwargs.get('mode')
    keys = kwargs.get('keys')
    if not keys:
        keys = ['x','y','z','f']
    header = datastream.header
    # Layout options come from the stream header (TEX* entries).
    iagacode = header.get('StationIAGAcode',"").upper()
    caption = header.get('TEXcaption',"")
    label = header.get('TEXlabel',"")
    justs = header.get('TEXjusts',"") # e.g. 'lrccc'
    rotate = header.get('TEXrotate',False)
    tablewidth = header.get('TEXtablewidth',"")
    tablenum = header.get('TEXtablenum',"")
    fontsize = header.get('TEXfontsize',"")
    if not tablewidth:
        tablewidth = '0pt'
    if not fontsize:
        fontsize = '\\footnotesize'
    keylst = datastream._get_key_headers()
    # Vector components are mandatory; 'f' is optional and dropped if absent.
    if not 'x' in keylst or not 'y' in keylst or not 'z' in keylst:
        print("formatWDC: writing WDC data requires at least x,y,z components")
        return False
    elif not 'f' in keylst:
        keys = ['x','y','z']
    # ndtype: stream stores data in ndarrays rather than row objects.
    ndtype = False
    if len(datastream.ndarray[0]) > 0:
        ndtype = True
    datalen = datastream.length()[0]
    if mode == 'wdc':
        print("formatLATEX: Writing wdc mode")
        # 1. determine sampling rate
        samplinginterval = datastream.get_sampling_period()
        # get difference between first and last time in days
        ts,te = datastream._find_t_limits()
        deltat = date2num(te)-date2num(ts)
        #deltat = datastream[-1].time - datastream[0].time
        # Choose the table geometry (columns per row, date formats) from the
        # sampling rate; samplinginterval is in days.
        if 0.8 < samplinginterval/30.0 < 1.2:
            srange = 12
            sint = 'month'
            sintprev = 'year'
            datestr = '%Y'
            sideheadstr = ''
        elif 0.8 < samplinginterval < 1.2:
            srange = 31
            sint = 'day'
            sintprev = 'month'
            datestr = '%m'
            sideheadstr = '%Y'
        elif 0.8 < samplinginterval*24 < 1.2:
            srange = 24
            sint = 'hour'
            sintprev = 'day'
            datestr = '%b%d'
            sideheadstr = '%Y'
        elif 0.8 < samplinginterval*24*60 < 1.2:
            srange = 60
            sint = 'minute'
            sintprev = 'hour'
            datestr = '%H'
            sideheadstr = '%b%d %Y'
        elif 0.8 < samplinginterval*24*3600 < 1.2:
            srange = 60
            sint = 'second'
            sintprev = 'minute'
            datestr = '%H:%M'
            sideheadstr = '%b%d %Y'
        else:
            logging.error('Could not determine sampling rate for latex output: samplinginterval = %f days' % samplinginterval)
            return
        numcols = srange+1
        headline = np.array(range(srange+1)).tolist()
        headline[0] = sintprev
        headline.append('mean')
        if not justs:
            justs = 'p'*(numcols+1)
        #fout = open( filename, "wb" )
        # Python 3 needs text mode with newline='' for the table writer.
        if sys.version_info >= (3,0,0):
            fout = open(filename, "w", newline='')
        else:
            fout = open(filename, "wb")
        t = Table(numcols+1, justs=justs, caption=caption, label=label, tablewidth=tablewidth, tablenum=tablenum, fontsize=fontsize, rotate=True)
        t.add_header_row(headline)
        # Per-key 2-D table of values (row 0 holds the date labels).
        # NOTE(review): barray is built but never used below.
        aarray = [[] for key in KEYLIST]
        barray = [[] for key in KEYLIST]
        for key in keys:
            pos = KEYLIST.index(key)
            aarray[pos] = np.empty((srange+1,int(np.round(float(datalen)/float(srange))),))
            aarray[pos][:] = np.nan
            aarray[pos] = aarray[pos].tolist()
            #aarray = list(aarray)
            # mbar: per-component base value (values are stored as offsets
            # from this base, rounded down to the nearest 100).
            bar = datastream.ndarray[pos]
            bar = bar[~np.isnan(bar)]
            mbar = np.floor(np.min(bar)/100.)*100.
            if np.max(bar) - mbar < 1000:
                sigfigs = 3
            """
            # exec('...' % key)
            # here starts the key dependend analysis
            exec('%sarray = np.empty((srange+1,int(np.round(float(datalen)/float(srange))),))' % key)
            exec('%sarray[:] = np.NAN' % key)
            exec('%sarray = %sarray.tolist()' % (key,key)) # using list, so that strings can be used
            # get means and variation:
            if ndtype:
                ind = KEYLIST.index(key)
                ar = datastream.ndarray[ind]
                exec('%sar = ar' % key)
            else:
                exec('%sar = np.array([elem.%s for elem in datastream if not isnan(elem.%s)])' % (key,key,key))
            exec('m%s = np.floor(np.min(%sar)/100)*100' % (key,key))
            if np.max(eval(key+'ar')) - eval('m'+key) < 1000:
                sigfigs = 3
            """
            #for elem in datastream:
            # Fill the table: column 0 gets the date label, column currx
            # (the sint component of the timestamp) gets the base-reduced value.
            for i in range(datalen):
                if not ndtype:
                    elem = datastream[i]
                    elemx = elem.x
                    elemy = elem.y
                    elemz = elem.z
                    elemf = elem.f
                    timeval = elem.time
                else:
                    elem = datastream.ndarray[pos][i]
                    """
                    elemx = datastream.ndarray[1][i]
                    elemy = datastream.ndarray[2][i]
                    elemz = datastream.ndarray[3][i]
                    elemf = datastream.ndarray[4][i]
                    """
                    timeval = datastream.ndarray[0][i]
                dateobj = num2date(timeval).replace(tzinfo=None)
                currx = eval('dateobj.'+sint) + 1
                curry = eval('dateobj.'+sintprev)-1
                datecnt = datetime.strftime(num2date(timeval).replace(tzinfo=None),datestr)
                aarray[pos][0][curry] = datecnt
                aarray[pos][currx][curry] = elem-mbar
            #exec('%sarray[0][curry] = datecnt' % key)
            #exec('%sarray[currx][curry] = elem%s-m%s' % (key,key,key))
            # Append a mean column: mean over each row, skipping the label cell.
            mecol = []
            #addcollist = eval(key+'array')
            addcollist = aarray[pos]
            tmpar = np.array(addcollist)
            tmpar = np.transpose(tmpar)
            for i in range(len(tmpar)):
                meanlst = []
                for j in range(len(addcollist)):
                    meanlst.append(addcollist[j][i])
                try:
                    if len(meanlst) > 24:
                        median = np.mean(meanlst[1:])
                    else:
                        median = float(NaN)
                except:
                    median = float(NaN)
                    pass
                mecol.append(median)
            addcollist.append(mecol)
            numcols = numcols+1
            # NOTE(review): timeval here is whatever the last loop iteration
            # left behind — verify that using the final timestamp for the
            # side heading is intended.
            label = datetime.strftime(num2date(timeval).replace(tzinfo=None),sideheadstr) + ', Field component: ' + key.upper() + ', Base: ' + str(mbar) + ', Unit: ' + datastream.header.get('unit-col-'+key,'')
            t.add_data(addcollist, label=label, labeltype='side', sigfigs=0)
        t.print_table(fout)
        fout.close()
        return True
    else:
        # Plain mode: one date column plus one column per requested key.
        numcols = len(keys)+1
        if not justs:
            justs = 'l'*numcols
        headline = ['Date']
        samplinginterval = datastream.get_sampling_period()
        # Pick a date format with just enough resolution for the sampling rate.
        if 0.8 < samplinginterval/365.0 < 1.2:
            datestr = '%Y'
        elif 0.8 < samplinginterval/30.0 < 1.2:
            datestr = '%Y-%m'
        elif 0.8 < samplinginterval < 1.2:
            datestr = '%Y-%m-%d'
        elif 0.8 < samplinginterval*24 < 1.2:
            datestr = '%Y-%m-%dT%H:%M'
        else:
            datestr = '%Y-%m-%dT%H:%M:%S.%f'
        col1tmp = datastream._get_column('time')
        col1 = []
        for elem in col1tmp:
            col1.append(datetime.strftime(num2date(elem),datestr))
        addcollist = [col1]
        for iter,key in enumerate(keys):
            # extract headers
            colhead = header.get('col-'+key," ")
            if not header.get('unit-col-'+key,'') == '':
                colhead = colhead+' $['+header.get('unit-col-'+key,'')+']$'
            headline.append(colhead)
            # Extract data and time columns
            # NOTE(review): exec/eval create col2, col3, ... dynamically;
            # keys originate from the controlled KEYLIST, not user input.
            column = str(iter+2)
            exec('col'+ column + ' = datastream._get_column(\'' + key + '\')')
            addcollist.append(eval('col'+ column))
        if sys.version_info >= (3,0,0):
            fout = open(filename, "w", newline='')
        else:
            fout = open(filename, "wb")
        t = Table(numcols, justs=justs, caption=caption, label=label, tablewidth=tablewidth, tablenum=tablenum, fontsize=fontsize, rotate=rotate)
        t.add_header_row(headline)
        #col3 = [[0.12345,0.1],[0.12345,0.01],[0.12345,0.001]]
        t.add_data(addcollist, sigfigs=3)
        t.print_table(fout)
        fout.close()
        return True
| |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import os
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core import client_options as client_options_lib
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport import mtls # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
try:
    # Newer google-api-core exposes a sentinel type for "no retry specified".
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError:  # pragma: NO COVER
    # Older google-api-core without gapic_v1.method._MethodDefault:
    # fall back to a plain object sentinel in the union.
    OptionalRetry = Union[retries.Retry, object]  # type: ignore
from google.iam.credentials_v1.types import common
from google.protobuf import duration_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
from .transports.base import IAMCredentialsTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import IAMCredentialsGrpcTransport
from .transports.grpc_asyncio import IAMCredentialsGrpcAsyncIOTransport
class IAMCredentialsClientMeta(type):
    """Metaclass for the IAMCredentials client.

    This provides class-level methods for building and retrieving
    support objects (e.g. transport) without polluting the client instance
    objects.
    """
    # Registry of transport implementations, keyed by label.  Insertion
    # order matters: the first entry is the default transport.
    _transport_registry = OrderedDict()  # type: Dict[str, Type[IAMCredentialsTransport]]
    _transport_registry["grpc"] = IAMCredentialsGrpcTransport
    _transport_registry["grpc_asyncio"] = IAMCredentialsGrpcAsyncIOTransport
    def get_transport_class(cls,
            label: Optional[str] = None,
        ) -> Type[IAMCredentialsTransport]:
        """Returns an appropriate transport class.

        Args:
            label: The name of the desired transport. If none is
                provided, then the first transport in the registry is used.

        Returns:
            The transport class to use.

        Raises:
            KeyError: If ``label`` is given but not registered.
        """
        # If a specific transport is requested, return that one.
        if label:
            return cls._transport_registry[label]
        # No transport is requested; return the default (that is, the first one
        # in the dictionary).
        return next(iter(cls._transport_registry.values()))
class IAMCredentialsClient(metaclass=IAMCredentialsClientMeta):
"""A service account is a special type of Google account that
belongs to your application or a virtual machine (VM), instead
of to an individual end user. Your application assumes the
identity of the service account to call Google APIs, so that the
users aren't directly involved.
Service account credentials are used to temporarily assume the
identity of the service account. Supported credential types
include OAuth 2.0 access tokens, OpenID Connect ID tokens,
self-signed JSON Web Tokens (JWTs), and more.
"""
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
"""Converts api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
str: converted mTLS api endpoint.
"""
if not api_endpoint:
return api_endpoint
mtls_endpoint_re = re.compile(
r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
)
m = mtls_endpoint_re.match(api_endpoint)
name, mtls, sandbox, googledomain = m.groups()
if mtls or not googledomain:
return api_endpoint
if sandbox:
return api_endpoint.replace(
"sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
)
return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
DEFAULT_ENDPOINT = "iamcredentials.googleapis.com"
DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
DEFAULT_ENDPOINT
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
IAMCredentialsClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_info(info)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
IAMCredentialsClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(
filename)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
    @property
    def transport(self) -> IAMCredentialsTransport:
        """Returns the transport used by the client instance.
        Returns:
            IAMCredentialsTransport: The transport used by the client
            instance.
        """
        # Set exactly once in __init__: either the caller-supplied transport
        # instance or one built via get_transport_class().
        return self._transport
@staticmethod
def service_account_path(project: str,service_account: str,) -> str:
"""Returns a fully-qualified service_account string."""
return "projects/{project}/serviceAccounts/{service_account}".format(project=project, service_account=service_account, )
@staticmethod
def parse_service_account_path(path: str) -> Dict[str,str]:
"""Parses a service_account path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)/serviceAccounts/(?P<service_account>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_billing_account_path(billing_account: str, ) -> str:
"""Returns a fully-qualified billing_account string."""
return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
@staticmethod
def parse_common_billing_account_path(path: str) -> Dict[str,str]:
"""Parse a billing_account path into its component segments."""
m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_folder_path(folder: str, ) -> str:
"""Returns a fully-qualified folder string."""
return "folders/{folder}".format(folder=folder, )
@staticmethod
def parse_common_folder_path(path: str) -> Dict[str,str]:
"""Parse a folder path into its component segments."""
m = re.match(r"^folders/(?P<folder>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_organization_path(organization: str, ) -> str:
"""Returns a fully-qualified organization string."""
return "organizations/{organization}".format(organization=organization, )
@staticmethod
def parse_common_organization_path(path: str) -> Dict[str,str]:
"""Parse a organization path into its component segments."""
m = re.match(r"^organizations/(?P<organization>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_project_path(project: str, ) -> str:
"""Returns a fully-qualified project string."""
return "projects/{project}".format(project=project, )
@staticmethod
def parse_common_project_path(path: str) -> Dict[str,str]:
"""Parse a project path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_location_path(project: str, location: str, ) -> str:
"""Returns a fully-qualified location string."""
return "projects/{project}/locations/{location}".format(project=project, location=location, )
@staticmethod
def parse_common_location_path(path: str) -> Dict[str,str]:
"""Parse a location path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
return m.groupdict() if m else {}
    @classmethod
    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
        """Return the API endpoint and client cert source for mutual TLS.
        The client cert source is determined in the following order:
        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
        client cert source is None.
        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
        default client cert source exists, use the default one; otherwise the client cert
        source is None.
        The API endpoint is determined in the following order:
        (1) if `client_options.api_endpoint` is provided, use the provided one.
        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
        default mTLS endpoint; if the environment variable is "never", use the default API
        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
        use the default API endpoint.
        More details can be found at https://google.aip.dev/auth/4114.
        Args:
            client_options (google.api_core.client_options.ClientOptions): Custom options for the
                client. Only the `api_endpoint` and `client_cert_source` properties may be used
                in this method.
        Returns:
            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
                client cert source to use.
        Raises:
            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
        """
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        # Both env vars are validated before use so a typo fails loudly.
        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
        if use_client_cert not in ("true", "false"):
            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
        if use_mtls_endpoint not in ("auto", "never", "always"):
            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
        # Figure out the client cert source to use.
        client_cert_source = None
        if use_client_cert == "true":
            if client_options.client_cert_source:
                client_cert_source = client_options.client_cert_source
            elif mtls.has_default_client_cert_source():
                client_cert_source = mtls.default_client_cert_source()
        # Figure out which api endpoint to use.
        if client_options.api_endpoint is not None:
            api_endpoint = client_options.api_endpoint
        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
        else:
            api_endpoint = cls.DEFAULT_ENDPOINT
        return api_endpoint, client_cert_source
    def __init__(self, *,
            credentials: Optional[ga_credentials.Credentials] = None,
            transport: Union[str, IAMCredentialsTransport, None] = None,
            client_options: Optional[client_options_lib.ClientOptions] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            ) -> None:
        """Instantiates the iam credentials client.
        Args:
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            transport (Union[str, IAMCredentialsTransport]): The
                transport to use. If set to None, a transport is chosen
                automatically.
            client_options (google.api_core.client_options.ClientOptions): Custom options for the
                client. It won't take effect if a ``transport`` instance is provided.
                (1) The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
                environment variable can also be used to override the endpoint:
                "always" (always use the default mTLS endpoint), "never" (always
                use the default regular endpoint) and "auto" (auto switch to the
                default mTLS endpoint if client certificate is present, this is
                the default value). However, the ``api_endpoint`` property takes
                precedence if provided.
                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                is "true", then the ``client_cert_source`` property can be used
                to provide client certificate for mutual TLS transport. If
                not provided, the default SSL client certificate will be used if
                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
                set, no client certificate will be used.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        if isinstance(client_options, dict):
            client_options = client_options_lib.from_dict(client_options)
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options)
        # Older ClientOptions versions may lack `api_key`; read defensively.
        api_key_value = getattr(client_options, "api_key", None)
        if api_key_value and credentials:
            raise ValueError("client_options.api_key and credentials are mutually exclusive")
        # Save or instantiate the transport.
        # Ordinarily, we provide the transport, but allowing a custom transport
        # instance provides an extensibility point for unusual situations.
        if isinstance(transport, IAMCredentialsTransport):
            # transport is a IAMCredentialsTransport instance.
            # A pre-built transport already carries its own credentials and
            # scopes, so supplying them here again is an error.
            if credentials or client_options.credentials_file or api_key_value:
                raise ValueError("When providing a transport instance, "
                                 "provide its credentials directly.")
            if client_options.scopes:
                raise ValueError(
                    "When providing a transport instance, provide its scopes "
                    "directly."
                )
            self._transport = transport
        else:
            # Local import: google.auth._default is a private module and may
            # not expose get_api_key_credentials on older google-auth.
            import google.auth._default  # type: ignore
            if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"):
                credentials = google.auth._default.get_api_key_credentials(api_key_value)
            Transport = type(self).get_transport_class(transport)
            self._transport = Transport(
                credentials=credentials,
                credentials_file=client_options.credentials_file,
                host=api_endpoint,
                scopes=client_options.scopes,
                client_cert_source_for_mtls=client_cert_source_func,
                quota_project_id=client_options.quota_project_id,
                client_info=client_info,
                always_use_jwt_access=True,
            )
    def generate_access_token(self,
            request: Optional[Union[common.GenerateAccessTokenRequest, dict]] = None,
            *,
            name: Optional[str] = None,
            delegates: Optional[Sequence[str]] = None,
            scope: Optional[Sequence[str]] = None,
            lifetime: Optional[duration_pb2.Duration] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Optional[float] = None,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> common.GenerateAccessTokenResponse:
        r"""Generates an OAuth 2.0 access token for a service
        account.
        .. code-block:: python
            from google.iam import credentials_v1
            def sample_generate_access_token():
                # Create a client
                client = credentials_v1.IAMCredentialsClient()
                # Initialize request argument(s)
                request = credentials_v1.GenerateAccessTokenRequest(
                    name="name_value",
                    scope=['scope_value_1', 'scope_value_2'],
                )
                # Make the request
                response = client.generate_access_token(request=request)
                # Handle the response
                print(response)
        Args:
            request (Union[google.iam.credentials_v1.types.GenerateAccessTokenRequest, dict]):
                The request object.
            name (str):
                Required. The resource name of the service account for
                which the credentials are requested, in the following
                format:
                ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``.
                The ``-`` wildcard character is required; replacing it
                with a project ID is invalid.
                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            delegates (Sequence[str]):
                The sequence of service accounts in a delegation chain.
                Each service account must be granted the
                ``roles/iam.serviceAccountTokenCreator`` role on its
                next service account in the chain. The last service
                account in the chain must be granted the
                ``roles/iam.serviceAccountTokenCreator`` role on the
                service account that is specified in the ``name`` field
                of the request.
                The delegates must have the following format:
                ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``.
                The ``-`` wildcard character is required; replacing it
                with a project ID is invalid.
                This corresponds to the ``delegates`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            scope (Sequence[str]):
                Required. Code to identify the scopes
                to be included in the OAuth 2.0 access
                token. See
                https://developers.google.com/identity/protocols/googlescopes
                for more information.
                At least one value required.
                This corresponds to the ``scope`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            lifetime (google.protobuf.duration_pb2.Duration):
                The desired lifetime duration of the
                access token in seconds. Must be set to
                a value less than or equal to 3600 (1
                hour). If a value is not specified, the
                token's lifetime will be set to a
                default value of one hour.
                This corresponds to the ``lifetime`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        Returns:
            google.iam.credentials_v1.types.GenerateAccessTokenResponse:
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([name, delegates, scope, lifetime])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')
        # Minor optimization to avoid making a copy if the user passes
        # in a common.GenerateAccessTokenRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, common.GenerateAccessTokenRequest):
            request = common.GenerateAccessTokenRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if name is not None:
            request.name = name
        if delegates is not None:
            request.delegates = delegates
        if scope is not None:
            request.scope = scope
        if lifetime is not None:
            request.lifetime = lifetime
        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.generate_access_token]
        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("name", request.name),
            )),
        )
        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )
        # Done; return the response.
        return response
    def generate_id_token(self,
            request: Optional[Union[common.GenerateIdTokenRequest, dict]] = None,
            *,
            name: Optional[str] = None,
            delegates: Optional[Sequence[str]] = None,
            audience: Optional[str] = None,
            include_email: Optional[bool] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Optional[float] = None,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> common.GenerateIdTokenResponse:
        r"""Generates an OpenID Connect ID token for a service
        account.
        .. code-block:: python
            from google.iam import credentials_v1
            def sample_generate_id_token():
                # Create a client
                client = credentials_v1.IAMCredentialsClient()
                # Initialize request argument(s)
                request = credentials_v1.GenerateIdTokenRequest(
                    name="name_value",
                    audience="audience_value",
                )
                # Make the request
                response = client.generate_id_token(request=request)
                # Handle the response
                print(response)
        Args:
            request (Union[google.iam.credentials_v1.types.GenerateIdTokenRequest, dict]):
                The request object.
            name (str):
                Required. The resource name of the service account for
                which the credentials are requested, in the following
                format:
                ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``.
                The ``-`` wildcard character is required; replacing it
                with a project ID is invalid.
                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            delegates (Sequence[str]):
                The sequence of service accounts in a delegation chain.
                Each service account must be granted the
                ``roles/iam.serviceAccountTokenCreator`` role on its
                next service account in the chain. The last service
                account in the chain must be granted the
                ``roles/iam.serviceAccountTokenCreator`` role on the
                service account that is specified in the ``name`` field
                of the request.
                The delegates must have the following format:
                ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``.
                The ``-`` wildcard character is required; replacing it
                with a project ID is invalid.
                This corresponds to the ``delegates`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            audience (str):
                Required. The audience for the token,
                such as the API or account that this
                token grants access to.
                This corresponds to the ``audience`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            include_email (bool):
                Include the service account email in the token. If set
                to ``true``, the token will contain ``email`` and
                ``email_verified`` claims.
                This corresponds to the ``include_email`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        Returns:
            google.iam.credentials_v1.types.GenerateIdTokenResponse:
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([name, delegates, audience, include_email])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')
        # Minor optimization to avoid making a copy if the user passes
        # in a common.GenerateIdTokenRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, common.GenerateIdTokenRequest):
            request = common.GenerateIdTokenRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if name is not None:
            request.name = name
        if delegates is not None:
            request.delegates = delegates
        if audience is not None:
            request.audience = audience
        if include_email is not None:
            request.include_email = include_email
        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.generate_id_token]
        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("name", request.name),
            )),
        )
        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )
        # Done; return the response.
        return response
    def sign_blob(self,
            request: Optional[Union[common.SignBlobRequest, dict]] = None,
            *,
            name: Optional[str] = None,
            delegates: Optional[Sequence[str]] = None,
            payload: Optional[bytes] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Optional[float] = None,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> common.SignBlobResponse:
        r"""Signs a blob using a service account's system-managed
        private key.
        .. code-block:: python
            from google.iam import credentials_v1
            def sample_sign_blob():
                # Create a client
                client = credentials_v1.IAMCredentialsClient()
                # Initialize request argument(s)
                request = credentials_v1.SignBlobRequest(
                    name="name_value",
                    payload=b'payload_blob',
                )
                # Make the request
                response = client.sign_blob(request=request)
                # Handle the response
                print(response)
        Args:
            request (Union[google.iam.credentials_v1.types.SignBlobRequest, dict]):
                The request object.
            name (str):
                Required. The resource name of the service account for
                which the credentials are requested, in the following
                format:
                ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``.
                The ``-`` wildcard character is required; replacing it
                with a project ID is invalid.
                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            delegates (Sequence[str]):
                The sequence of service accounts in a delegation chain.
                Each service account must be granted the
                ``roles/iam.serviceAccountTokenCreator`` role on its
                next service account in the chain. The last service
                account in the chain must be granted the
                ``roles/iam.serviceAccountTokenCreator`` role on the
                service account that is specified in the ``name`` field
                of the request.
                The delegates must have the following format:
                ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``.
                The ``-`` wildcard character is required; replacing it
                with a project ID is invalid.
                This corresponds to the ``delegates`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            payload (bytes):
                Required. The bytes to sign.
                This corresponds to the ``payload`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        Returns:
            google.iam.credentials_v1.types.SignBlobResponse:
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([name, delegates, payload])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')
        # Minor optimization to avoid making a copy if the user passes
        # in a common.SignBlobRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, common.SignBlobRequest):
            request = common.SignBlobRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if name is not None:
            request.name = name
        if delegates is not None:
            request.delegates = delegates
        if payload is not None:
            request.payload = payload
        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.sign_blob]
        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("name", request.name),
            )),
        )
        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )
        # Done; return the response.
        return response
    def sign_jwt(self,
            request: Optional[Union[common.SignJwtRequest, dict]] = None,
            *,
            name: Optional[str] = None,
            delegates: Optional[Sequence[str]] = None,
            payload: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Optional[float] = None,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> common.SignJwtResponse:
        r"""Signs a JWT using a service account's system-managed
        private key.
        .. code-block:: python
            from google.iam import credentials_v1
            def sample_sign_jwt():
                # Create a client
                client = credentials_v1.IAMCredentialsClient()
                # Initialize request argument(s)
                request = credentials_v1.SignJwtRequest(
                    name="name_value",
                    payload="payload_value",
                )
                # Make the request
                response = client.sign_jwt(request=request)
                # Handle the response
                print(response)
        Args:
            request (Union[google.iam.credentials_v1.types.SignJwtRequest, dict]):
                The request object.
            name (str):
                Required. The resource name of the service account for
                which the credentials are requested, in the following
                format:
                ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``.
                The ``-`` wildcard character is required; replacing it
                with a project ID is invalid.
                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            delegates (Sequence[str]):
                The sequence of service accounts in a delegation chain.
                Each service account must be granted the
                ``roles/iam.serviceAccountTokenCreator`` role on its
                next service account in the chain. The last service
                account in the chain must be granted the
                ``roles/iam.serviceAccountTokenCreator`` role on the
                service account that is specified in the ``name`` field
                of the request.
                The delegates must have the following format:
                ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``.
                The ``-`` wildcard character is required; replacing it
                with a project ID is invalid.
                This corresponds to the ``delegates`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            payload (str):
                Required. The JWT payload to sign: a
                JSON object that contains a JWT Claims
                Set.
                This corresponds to the ``payload`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        Returns:
            google.iam.credentials_v1.types.SignJwtResponse:
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([name, delegates, payload])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')
        # Minor optimization to avoid making a copy if the user passes
        # in a common.SignJwtRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, common.SignJwtRequest):
            request = common.SignJwtRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if name is not None:
            request.name = name
        if delegates is not None:
            request.delegates = delegates
        if payload is not None:
            request.payload = payload
        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.sign_jwt]
        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("name", request.name),
            )),
        )
        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )
        # Done; return the response.
        return response
    def __enter__(self):
        # Context-manager entry: nothing to acquire (the transport already
        # exists); simply hand back the client.
        return self
    def __exit__(self, type, value, traceback):
        """Releases underlying transport's resources.
        .. warning::
            ONLY use as a context manager if the transport is NOT shared
            with other clients! Exiting the with block will CLOSE the transport
            and may cause errors in other clients!
        """
        # `type` intentionally shadows the builtin to match the standard
        # __exit__ signature naming used by this generated package.
        self.transport.close()
try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution(
            "google-iam-credentials",
        ).version,
    )
except pkg_resources.DistributionNotFound:
    # Package metadata unavailable (e.g. running from a source checkout):
    # fall back to a ClientInfo without a gapic_version.
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
# NOTE(review): this rebinding happens after the class body was evaluated,
# so the __init__ default still refers to the DEFAULT_CLIENT_INFO imported
# from .transports.base above — confirm this matches the generator's intent.
__all__ = (
    "IAMCredentialsClient",
)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.