text
stringlengths 12
1.05M
| repo_name
stringlengths 5
86
| path
stringlengths 4
191
| language
stringclasses 1
value | license
stringclasses 15
values | size
int32 12
1.05M
| keyword
listlengths 1
23
| text_hash
stringlengths 64
64
|
|---|---|---|---|---|---|---|---|
import os
import io
import csv
import re
########################################################
# #
# Author: Noel Conlisk #
# Email: noecon@gmail.com #
# Script function: Using Stress data and model #
# geometry information this script builds a #
# vtk version of the model with stresses included #
# #
# Prerequisites: An .rpt file with stresses and #
# the corresponding Abaqus .inp format model file #
# #
########################################################
# Directory holding the Abaqus '*edited.inp' model files to convert.
path = 'F:\\Modelling_files\\SoFIA3\\test_conversion'
# path = raw_input('input path to .odb file: ')
target_files = [f for f in os.listdir(path) if f.endswith('edited.inp')]

# Load the tab-separated offsets file into a dict mapping
# patient id -> (dx, dy, dz) strings.
custom_offsets_file = 'F:\\Modelling_files\\origin_offsets2.csv'
offset = {}
with open(custom_offsets_file, 'r') as offsets_handle:
    for row in csv.reader(offsets_handle, delimiter='\t'):
        offset[row[0]] = (row[1], row[2], row[3])

# Compiled once (the original recompiled these for every input line).
# Nodal coordinate lines: "id, x, y, z".  \s* matches zero or more
# whitespaces, \d matches digits, \d+\.\d+ matches floats; anchoring the
# full line stops the node id being picked up again later, when
# connectivity is being described.
NODE_RE = re.compile(r'^\s*\d+,+\s+\d+\.\d+,+\s+\S+\s+\S+\s+$')
# Element connectivity lines: element id followed by 8 node ids.
ELEMENT_RE = re.compile(r'\s*\S+\s+\S+\s+\S+\s+\S+\s+\S+\s+\S+\s+\S+\s+\S+\s+\S+\s+$')
# Stress report lines: "node_id value".
VALUE_RE = re.compile(r'^\s*\d+\s+\S+\s+$')

# Main loop: one .vtk output file per input model.
for jobfile in target_files:
    patient_ID = jobfile.split('.inp')[0]
    # Collect nodal-coordinate and element-connectivity lines from the .inp.
    # NOTE: regional part labels are not preserved, so the .vtk file will be
    # a single continuous mesh. - NEXT GOAL IS PRESERVE LABELS.
    nodes = []
    elements = []
    with open(os.path.join(path, jobfile), 'r') as inp_file:
        for line in inp_file:
            if NODE_RE.match(line):
                nodes.append(line)
            if ELEMENT_RE.match(line):
                elements.append(line)

    # Look up this patient's origin offsets and convert them to floats.
    # Fall back to zero offsets when the patient is missing (the original
    # code would crash on leftover string loop variables in that case).
    current_pat_id = patient_ID.split('_')[0]
    if current_pat_id in offset:
        delta_x = float(offset[current_pat_id][0])
        # Divides by 10 to correct y offset.
        delta_y = float(offset[current_pat_id][1]) / 10.
        delta_z = float(offset[current_pat_id][2])
    else:
        delta_x = delta_y = delta_z = 0.0

    with open(patient_ID + '_realigned.vtk', 'w') as out_file:
        # --- VTK header ---
        out_file.write('# vtk DataFile Version 3.0')
        out_file.write('\nvtk output')
        out_file.write('\nASCII')
        out_file.write('\nDATASET UNSTRUCTURED_GRID')

        # --- points, with per-patient offsets applied ---
        num_nodes = len(nodes)
        out_file.write('\n\nPOINTS ' + str(num_nodes) + ' float\n')
        for node_line in nodes:
            # Drop the first column (the node id), keep x, y, z.
            coords = node_line.split(',')[1:]
            # y is sign-inverted to correct translations from the vascops
            # export error; x and z keep their sign.
            x = (float(coords[0]) + delta_x) * 1
            y = (float(coords[1]) + delta_y) * -1
            z = (float(coords[2]) + delta_z) * 1
            out_file.write(' ' + str(x) + ' ' + str(y) + ' ' + str(z) + '\n')

        # --- cells ---
        num_elements = len(elements)
        # Nodes per element: 4 = linear tet, 10 = quadratic tet, 8 = brick.
        # COULD IMPROVE BY LETTING SCRIPT AUTOMATICALLY DETECT ELEMENT TYPE.
        eltype = 8
        # Map node count to the corresponding VTK cell type id; the lookup
        # raises KeyError for unknown counts instead of silently leaving
        # vtk_el_ID unbound as the original if/elif chain could.
        vtk_el_ID = {4: 10, 10: 24, 8: 12}[eltype]
        # Each CELLS row holds the node count plus eltype connectivity ids.
        param = eltype + 1
        out_file.write('\n\nCELLS ' + str(num_elements) + ' '
                       + str(num_elements * param) + '\n')
        for element_line in elements:
            # Remove all whitespace, then split the comma list into ints;
            # the first entry is the element id, the rest the connectivity.
            numbers = [int(v) for v in ''.join(element_line.split()).split(',')]
            out_file.write(str(eltype) + ' ')
            for node_id in numbers[1:eltype + 1]:
                # minus 1 as node numbering from 0 in vtk and 1 in abaqus
                out_file.write(str(node_id - 1) + ' ')
            out_file.write('\n')

        # --- cell types ---
        out_file.write('\n\nCELL_TYPES ' + str(num_elements))
        for _ in range(num_elements):
            out_file.write('\n' + str(vtk_el_ID))

        # --- data section headers ---
        out_file.write('\n\nCELL_DATA ' + str(num_elements))
        out_file.write('\n\nPOINT_DATA ' + str(num_nodes))

        # --- von Mises stresses from the .rpt report file ---
        # Maps zero-based (vtk) node id -> list of stress value strings;
        # a node can appear more than once in the report.
        stress_values = {}
        rpt_name = patient_ID + '_unique_nodal.rpt'
        with open(os.path.join(path, rpt_name), 'r') as rpt_file:
            for line in rpt_file:
                # Only "node_id value" rows; headers etc. are skipped
                # (the original printed 'no values found' for every
                # non-matching line, which drowned the output).
                if VALUE_RE.match(line):
                    fields = line.split()
                    # convert node numbering to vtk from abaqus
                    key = int(fields[0]) - 1
                    stress_values.setdefault(key, []).append(fields[1])

        print(patient_ID)
        print(len(stress_values))
        out_file.write('\nSCALARS MISES double')
        out_file.write('\nLOOKUP_TABLE default\n')
        for key in range(len(stress_values)):
            values = stress_values[key]
            if len(values) == 1:
                out_file.write(values[0] + ' ')
            else:
                # Average duplicate readings for a shared node.
                avg_stress = (float(values[0]) + float(values[1])) / 2.0
                out_file.write(str(avg_stress) + ' ')
        # Need to modify the above to allow multiple values to be recognised
        # as belonging to a single point.
|
nconlisk/python
|
VTK/abq_vtk_inp_v5_with_origins_and_stress.py
|
Python
|
gpl-3.0
| 9,161
|
[
"VTK"
] |
21e3dc746e266b6f4f0effee1b3b00bed363238b9c3f4d57414d1dde9f664cae
|
"""This is a minimal Python client for Mads Haahr's random number generator at www.random.org
# This tiny set of functions only implements a subset of the HTTP interface available. In particular it only uses the 'live'
# random number generator, and doesn't offer the option of using the alternative 'stored' random
# number sets. However, it should be obvious how to extend it by sending requests with different parameters.
# The web service code is modelled on Mark Pilgrim's "Dive into Python" tutorial at http://www.diveintopython.org/http_web_services
# This client by George Dunbar, University of Warwick (Copyright George Dunbar, 2008)
# It is distributed under the Gnu General Public License.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
See <http://www.gnu.org/licenses/> for a copy of the GNU General Public License.
For use that falls outside this license, please contact me.
To use in a python script or at the interactive prompt
(randomwrapy.py has to be in the Python search path, of course):
from randomwrapy import *
rnumlistwithoutreplacement(0, 12)
# returns a list of the integers 0 - 12 inclusive, in a random order
rnumlistwithreplacement(12, 5)
# returns 12 integers from the range [0, 5]
rnumlistwithreplacement(12, 5, 2)
# returns 12 integers from the range [2, 5]
rrandom()
# returns a random float in the range [0, 1]
reportquota()
# tells you how many bits you have available; visit www.random.org/quota for more information
Arguments where given are (must be) numbers, of course.
There is almost no error checking in these scripts! For example, if
the web site is down, Python will simply raise an exception and report the
http error code. See worldrandom.py for an alternative implementation
that goes a little further with error checking.
"""
from six.moves import urllib
def rnumlistwithoutreplacement(min, max):
    """Returns a randomly ordered list of the integers between min and max"""
    if checkquota() < 1:
        raise Exception("Your www.random.org quota has already run out.")
    req = urllib.request.Request(build_request_parameterNR(min, max))
    req.add_header('User-Agent', 'randomwrapy/0.1 very alpha')
    # Fetch the sequence from www.random.org and split it into tokens.
    response = urllib.request.build_opener().open(req)
    return response.read().split()
# helper
# helper
def build_request_parameterNR(min, max):
    """Build the www.random.org /sequences URL for the range [min, max]."""
    base = 'http://www.random.org/sequences/?min='
    suffix = '&format=plain&rnd=new'
    return '%s%s&max=%s%s' % (base, min, max, suffix)
def rnumlistwithreplacement(howmany, max, min=0):
    """Returns a list of howmany integers with a maximum value = max.

    The minimum value defaults to zero."""
    if checkquota() < 1:
        raise Exception("Your www.random.org quota has already run out.")
    req = urllib.request.Request(build_request_parameterWR(howmany, min, max))
    req.add_header('User-Agent', 'randomwrapy/0.1 very alpha')
    # Fetch the integers from www.random.org, one per line, and tokenize.
    response = urllib.request.build_opener().open(req)
    return response.read().split()
"""
Example usage:
Roll a dice 12 times (returning integers in the range [0,5]):
rnumlistwithreplacement(12, 5)
Roll a dice 12 times (returning integers in the more familiar range [1,6]):
rnumlistwithreplacement(12, 6, 1)
"""
# helper
# helper
def build_request_parameterWR(howmany, min, max):
    """Build the www.random.org /integers URL for howmany values in [min, max]."""
    base = 'http://www.random.org/integers/?num='
    suffix = '&col=1&base=10&format=plain&rnd=new'
    return '%s%s&min=%s&max=%s%s' % (base, howmany, min, max, suffix)
# next function is prototype for integration with random module of python
# see worldrandom module for a more developed implementation
def rrandom():
    """Get the next random number in the range [0.0, 1.0].
    Returns a float.

    Implemented by requesting a single integer in [0, 1000000000] from
    www.random.org and scaling it down.
    """
    # The original re-imported urllib.request/error/parse here, which
    # shadowed the module-level six.moves.urllib import (and fails outright
    # on Python 2).  The module-level import already provides everything
    # needed, so the local imports are removed.
    if checkquota() < 1:
        raise Exception("Your www.random.org quota has already run out.")
    request = urllib.request.Request(
        'http://www.random.org/integers/?num=1&min=0&max=1000000000&col=1&base=10&format=plain&rnd=new')
    request.add_header('User-Agent', 'randomwrapy/0.1 very alpha')
    opener = urllib.request.build_opener()
    numlist = opener.open(request).read()
    num = numlist.split()[0]
    return float(num) / 1000000000
def checkquota():
    """Return the remaining www.random.org bit quota for this IP as an int."""
    req = urllib.request.Request("http://www.random.org/quota/?format=plain")
    req.add_header('User-Agent', 'randomwrapy/0.1 very alpha')
    remaining = urllib.request.build_opener().open(req).read()
    return int(remaining)
def reportquota():
    """Print how many www.random.org bits this IP address has left."""
    req = urllib.request.Request("http://www.random.org/quota/?format=plain")
    req.add_header('User-Agent', 'randomwrapy/0.1 very alpha')
    quota = urllib.request.build_opener().open(req).read()
    print("This IP address has", quota,
          "bits left. Visit http://www.random.org/quota for more information.")
|
Erotemic/utool
|
utool/_internal/randomwrap.py
|
Python
|
apache-2.0
| 5,537
|
[
"VisIt"
] |
27f6c18f339070145e41c2cb7dc3e8d51b7bcbffb931c4c3ef03a6c85b2e9a91
|
##########################################################################
#
# Copyright 2008-2010 VMware, Inc.
# All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
##########################################################################/
"""C basic types"""
import debug
class Type:
    """Base class for all types."""

    # All tags handed out so far; used to keep generated tags unique.
    __tags = set()

    def __init__(self, expr, tag = None):
        # expr is the C/C++ type expression; tag is an identifier-safe
        # short name derived from it when not given explicitly.
        self.expr = expr
        # Generate a default tag, used when naming functions that will operate
        # on this type, so it should preferrably be something representative of
        # the type.
        if tag is None:
            if expr is not None:
                tag = ''.join([c for c in expr if c.isalnum() or c in '_'])
            else:
                # NOTE(review): long-standing typo of 'anonymous'; kept as-is
                # since tags may surface in generated identifiers.
                tag = 'anonynoums'
        else:
            # Explicit tags must already be identifier-safe.
            for c in tag:
                assert c.isalnum() or c in '_'
        # Ensure it is unique.
        if tag in Type.__tags:
            suffix = 1
            while tag + str(suffix) in Type.__tags:
                suffix += 1
            tag += str(suffix)
        assert tag not in Type.__tags
        Type.__tags.add(tag)
        self.tag = tag

    def __str__(self):
        """Return the C/C++ type expression for this type."""
        return self.expr

    def visit(self, visitor, *args, **kwargs):
        # Subclasses dispatch to the matching Visitor.visitXxx method.
        raise NotImplementedError

    def mutable(self):
        '''Return a mutable version of this type.

        Convenience wrapper around MutableRebuilder.'''
        visitor = MutableRebuilder()
        return visitor.visit(self)
class _Void(Type):
    """Singleton void type."""

    def __init__(self):
        Type.__init__(self, "void")

    def visit(self, visitor, *args, **kwargs):
        return visitor.visitVoid(self, *args, **kwargs)


# The single shared instance of the void type.
Void = _Void()
class Literal(Type):
    """Class to describe literal types.

    Types which are not defined in terms of other types, such as integers and
    floats."""

    def __init__(self, expr, kind):
        # kind is a coarse category string ("Bool", "SInt", "UInt", "Float",
        # "Double") that visitors can use to pick a representation.
        Type.__init__(self, expr)
        self.kind = kind

    def visit(self, visitor, *args, **kwargs):
        return visitor.visitLiteral(self, *args, **kwargs)


# Pre-defined singletons for the basic C/C++ literal types.
Bool = Literal("bool", "Bool")
SChar = Literal("signed char", "SInt")
UChar = Literal("unsigned char", "UInt")
Short = Literal("short", "SInt")
Int = Literal("int", "SInt")
Long = Literal("long", "SInt")
LongLong = Literal("long long", "SInt")
UShort = Literal("unsigned short", "UInt")
UInt = Literal("unsigned int", "UInt")
ULong = Literal("unsigned long", "UInt")
ULongLong = Literal("unsigned long long", "UInt")
Float = Literal("float", "Float")
Double = Literal("double", "Double")
SizeT = Literal("size_t", "UInt")
# NOTE(review): plain char is treated as signed here.
Char = Literal("char", "SInt")
WChar = Literal("wchar_t", "SInt")
Int8 = Literal("int8_t", "SInt")
UInt8 = Literal("uint8_t", "UInt")
Int16 = Literal("int16_t", "SInt")
UInt16 = Literal("uint16_t", "UInt")
Int32 = Literal("int32_t", "SInt")
UInt32 = Literal("uint32_t", "UInt")
Int64 = Literal("int64_t", "SInt")
UInt64 = Literal("uint64_t", "UInt")
IntPtr = Literal("intptr_t", "SInt")
UIntPtr = Literal("uintptr_t", "UInt")
class Const(Type):
    """A const-qualified version of another type."""

    def __init__(self, type):
        # While "const foo" and "foo const" are synonymous, "const foo *" and
        # "foo * const" are not quite the same, and some compilers do enforce
        # strict const correctness.
        if type.expr.startswith("const ") or '*' in type.expr:
            expr = type.expr + " const"
        else:
            # The most legible
            expr = "const " + type.expr
        Type.__init__(self, expr, 'C' + type.tag)
        self.type = type

    def visit(self, visitor, *args, **kwargs):
        return visitor.visitConst(self, *args, **kwargs)
class Pointer(Type):
    """Plain pointer to another type."""

    def __init__(self, type):
        Type.__init__(self, type.expr + " *", 'P' + type.tag)
        self.type = type

    def visit(self, visitor, *args, **kwargs):
        return visitor.visitPointer(self, *args, **kwargs)


class IntPointer(Type):
    '''Integer encoded as a pointer.'''
    # NOTE(review): unlike its sibling pointer classes this defines no
    # __init__, so instances use Type.__init__(expr, tag) directly —
    # confirm that is intentional.

    def visit(self, visitor, *args, **kwargs):
        return visitor.visitIntPointer(self, *args, **kwargs)


class ObjPointer(Type):
    '''Pointer to an object.'''

    def __init__(self, type):
        Type.__init__(self, type.expr + " *", 'P' + type.tag)
        self.type = type

    def visit(self, visitor, *args, **kwargs):
        return visitor.visitObjPointer(self, *args, **kwargs)


class LinearPointer(Type):
    '''Pointer to a linear range of memory.'''

    def __init__(self, type, size = None):
        Type.__init__(self, type.expr + " *", 'P' + type.tag)
        self.type = type
        # Optional extent of the pointed-to range; None when unknown.
        self.size = size

    def visit(self, visitor, *args, **kwargs):
        return visitor.visitLinearPointer(self, *args, **kwargs)
class Reference(Type):
    '''C++ references.'''

    def __init__(self, type):
        Type.__init__(self, type.expr + " &", 'R' + type.tag)
        self.type = type

    def visit(self, visitor, *args, **kwargs):
        return visitor.visitReference(self, *args, **kwargs)


class Handle(Type):
    """A named handle value over an underlying type."""

    def __init__(self, name, type, range=None, key=None):
        Type.__init__(self, type.expr, 'P' + type.tag)
        self.name = name
        self.type = type
        # range/key semantics are not visible here; presumably an optional
        # handle-count and a namespacing key — confirm against callers.
        self.range = range
        self.key = key

    def visit(self, visitor, *args, **kwargs):
        return visitor.visitHandle(self, *args, **kwargs)


def ConstPointer(type):
    # Shorthand for a pointer to a const-qualified type.
    return Pointer(Const(type))
class Enum(Type):
    # Class-level counter so each Enum instance gets a distinct id.
    __id = 0

    def __init__(self, name, values):
        Type.__init__(self, name)
        self.id = Enum.__id
        Enum.__id += 1
        self.values = list(values)

    def visit(self, visitor, *args, **kwargs):
        return visitor.visitEnum(self, *args, **kwargs)


def FakeEnum(type, values):
    # An enum over an existing type's expression (values of another type
    # treated as enumerants).
    return Enum(type.expr, values)


class Bitmask(Type):
    # Class-level counter so each Bitmask instance gets a distinct id.
    __id = 0

    def __init__(self, type, values):
        Type.__init__(self, type.expr)
        self.id = Bitmask.__id
        Bitmask.__id += 1
        self.type = type      # underlying type
        self.values = values  # flag values

    def visit(self, visitor, *args, **kwargs):
        return visitor.visitBitmask(self, *args, **kwargs)


# Flags are just a bitmask.
Flags = Bitmask
class Array(Type):
    """An array of elements, expressed in C as a pointer plus a length."""

    def __init__(self, type, length):
        Type.__init__(self, type.expr + " *")
        self.type = type
        self.length = length

    def visit(self, visitor, *args, **kwargs):
        return visitor.visitArray(self, *args, **kwargs)


class Blob(Type):
    """An uninterpreted chunk of memory of a given size."""

    def __init__(self, type, size):
        Type.__init__(self, type.expr + ' *')
        self.type = type
        self.size = size

    def visit(self, visitor, *args, **kwargs):
        return visitor.visitBlob(self, *args, **kwargs)
class Struct(Type):
    # Class-level counter so each Struct instance gets a distinct id.
    __id = 0

    def __init__(self, name, members):
        Type.__init__(self, name)
        self.id = Struct.__id
        Struct.__id += 1
        self.name = name
        # members is a sequence of (type, member name) pairs.
        self.members = members

    def visit(self, visitor, *args, **kwargs):
        return visitor.visitStruct(self, *args, **kwargs)


def Union(kindExpr, kindTypes, contextLess=True):
    # A union is modelled as a Polymorphic over single-member anonymous
    # structs, one per (case, type, member name) triple.
    switchTypes = []
    for kindCase, kindType, kindMemberName in kindTypes:
        switchType = Struct(None, [(kindType, kindMemberName)])
        switchTypes.append((kindCase, switchType))
    return Polymorphic(kindExpr, switchTypes, contextLess=contextLess)
class Alias(Type):
    """A typedef: a new expression for an existing underlying type."""

    def __init__(self, expr, type):
        Type.__init__(self, expr)
        self.type = type

    def visit(self, visitor, *args, **kwargs):
        return visitor.visitAlias(self, *args, **kwargs)


class Arg:
    """A function/method argument: a type plus a name and in/out direction."""

    def __init__(self, type, name, input=True, output=False):
        self.type = type
        self.name = name
        self.input = input
        self.output = output
        # Position in the argument list; filled in by Function.__init__.
        self.index = None

    def __str__(self):
        return '%s %s' % (self.type, self.name)


def In(type, name):
    # Input-only argument.
    return Arg(type, name, input=True, output=False)


def Out(type, name):
    # Output-only argument.
    return Arg(type, name, input=False, output=True)


def InOut(type, name):
    # Argument used both for input and output.
    return Arg(type, name, input=True, output=True)
class Function:
    """A C function: return type, name, arguments, and metadata flags."""

    def __init__(self, type, name, args, call = '', fail = None, sideeffects=True, internal=False):
        # type is the return type; call is the calling convention string
        # (e.g. '__stdcall'); fail's semantics are not visible here —
        # presumably a failure return value, confirm against callers.
        self.type = type
        self.name = name
        self.args = []
        index = 0
        for arg in args:
            if not isinstance(arg, Arg):
                # Accept (type, name) tuples or bare types as shorthand;
                # bare types get a synthesized "argN" name.
                if isinstance(arg, tuple):
                    arg_type, arg_name = arg
                else:
                    arg_type = arg
                    arg_name = "arg%u" % index
                arg = Arg(arg_type, arg_name)
            # Every arg (given or synthesized) gets its positional index.
            arg.index = index
            index += 1
            self.args.append(arg)
        self.call = call
        self.fail = fail
        self.sideeffects = sideeffects
        self.internal = internal

    def prototype(self, name=None):
        """Render the C prototype string, optionally under a different name."""
        if name is not None:
            name = name.strip()
        else:
            name = self.name
        s = name
        if self.call:
            s = self.call + ' ' + s
        if name.startswith('*'):
            # Function-pointer style names need parentheses.
            s = '(' + s + ')'
        s = self.type.expr + ' ' + s
        s += "("
        if self.args:
            s += ", ".join(["%s %s" % (arg.type, arg.name) for arg in self.args])
        else:
            s += "void"
        s += ")"
        return s

    def argNames(self):
        """Return the argument names, in declaration order."""
        return [arg.name for arg in self.args]

    def getArgByName(self, name):
        """Return the argument with the given name, or None."""
        for arg in self.args:
            if arg.name == name:
                return arg
        return None
def StdFunction(*args, **kwargs):
    # Function whose calling convention defaults to __stdcall.
    kwargs.setdefault('call', '__stdcall')
    return Function(*args, **kwargs)


def FunctionPointer(type, name, args, **kwargs):
    # XXX: We should probably treat function pointers (callbacks or not) in a generic fashion
    # For now a function pointer is just an opaque type named after it;
    # the return type and args are ignored.
    return Opaque(name)
class Interface(Type):
    """An interface type: a named type with methods and an optional base."""

    def __init__(self, name, base=None):
        Type.__init__(self, name)
        self.name = name
        self.base = base
        self.methods = []

    def visit(self, visitor, *args, **kwargs):
        return visitor.visitInterface(self, *args, **kwargs)

    def getMethodByName(self, name):
        """Return the method with the given name (searching bases too), or None."""
        for method in self.iterMethods():
            if method.name == name:
                return method
        return None

    def iterMethods(self):
        """Yield all methods, base-interface methods first."""
        if self.base is not None:
            for method in self.base.iterMethods():
                yield method
        for method in self.methods:
            yield method
        # FIX: the original ended with `raise StopIteration`, which under
        # PEP 479 (Python 3.7+) is converted into RuntimeError; simply
        # returning is the correct (and Python 2 equivalent) way to finish.

    def iterBases(self):
        """Yield this interface followed by each base, most-derived first."""
        iface = self
        while iface is not None:
            yield iface
            iface = iface.base
        # FIX: removed trailing `raise StopIteration` (PEP 479).

    def hasBase(self, *bases):
        """Return True if any of *bases is this interface or an ancestor."""
        for iface in self.iterBases():
            if iface in bases:
                return True
        return False

    def iterBaseMethods(self):
        """Yield (interface, method) pairs for all methods, bases first."""
        if self.base is not None:
            for iface, method in self.base.iterBaseMethods():
                yield iface, method
        for method in self.methods:
            yield self, method
        # FIX: removed trailing `raise StopIteration` (PEP 479).
class Method(Function):
    """An interface method; argument indices start at 1 (0 is the receiver)."""

    def __init__(self, type, name, args, call = '', const=False, sideeffects=True):
        # NOTE(review): this assert forces call='__stdcall' (i.e. forces use
        # of StdMethod); a bare Method() with the default call='' fails here.
        # Presumably intentional — confirm before relying on it.
        assert call == '__stdcall'
        Function.__init__(self, type, name, args, call = call, sideeffects=sideeffects)
        # Shift argument indices up by one to leave room for `this`.
        for index in range(len(self.args)):
            self.args[index].index = index + 1
        self.const = const

    def prototype(self, name=None):
        # Same as Function.prototype, plus a trailing const qualifier.
        s = Function.prototype(self, name)
        if self.const:
            s += ' const'
        return s


def StdMethod(*args, **kwargs):
    # Method whose calling convention defaults to __stdcall.
    kwargs.setdefault('call', '__stdcall')
    return Method(*args, **kwargs)
class String(Type):
    '''Human-legible character string.'''

    def __init__(self, type = Char, length = None, wide = False):
        assert isinstance(type, Type)
        Type.__init__(self, type.expr + ' *')
        self.type = type      # character type (e.g. Char, WChar)
        self.length = length  # optional fixed length; None when unbounded
        self.wide = wide      # True for wide-character strings

    def visit(self, visitor, *args, **kwargs):
        return visitor.visitString(self, *args, **kwargs)


class Opaque(Type):
    '''Opaque pointer.'''

    def __init__(self, expr):
        Type.__init__(self, expr)

    def visit(self, visitor, *args, **kwargs):
        return visitor.visitOpaque(self, *args, **kwargs)


def OpaquePointer(type, *args):
    # Pointer whose pointee is not interpreted; extra args are ignored.
    return Opaque(type.expr + ' *')


def OpaqueArray(type, size):
    # Array whose contents are not interpreted; size is ignored here.
    return Opaque(type.expr + ' *')


def OpaqueBlob(type, size):
    # Memory blob that is not interpreted; size is ignored here.
    return Opaque(type.expr + ' *')
class Polymorphic(Type):
    """A type that resolves to one of several concrete types, selected by
    evaluating switchExpr."""

    def __init__(self, switchExpr, switchTypes, defaultType=None, contextLess=True):
        if defaultType is None:
            # Without a default the polymorphic type has no expression of
            # its own and cannot be context-less.
            Type.__init__(self, None)
            contextLess = False
        else:
            Type.__init__(self, defaultType.expr)
        self.switchExpr = switchExpr
        # switchTypes is a sequence of (case expression, Type) pairs.
        self.switchTypes = switchTypes
        self.defaultType = defaultType
        self.contextLess = contextLess

    def visit(self, visitor, *args, **kwargs):
        return visitor.visitPolymorphic(self, *args, **kwargs)

    def iterSwitch(self):
        """Yield (case-label list, type) pairs, folding together cases that
        share the same type (the default case comes first if present)."""
        cases = []
        types = []
        if self.defaultType is not None:
            cases.append(['default'])
            types.append(self.defaultType)
        for expr, type in self.switchTypes:
            case = 'case %s' % expr
            try:
                i = types.index(type)
            except ValueError:
                # First time this type appears: open a new case group.
                cases.append([case])
                types.append(type)
            else:
                # Type already seen: fold this case into its group.
                cases[i].append(case)
        return zip(cases, types)


def EnumPolymorphic(enumName, switchExpr, switchTypes, defaultType, contextLess=True):
    # Build an Enum of all case expressions alongside the Polymorphic itself.
    enumValues = [expr for expr, type in switchTypes]
    enum = Enum(enumName, enumValues)
    polymorphic = Polymorphic(switchExpr, switchTypes, defaultType, contextLess)
    return enum, polymorphic
class Visitor:
    '''Abstract visitor for the type hierarchy.'''

    def visit(self, type, *args, **kwargs):
        # Double-dispatch: each Type subclass calls back into the matching
        # visitXxx method below.
        return type.visit(self, *args, **kwargs)

    # All handlers below are abstract; subclasses override the ones they
    # support.

    def visitVoid(self, void, *args, **kwargs):
        raise NotImplementedError

    def visitLiteral(self, literal, *args, **kwargs):
        raise NotImplementedError

    def visitString(self, string, *args, **kwargs):
        raise NotImplementedError

    def visitConst(self, const, *args, **kwargs):
        raise NotImplementedError

    def visitStruct(self, struct, *args, **kwargs):
        raise NotImplementedError

    def visitArray(self, array, *args, **kwargs):
        raise NotImplementedError

    def visitBlob(self, blob, *args, **kwargs):
        raise NotImplementedError

    def visitEnum(self, enum, *args, **kwargs):
        raise NotImplementedError

    def visitBitmask(self, bitmask, *args, **kwargs):
        raise NotImplementedError

    def visitPointer(self, pointer, *args, **kwargs):
        raise NotImplementedError

    def visitIntPointer(self, pointer, *args, **kwargs):
        raise NotImplementedError

    def visitObjPointer(self, pointer, *args, **kwargs):
        raise NotImplementedError

    def visitLinearPointer(self, pointer, *args, **kwargs):
        raise NotImplementedError

    def visitReference(self, reference, *args, **kwargs):
        raise NotImplementedError

    def visitHandle(self, handle, *args, **kwargs):
        raise NotImplementedError

    def visitAlias(self, alias, *args, **kwargs):
        raise NotImplementedError

    def visitOpaque(self, opaque, *args, **kwargs):
        raise NotImplementedError

    def visitInterface(self, interface, *args, **kwargs):
        raise NotImplementedError

    def visitPolymorphic(self, polymorphic, *args, **kwargs):
        raise NotImplementedError
        #return self.visit(polymorphic.defaultType, *args, **kwargs)
class OnceVisitor(Visitor):
    '''Visitor that guarantees that each type is visited only once.'''

    def __init__(self):
        self.__visited = set()

    def visit(self, type, *args, **kwargs):
        # Guard clause: types seen before are skipped entirely.
        if type in self.__visited:
            return None
        self.__visited.add(type)
        return type.visit(self, *args, **kwargs)
class Rebuilder(Visitor):
    '''Visitor which rebuild types as it visits them.

    By itself it is a no-op -- it is intended to be overwritten.
    '''

    def visitVoid(self, void):
        return void

    def visitLiteral(self, literal):
        return literal

    def visitString(self, string):
        # Return the original object when nothing changed, so identity
        # comparisons elsewhere keep working.
        string_type = self.visit(string.type)
        if string_type is string.type:
            return string
        else:
            return String(string_type, string.length, string.wide)

    def visitConst(self, const):
        const_type = self.visit(const.type)
        if const_type is const.type:
            return const
        else:
            return Const(const_type)

    def visitStruct(self, struct):
        # NOTE: always rebuilds, with no identity shortcut like the other
        # handlers have.
        members = [(self.visit(type), name) for type, name in struct.members]
        return Struct(struct.name, members)

    def visitArray(self, array):
        type = self.visit(array.type)
        return Array(type, array.length)

    def visitBlob(self, blob):
        type = self.visit(blob.type)
        return Blob(type, blob.size)

    def visitEnum(self, enum):
        return enum

    def visitBitmask(self, bitmask):
        type = self.visit(bitmask.type)
        return Bitmask(type, bitmask.values)

    def visitPointer(self, pointer):
        pointer_type = self.visit(pointer.type)
        if pointer_type is pointer.type:
            return pointer
        else:
            return Pointer(pointer_type)

    def visitIntPointer(self, pointer):
        return pointer

    def visitObjPointer(self, pointer):
        pointer_type = self.visit(pointer.type)
        if pointer_type is pointer.type:
            return pointer
        else:
            return ObjPointer(pointer_type)

    def visitLinearPointer(self, pointer):
        # NOTE(review): the rebuilt LinearPointer drops pointer.size —
        # possibly an oversight; confirm before relying on it.
        pointer_type = self.visit(pointer.type)
        if pointer_type is pointer.type:
            return pointer
        else:
            return LinearPointer(pointer_type)

    def visitReference(self, reference):
        reference_type = self.visit(reference.type)
        if reference_type is reference.type:
            return reference
        else:
            return Reference(reference_type)

    def visitHandle(self, handle):
        handle_type = self.visit(handle.type)
        if handle_type is handle.type:
            return handle
        else:
            return Handle(handle.name, handle_type, range=handle.range, key=handle.key)

    def visitAlias(self, alias):
        alias_type = self.visit(alias.type)
        if alias_type is alias.type:
            return alias
        else:
            return Alias(alias.expr, alias_type)

    def visitOpaque(self, opaque):
        return opaque

    def visitInterface(self, interface, *args, **kwargs):
        return interface

    def visitPolymorphic(self, polymorphic):
        switchExpr = polymorphic.switchExpr
        switchTypes = [(expr, self.visit(type)) for expr, type in polymorphic.switchTypes]
        if polymorphic.defaultType is None:
            defaultType = None
        else:
            defaultType = self.visit(polymorphic.defaultType)
        return Polymorphic(switchExpr, switchTypes, defaultType, polymorphic.contextLess)
class MutableRebuilder(Rebuilder):
    '''Type visitor which derives a mutable type.'''

    def visitString(self, string):
        # Strings are usable as-is.
        return string

    def visitConst(self, const):
        # Strip out const qualifier
        return const.type

    def visitAlias(self, alias):
        # Tear the alias on type changes
        rebuilt = self.visit(alias.type)
        return alias if rebuilt is alias.type else rebuilt

    def visitReference(self, reference):
        # Strip out references
        return reference.type
class Traverser(Visitor):
    '''Visitor which traverses all types.'''
    # (Original docstring read "Visitor which all types." — word missing.)

    def visitVoid(self, void, *args, **kwargs):
        pass

    def visitLiteral(self, literal, *args, **kwargs):
        pass

    def visitString(self, string, *args, **kwargs):
        pass

    def visitConst(self, const, *args, **kwargs):
        self.visit(const.type, *args, **kwargs)

    def visitStruct(self, struct, *args, **kwargs):
        for type, name in struct.members:
            self.visit(type, *args, **kwargs)

    def visitArray(self, array, *args, **kwargs):
        self.visit(array.type, *args, **kwargs)

    def visitBlob(self, array, *args, **kwargs):
        # Blob contents are uninterpreted, so there is nothing to recurse into.
        pass

    def visitEnum(self, enum, *args, **kwargs):
        pass

    def visitBitmask(self, bitmask, *args, **kwargs):
        self.visit(bitmask.type, *args, **kwargs)

    def visitPointer(self, pointer, *args, **kwargs):
        self.visit(pointer.type, *args, **kwargs)

    def visitIntPointer(self, pointer, *args, **kwargs):
        pass

    def visitObjPointer(self, pointer, *args, **kwargs):
        self.visit(pointer.type, *args, **kwargs)

    def visitLinearPointer(self, pointer, *args, **kwargs):
        self.visit(pointer.type, *args, **kwargs)

    def visitReference(self, reference, *args, **kwargs):
        self.visit(reference.type, *args, **kwargs)

    def visitHandle(self, handle, *args, **kwargs):
        self.visit(handle.type, *args, **kwargs)

    def visitAlias(self, alias, *args, **kwargs):
        self.visit(alias.type, *args, **kwargs)

    def visitOpaque(self, opaque, *args, **kwargs):
        pass

    def visitInterface(self, interface, *args, **kwargs):
        # Recurse into the base, then every method's argument and return types.
        if interface.base is not None:
            self.visit(interface.base, *args, **kwargs)
        for method in interface.iterMethods():
            for arg in method.args:
                self.visit(arg.type, *args, **kwargs)
            self.visit(method.type, *args, **kwargs)

    def visitPolymorphic(self, polymorphic, *args, **kwargs):
        for expr, type in polymorphic.switchTypes:
            self.visit(type, *args, **kwargs)
        if polymorphic.defaultType is not None:
            self.visit(polymorphic.defaultType, *args, **kwargs)
class Collector(Traverser):
    '''Visitor which collects all unique types as it traverses them.'''

    def __init__(self):
        self.__visited = set()
        self.types = []

    def visit(self, type):
        # Post-order: children are appended before the type itself, and
        # each type is collected at most once.
        if type not in self.__visited:
            self.__visited.add(type)
            Visitor.visit(self, type)
            self.types.append(type)
class ExpanderMixin:
    '''Mixin class that provides a bunch of methods to expand C expressions
    from the specifications.'''
    # Linked stacks stored as nested (top, rest) tuples, tracking the
    # current struct instance expression and the current array index.
    __structs = None
    __indices = None
    def expand(self, expr):
        # Expand a C expression, replacing certain variables
        # Non-string expressions (e.g. numeric literals) pass through as-is.
        if not isinstance(expr, basestring):
            return expr
        variables = {}
        # '{self}' expands to the enclosing struct instance and '{i}' to the
        # enclosing array index, when those contexts are active.
        if self.__structs is not None:
            variables['self'] = '(%s)' % self.__structs[0]
        if self.__indices is not None:
            variables['i'] = self.__indices[0]
        expandedExpr = expr.format(**variables)
        if expandedExpr != expr and 0:
            # Debug trace of expansions; disabled by the 'and 0'.
            sys.stderr.write(" %r -> %r\n" % (expr, expandedExpr))
        return expandedExpr
    def visitMember(self, member, structInstance, *args, **kwargs):
        # Visit a struct member with the struct-instance context pushed, so
        # '{self}' expands correctly inside the member's expressions.
        memberType, memberName = member
        if memberName is None:
            # Anonymous structure/union member
            memberInstance = structInstance
        else:
            memberInstance = '(%s).%s' % (structInstance, memberName)
        self.__structs = (structInstance, self.__structs)
        try:
            return self.visit(memberType, memberInstance, *args, **kwargs)
        finally:
            # Pop even if visiting raised, keeping the stack balanced.
            _, self.__structs = self.__structs
    def visitElement(self, elementIndex, elementType, *args, **kwargs):
        # Visit an array element with the index context pushed for '{i}'.
        self.__indices = (elementIndex, self.__indices)
        try:
            return self.visit(elementType, *args, **kwargs)
        finally:
            _, self.__indices = self.__indices
class Module:
    '''A named collection of functions, interfaces, and headers.'''
    def __init__(self, name = None):
        self.name = name
        self.headers = []
        self.functions = []
        self.interfaces = []
    def addFunctions(self, functions):
        '''Append the given functions to this module.'''
        self.functions.extend(functions)
    def addInterfaces(self, interfaces):
        '''Append the given interfaces to this module.'''
        self.interfaces.extend(interfaces)
    def mergeModule(self, module):
        '''Absorb another module's headers, functions, and interfaces.'''
        for attr in ('headers', 'functions', 'interfaces'):
            getattr(self, attr).extend(getattr(module, attr))
    def getFunctionByName(self, name):
        '''Return the first function with the given name, or None.'''
        matches = (f for f in self.functions if f.name == name)
        return next(matches, None)
class API:
    '''API abstraction.
    Essentially, a collection of types, functions, and interfaces.
    '''
    def __init__(self, modules = None):
        self.modules = []
        if modules is not None:
            self.modules.extend(modules)
    def getAllTypes(self):
        '''Return every distinct type used by this API, dependencies first.'''
        collector = Collector()
        for module in self.modules:
            for function in module.functions:
                self.__collectCallable(collector, function)
            for interface in module.interfaces:
                collector.visit(interface)
                for method in interface.iterMethods():
                    self.__collectCallable(collector, method)
        return collector.types
    @staticmethod
    def __collectCallable(collector, callable_):
        # Visit all argument types before the return type.
        for arg in callable_.args:
            collector.visit(arg.type)
        collector.visit(callable_.type)
    def getAllFunctions(self):
        '''Return the functions of every module, concatenated.'''
        return [f for module in self.modules for f in module.functions]
    def getAllInterfaces(self):
        '''Return all interfaces, both referenced by types and declared directly.'''
        allTypes = self.getAllTypes()
        interfaces = [t for t in allTypes if isinstance(t, Interface)]
        for module in self.modules:
            for iface in module.interfaces:
                if iface not in interfaces:
                    interfaces.append(iface)
        return interfaces
    def addModule(self, module):
        '''Register a module with this API.'''
        self.modules.append(module)
    def getFunctionByName(self, name):
        '''Return the first function with the given name across all modules, or None.'''
        matches = (f for module in self.modules
                     for f in module.functions if f.name == name)
        return next(matches, None)
# C string (i.e., zero terminated)
CString = String(Char)
# Wide (wchar_t) variant of the zero-terminated string.
WString = String(WChar, wide=True)
# Const-qualified variants, matching 'const char *' / 'const wchar_t *'.
ConstCString = String(Const(Char))
ConstWString = String(Const(WChar), wide=True)
|
PeterLValve/apitrace
|
specs/stdapi.py
|
Python
|
mit
| 27,491
|
[
"VisIt"
] |
efaef5a5d790df62fe365e5b0f27df033207415e311388d2b2db1209e831a374
|
# -*- coding: utf-8 -*-
#
# Molecular Blender
# Filename: stylers.py
# Copyright (C) 2014 Shane Parker, Joshua Szekely
#
# This file is part of Molecular Blender.
#
# Molecular Blender is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# Molecular Blender is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Molecular Blender; see COPYING.
# If not, see <http://www.gnu.org/licenses/>.
#
"""Collection of classes to construct default materials/colors for plotting"""
import bpy
import mathutils
class PaletteElementStyler(object):
    """Base class that builds Blender materials colored per chemical element.

    Subclasses must provide ``self.palette``, a mapping from lowercase
    element symbol to an RGB triple in [0, 1].
    """
    # Fixed colors shared by every palette.
    bondcolor = (0.9, 0.9, 0.9)
    ringcolor = (0.0, 0.9, 0.0)
    chargeminuscolor = (1.0, 0.0, 0.0)
    # NOTE(review): identical to the minus color -- confirm this is intended.
    chargepluscolor = (1.0, 0.0, 0.0)
    isominuscolor = (0.0, 1.0, 0.0)
    isopluscolor = (1.0, 0.0, 1.0)

    def _new_material(self, name, rgb):
        """Create a new Blender material with the given diffuse color."""
        mat = bpy.data.materials.new(name)
        mat.diffuse_color = mathutils.Color(rgb)
        return mat

    def atom_material(self, name, element):
        """Return atom material colored by the element's palette entry"""
        return self._new_material(name, self.element_color(element))

    def bond_material(self, name, bond):
        """Return bond material"""
        return self._new_material(name, self.bondcolor)

    def ring_material(self, name):
        """Return ring material"""
        return self._new_material(name, self.ringcolor)

    def charge_material(self, pname, mname, element):
        """Return (plus, minus) charge materials"""
        pmat = self._new_material(pname, self.chargepluscolor)
        mmat = self._new_material(mname, self.chargeminuscolor)
        return pmat, mmat

    def isosurface_material(self, isoname):
        """Return isosurface material; a name containing 'plus' selects the plus color"""
        rgb = self.isopluscolor if "plus" in isoname else self.isominuscolor
        return self._new_material(isoname, rgb)

    def element_color(self, element):
        """Returns RGB triple for element from the subclass palette"""
        return self.palette[element.symbol]
class DefaultElementStyler(PaletteElementStyler):
    """Color elements with Molecular Blender defaults"""
    def __init__(self):
        """Build the default palette: lowercase element symbol -> RGB in [0, 1]"""
        # NOTE(review): values resemble the common CPK/JMol element coloring
        # (white hydrogen, grey carbon, blue nitrogen, red oxygen) -- confirm source.
        self.palette = {
            'h': (1.000, 1.000, 1.000),
            'he': (0.851, 1.000, 1.000),
            'li': (0.800, 0.502, 1.000),
            'be': (0.761, 1.000, 0.000),
            'b': (1.000, 0.710, 0.710),
            'c': (0.565, 0.565, 0.565),
            'n': (0.188, 0.314, 0.973),
            'o': (1.000, 0.051, 0.051),
            'f': (0.565, 0.878, 0.314),
            'ne': (0.702, 0.890, 0.961),
            'na': (0.671, 0.361, 0.949),
            'mg': (0.541, 1.000, 0.000),
            'al': (0.749, 0.651, 0.651),
            'si': (0.941, 0.784, 0.627),
            'p': (1.000, 0.502, 0.000),
            's': (1.000, 1.000, 0.188),
            'cl': (0.122, 0.941, 0.122),
            'ar': (0.502, 0.820, 0.890),
            'k': (0.561, 0.251, 0.831),
            'ca': (0.239, 1.000, 0.000),
            'sc': (0.902, 0.902, 0.902),
            'ti': (0.749, 0.761, 0.780),
            'v': (0.651, 0.651, 0.671),
            'cr': (0.541, 0.600, 0.780),
            'mn': (0.612, 0.478, 0.780),
            'fe': (0.878, 0.400, 0.200),
            'co': (0.941, 0.565, 0.627),
            'ni': (0.314, 0.816, 0.314),
            'cu': (0.784, 0.502, 0.200),
            'zn': (0.490, 0.502, 0.690),
            'ga': (0.761, 0.561, 0.561),
            'ge': (0.400, 0.561, 0.561),
            'as': (0.741, 0.502, 0.890),
            'se': (1.000, 0.631, 0.000),
            'br': (0.651, 0.161, 0.161),
            'kr': (0.361, 0.722, 0.820),
            'rb': (0.439, 0.180, 0.690),
            'sr': (0.000, 1.000, 0.000),
            'y': (0.580, 1.000, 1.000),
            'zr': (0.580, 0.878, 0.878),
            'nb': (0.451, 0.761, 0.788),
            'mo': (0.329, 0.710, 0.710),
            'tc': (0.231, 0.620, 0.620),
            'ru': (0.141, 0.561, 0.561),
            'rh': (0.039, 0.490, 0.549),
            'pd': (0.000, 0.412, 0.522),
            'ag': (0.753, 0.753, 0.753),
            'cd': (1.000, 0.851, 0.561),
            'in': (0.651, 0.459, 0.451),
            'sn': (0.400, 0.502, 0.502),
            'sb': (0.620, 0.388, 0.710),
            'te': (0.831, 0.478, 0.000),
            'i': (0.580, 0.000, 0.580),
            'xe': (0.259, 0.620, 0.690),
            'cs': (0.341, 0.090, 0.561),
            'ba': (0.000, 0.788, 0.000),
            'la': (0.439, 0.831, 1.000),
            'ce': (1.000, 1.000, 0.780),
            'pr': (0.851, 1.000, 0.780),
            'nd': (0.780, 1.000, 0.780),
            'pm': (0.639, 1.000, 0.780),
            'sm': (0.561, 1.000, 0.780),
            'eu': (0.380, 1.000, 0.780),
            'gd': (0.271, 1.000, 0.780),
            'tb': (0.188, 1.000, 0.780),
            'dy': (0.122, 1.000, 0.780),
            'ho': (0.000, 1.000, 0.612),
            'er': (0.000, 0.902, 0.459),
            'tm': (0.000, 0.831, 0.322),
            'yb': (0.000, 0.749, 0.220),
            'lu': (0.000, 0.671, 0.141),
            'hf': (0.302, 0.761, 1.000),
            'ta': (0.302, 0.651, 1.000),
            'w': (0.129, 0.580, 0.839),
            're': (0.149, 0.490, 0.671),
            'os': (0.149, 0.400, 0.588),
            'ir': (0.090, 0.329, 0.529),
            'pt': (0.816, 0.816, 0.878),
            'au': (1.000, 0.820, 0.137),
            'hg': (0.722, 0.722, 0.816),
            'tl': (0.651, 0.329, 0.302),
            'pb': (0.341, 0.349, 0.380),
            'bi': (0.620, 0.310, 0.710),
            'po': (0.671, 0.361, 0.000),
            'at': (0.459, 0.310, 0.271),
            'rn': (0.259, 0.510, 0.588),
            'fr': (0.259, 0.000, 0.400),
            'ra': (0.000, 0.490, 0.000),
            'ac': (0.439, 0.671, 0.980),
            'th': (0.000, 0.729, 1.000),
            'pa': (0.000, 0.631, 1.000),
            'u': (0.000, 0.561, 1.000),
            'np': (0.000, 0.502, 1.000),
            'pu': (0.000, 0.420, 1.000),
            'am': (0.329, 0.361, 0.949),
            'cm': (0.471, 0.361, 0.890),
            'bk': (0.541, 0.310, 0.890),
            'cf': (0.631, 0.212, 0.831),
            'es': (0.702, 0.122, 0.831),
            'fm': (0.702, 0.122, 0.729),
            'md': (0.702, 0.051, 0.651),
            'no': (0.741, 0.051, 0.529),
            'lr': (0.780, 0.000, 0.400),
            'rf': (0.800, 0.000, 0.349),
            'db': (0.820, 0.000, 0.310),
            'sg': (0.851, 0.000, 0.271),
            'bh': (0.878, 0.000, 0.220),
            'hs': (0.902, 0.000, 0.180),
            'mt': (0.922, 0.000, 0.149)
        }
# Named RGB triples (in [0, 1]) mirroring VMD's standard color table;
# used by VMDElementStyler below to build its element palette.
VMD_COLORS = {
    "blue": (0.000000, 0.000000, 1.000000),
    "red": (1.000000, 0.000000, 0.000000),
    "gray": (0.350000, 0.350000, 0.350000),
    "orange": (1.000000, 0.500000, 0.000000),
    "yellow": (1.000000, 1.000000, 0.000000),
    "tan": (0.500000, 0.500000, 0.200000),
    "silver": (0.600000, 0.600000, 0.600000),
    "green": (0.000000, 1.000000, 0.000000),
    "white": (1.000000, 1.000000, 1.000000),
    "pink": (1.000000, 0.600000, 0.600000),
    "cyan": (0.250000, 0.750000, 0.750000),
    "purple": (0.650000, 0.000000, 0.650000),
    "lime": (0.500000, 0.900000, 0.400000),
    "mauve": (0.900000, 0.400000, 0.700000),
    "ochre": (0.500000, 0.300000, 0.000000),
    "iceblue": (0.500000, 0.500000, 0.750000),
    "black": (0.000000, 0.000000, 0.000000),
    "yellow2": (0.880000, 0.970000, 0.020000),
    "yellow3": (0.550000, 0.900000, 0.020000),
    "green2": (0.000000, 0.900000, 0.040000),
    "green3": (0.000000, 0.900000, 0.500000),
    "cyan2": (0.000000, 0.880000, 1.000000),
    "cyan3": (0.000000, 0.760000, 1.000000),
    "blue2": (0.020000, 0.380000, 0.670000),
    "blue3": (0.010000, 0.040000, 0.930000),
    "violet": (0.270000, 0.000000, 0.980000),
    "violet2": (0.450000, 0.000000, 0.900000),
    "magenta": (0.900000, 0.000000, 0.900000),
    "magenta2": (1.000000, 0.000000, 0.660000),
    "red2": (0.980000, 0.000000, 0.230000),
    "red3": (0.810000, 0.000000, 0.000000),
    "orange2": (0.890000, 0.350000, 0.000000),
    "orange3": (0.960000, 0.720000, 0.000000)
}
class VMDElementStyler(PaletteElementStyler):
    """Color elements following VMD's element coloring"""
    def __init__(self):
        """Build the VMD palette: lowercase element symbol -> RGB in [0, 1].

        Only a handful of common elements get distinctive colors; every
        other element falls back to "ochre", matching VMD's behavior.
        """
        self.palette = {
            "ac": VMD_COLORS["ochre"],
            "ag": VMD_COLORS["ochre"],
            "al": VMD_COLORS["ochre"],
            "am": VMD_COLORS["ochre"],
            "ar": VMD_COLORS["ochre"],
            "as": VMD_COLORS["ochre"],
            "at": VMD_COLORS["ochre"],
            "au": VMD_COLORS["ochre"],
            "b": VMD_COLORS["ochre"],
            "ba": VMD_COLORS["ochre"],
            "be": VMD_COLORS["ochre"],
            "bh": VMD_COLORS["ochre"],
            "bi": VMD_COLORS["ochre"],
            "bk": VMD_COLORS["ochre"],
            "br": VMD_COLORS["ochre"],
            "c": VMD_COLORS["cyan"],
            "ca": VMD_COLORS["ochre"],
            "cd": VMD_COLORS["ochre"],
            "ce": VMD_COLORS["ochre"],
            "cf": VMD_COLORS["ochre"],
            "cl": VMD_COLORS["ochre"],
            "cm": VMD_COLORS["ochre"],
            "co": VMD_COLORS["ochre"],
            "cr": VMD_COLORS["ochre"],
            "cs": VMD_COLORS["ochre"],
            "cu": VMD_COLORS["ochre"],
            "db": VMD_COLORS["ochre"],
            "ds": VMD_COLORS["ochre"],
            "dy": VMD_COLORS["ochre"],
            "er": VMD_COLORS["ochre"],
            "es": VMD_COLORS["ochre"],
            "eu": VMD_COLORS["ochre"],
            "f": VMD_COLORS["ochre"],
            "fe": VMD_COLORS["ochre"],
            "fm": VMD_COLORS["ochre"],
            "fr": VMD_COLORS["ochre"],
            "ga": VMD_COLORS["ochre"],
            "gd": VMD_COLORS["ochre"],
            "ge": VMD_COLORS["ochre"],
            "h": VMD_COLORS["white"],
            "he": VMD_COLORS["ochre"],
            "hf": VMD_COLORS["ochre"],
            "hg": VMD_COLORS["ochre"],
            "ho": VMD_COLORS["ochre"],
            "hs": VMD_COLORS["ochre"],
            "i": VMD_COLORS["ochre"],
            "in": VMD_COLORS["ochre"],
            "ir": VMD_COLORS["ochre"],
            "k": VMD_COLORS["ochre"],
            "kr": VMD_COLORS["ochre"],
            "la": VMD_COLORS["ochre"],
            "li": VMD_COLORS["ochre"],
            "lr": VMD_COLORS["ochre"],
            "lu": VMD_COLORS["ochre"],
            "md": VMD_COLORS["ochre"],
            "mg": VMD_COLORS["ochre"],
            "mn": VMD_COLORS["ochre"],
            "mo": VMD_COLORS["ochre"],
            "mt": VMD_COLORS["ochre"],
            "n": VMD_COLORS["blue"],
            "na": VMD_COLORS["ochre"],
            "nb": VMD_COLORS["ochre"],
            "nd": VMD_COLORS["ochre"],
            "ne": VMD_COLORS["ochre"],
            "ni": VMD_COLORS["ochre"],
            "no": VMD_COLORS["ochre"],
            "np": VMD_COLORS["ochre"],
            "o": VMD_COLORS["red"],
            "os": VMD_COLORS["ochre"],
            "p": VMD_COLORS["tan"],
            "pa": VMD_COLORS["ochre"],
            "pb": VMD_COLORS["ochre"],
            "pd": VMD_COLORS["ochre"],
            "pm": VMD_COLORS["ochre"],
            "po": VMD_COLORS["ochre"],
            "pr": VMD_COLORS["ochre"],
            "pt": VMD_COLORS["ochre"],
            "pu": VMD_COLORS["ochre"],
            "ra": VMD_COLORS["ochre"],
            "rb": VMD_COLORS["ochre"],
            "re": VMD_COLORS["ochre"],
            "rf": VMD_COLORS["ochre"],
            "rg": VMD_COLORS["ochre"],
            "rh": VMD_COLORS["ochre"],
            "rn": VMD_COLORS["ochre"],
            "ru": VMD_COLORS["ochre"],
            "s": VMD_COLORS["yellow"],
            "sb": VMD_COLORS["ochre"],
            "sc": VMD_COLORS["ochre"],
            "se": VMD_COLORS["ochre"],
            "sg": VMD_COLORS["ochre"],
            "si": VMD_COLORS["ochre"],
            "sm": VMD_COLORS["ochre"],
            "sn": VMD_COLORS["ochre"],
            "sr": VMD_COLORS["ochre"],
            "ta": VMD_COLORS["ochre"],
            "tb": VMD_COLORS["ochre"],
            "tc": VMD_COLORS["ochre"],
            "te": VMD_COLORS["ochre"],
            "th": VMD_COLORS["ochre"],
            "ti": VMD_COLORS["ochre"],
            "tl": VMD_COLORS["ochre"],
            "tm": VMD_COLORS["ochre"],
            "u": VMD_COLORS["ochre"],
            "v": VMD_COLORS["ochre"],
            "w": VMD_COLORS["ochre"],
            # "x" is the dummy/unknown-atom symbol.
            "x": VMD_COLORS["purple"],
            "xe": VMD_COLORS["ochre"],
            "y": VMD_COLORS["ochre"],
            "yb": VMD_COLORS["ochre"],
            "zn": VMD_COLORS["silver"],
            "zr": VMD_COLORS["ochre"]
        }
def get_styler(options):
    """Return the element styler selected by options["colors"].

    Any colors value containing "vmd" selects the VMD palette; everything
    else falls back to the Molecular Blender default palette.
    """
    if "vmd" in options["colors"]:
        return VMDElementStyler()
    return DefaultElementStyler()
|
smparker/Molecular-Blender
|
stylers.py
|
Python
|
gpl-3.0
| 13,510
|
[
"VMD"
] |
785650d2013e9f96785b006b554515aa11dac374eacf033f7a8f0b382dd64d3c
|
import os
import time
from datetime import datetime

import pandas as pd
import statsmodels.api as sm
import statsmodels.tsa.stattools as sts
import tushare as ts
TABLE_STOCKS_BASIC = 'stock_basic_list'
DownloadDir = './stockdata/'
def adfuller_check_smols(code1, code2, start_date = '2011-10-10', end_date = '2014-09-30'):
    """Cointegration check for a stock pair via statsmodels OLS + ADF test.

    Regresses code2's close on code1's close over [start_date, end_date]
    using the downloaded k-line CSVs, then runs an augmented Dickey-Fuller
    test on the regression residual (the spread).

    Returns True when the spread is stationary at the 10% critical value,
    False otherwise, and None when either CSV is missing or the joined
    price series is empty.
    """
    m = str(code1)
    n = str(code2)
    file1 = DownloadDir + "h_kline_" + code1 + ".csv"
    file2 = DownloadDir + "h_kline_" + code2 + ".csv"
    # BUG FIX: the second existence test previously re-checked file1,
    # so a missing file2 crashed read_csv below.
    if not os.path.exists(file1) or not os.path.exists(file2):
        return
    kline1 = pd.read_csv(file1, parse_dates=['date'], index_col='date', date_parser=tudateparser)
    kline2 = pd.read_csv(file2, parse_dates=['date'], index_col='date', date_parser=tudateparser)
    # The CSV index is in descending date order, hence end_date:start_date.
    price_of_1 = kline1[end_date:start_date]
    price_of_2 = kline2[end_date:start_date]
    combination = price_of_1.join(price_of_2, how='inner', lsuffix='l', rsuffix='r')
    # BUG FIX: dropna() returns a new frame; the result was being discarded.
    combination = combination.dropna()
    closeprice_of_1 = combination['closel'].reset_index(drop=True)
    closeprice_of_2 = combination['closer'].reset_index(drop=True)
    if len(closeprice_of_1) != 0 and len(closeprice_of_2) != 0:
        X = sm.add_constant(closeprice_of_1)
        model = sm.OLS(endog=closeprice_of_2, exog=X)
        result = model.fit()
        # Spread = regression residual; stationary spread => cointegrated pair.
        spread = result.resid
        stat = sts.adfuller(x=spread)
        adf = stat[0]
        pvalue = stat[1]
        critical_values = stat[4]
        pair = m + '+' + n
        # More negative ADF statistic than the critical value => reject the
        # unit-root hypothesis at the 10% level.
        return adf < critical_values['10%']
def adfuller_check_online(code1, code2):
    """Fetch pair history from tushare, cache it to CSV, and print the
    ADF test result for the pandas-OLS spread of the two close series."""
    m = str(code1)
    n = str(code2)
    price_of_1 = ts.get_hist_data(m, start='2011-10-10', end='2014-09-30')
    price_of_2 = ts.get_hist_data(n, start='2011-10-10', end='2014-09-30')
    price_of_1.to_csv(code1 + "20111010-2016-03-05.csv")
    # BUG FIX: both frames were previously written to code1's file name,
    # overwriting the first download and never caching code2.
    price_of_2.to_csv(code2 + "20111010-2016-03-05.csv")
    closeprice_of_1 = price_of_1['close']
    closeprice_of_2 = price_of_2['close']
    if len(closeprice_of_1) != 0 and len(closeprice_of_2) != 0:
        # NOTE(review): pd.ols was removed in pandas >= 0.20; this path
        # requires the legacy pandas this script was written against.
        model = pd.ols(y=closeprice_of_2, x=closeprice_of_1, intercept=True)  # perform ols on these two stocks
        spread = closeprice_of_2 - closeprice_of_1 * model.beta['x']
        spread = spread.dropna()
        sta = sts.adfuller(spread, 1)
        pair = m + '+' + n
        print(pair + ": adfuller result ")
        print(sta)
# Date parser for the k-line CSVs; date example 2011-10-13.
# BUG FIX: pd.datetime was deprecated and removed from pandas -- use the
# stdlib datetime class directly (requires 'from datetime import datetime').
tudateparser = lambda dates: datetime.strptime(dates, '%Y-%m-%d')
def adfuller_check(code1, code2, start_date = '2011-10-10', end_date = '2014-09-30'):
    """Cointegration check for a stock pair via legacy pandas OLS + ADF test.

    Reads both k-line CSVs, joins the close prices over the date window,
    regresses code2 on code1, and returns sts.adfuller's result tuple for
    the spread (adf stat, p-value, lags, nobs, critical values, icbest).
    Returns None when either CSV is missing or the joined series is empty.
    """
    m = str(code1)
    n = str(code2)
    file1 = DownloadDir + "h_kline_" + code1 + ".csv"
    file2 = DownloadDir + "h_kline_" + code2 + ".csv"
    # BUG FIX: the second existence test previously re-checked file1.
    if not os.path.exists(file1) or not os.path.exists(file2):
        return
    kline1 = pd.read_csv(file1, parse_dates=['date'], index_col='date', date_parser=tudateparser)
    kline2 = pd.read_csv(file2, parse_dates=['date'], index_col='date', date_parser=tudateparser)
    # The CSV index is in descending date order, hence end_date:start_date.
    price_of_1 = kline1[end_date:start_date]
    price_of_2 = kline2[end_date:start_date]
    combination = price_of_1.join(price_of_2, how='inner', lsuffix='l', rsuffix='r')
    # BUG FIX: dropna() returns a new frame; the result was being discarded.
    combination = combination.dropna()
    closeprice_of_1 = combination['closel']
    closeprice_of_2 = combination['closer']
    if len(closeprice_of_1) != 0 and len(closeprice_of_2) != 0:
        # NOTE(review): pd.ols was removed in pandas >= 0.20; see
        # adfuller_check_smols for the statsmodels equivalent.
        model = pd.ols(y=closeprice_of_2, x=closeprice_of_1, intercept=True)  # perform ols on these two stocks
        spread = closeprice_of_2 - closeprice_of_1 * model.beta['x']
        spread = spread.dropna()
        sta = sts.adfuller(spread, 1)
        pair = m + '+' + n
        return sta
'''
print pair + ": adfuller result "
print sta
'''
def adfuller_check2(df):
    """Row-apply adapter: run the statsmodels cointegration check on a
    (code1, code2) row produced by check_all_dir."""
    first, second = df[0], df[1]
    adfuller_check_smols(first, second)
def adfuller_check3(df):
    # Row adapter for the (disabled) multiprocessing path in check_all_dir;
    # expects attribute access (df.code1/df.code2) rather than indexing.
    print df
    adfuller_check(df.code1, df.code2)
def check_all_dir():
    # Pair every stock code in rows 100..199 of the basic stock list with
    # every later code in that slice and run the OLS/ADF check on each pair.
    print 'starting adf checking'
    stock_list = pd.read_csv(TABLE_STOCKS_BASIC + '.csv', dtype=str)
    code = stock_list['code']
    reindexed_code = code.reset_index(drop=True)
    # Only a 100-code window is scanned; adjust this slice to widen the scan.
    reindexed_code = reindexed_code[100:200]
    reindexed_code = reindexed_code.reset_index(drop=True)
    stockPool = pd.DataFrame(columns=['code1','code2'])
    print len(reindexed_code)
    # Build all unordered pairs (i < j): O(n^2) rows appended one at a time.
    for i in range(len(reindexed_code)):
        for j in range(i+1, len(reindexed_code)):
            stockPool = stockPool.append({'code1':str(reindexed_code[i]), \
                'code2':str(reindexed_code[j])}, ignore_index=True)
    # Run the cointegration check once per pair row.
    stockPool.apply(adfuller_check2, axis=1)
    '''not working
    try:
        pool = multiprocessing.Pool(processes=2)
        pool.map(adfuller_check3, stockPool)
        pool.close()
        pool.join()
    except Exception as e:
        print str(e)
    print 'all stock checked'
    '''
## Main functionality
def main():
    # Time the full pairwise scan and report elapsed seconds.
    time1 = time.time()
    #adfuller_check2("601002", "600815")
    #adfuller_check_smols("601002", "600815")
    # check all stock pairings in the list book
    check_all_dir()
    time2 = time.time()
    print "running time(s): ", time2-time1
if __name__ == "__main__":
    # Execute Main functionality
    main()
|
lionelliang/PairTradingSpark
|
checkpairtradingSingle.py
|
Python
|
gpl-2.0
| 5,387
|
[
"ADF"
] |
659318a7ead58f58ad9c9cab897435a8eb94b89360a174aeceef3b55df099520
|
"""Tests for the BitField class."""
import unittest
import bitfield
__author__ = 'Brian Landers <brian@packetslave.com>'
class BitFieldTest(unittest.TestCase):
    """Tests for the BitField class."""

    def setUp(self):
        # A fresh 36-bit field (all bits clear) for every test.
        self.bits = bitfield.BitField(36)

    def test_constructor(self):
        # A new field starts with every bit cleared.
        for i in xrange(0, 36):
            self.assertFalse(self.bits.test(i))

    def test_constructor_args(self):
        # Field size must be a positive integer.
        with self.assertRaises(ValueError):
            _ = bitfield.BitField(0)
        with self.assertRaises(ValueError):
            _ = bitfield.BitField(-1)

    def test_set(self):
        for i in xrange(0, 36):
            self.assertFalse(self.bits.test(i))
        self.bits.set(17)
        # Only bit 17 is set; all neighbors remain clear.
        for i in xrange(0, 17):
            self.assertFalse(self.bits.test(i))
        self.assertTrue(self.bits.test(17))
        for i in xrange(18, 36):
            self.assertFalse(self.bits.test(i))

    def test_set_args(self):
        # Valid indices are 0..35; both ends out of range must raise.
        with self.assertRaises(ValueError):
            self.bits.set(-1)
        with self.assertRaises(ValueError):
            self.bits.set(36)

    def test_clear(self):
        self.bits.set(17)
        self.assertTrue(self.bits.test(17))
        self.bits.clear(17)
        self.assertFalse(self.bits.test(17))

    def test_clear_args(self):
        with self.assertRaises(ValueError):
            self.bits.clear(-1)
        with self.assertRaises(ValueError):
            self.bits.clear(36)

    def test_toggle(self):
        # Toggling twice restores the original state.
        self.assertFalse(self.bits.test(17))
        self.bits.toggle(17)
        self.assertTrue(self.bits.test(17))
        self.bits.toggle(17)
        self.assertFalse(self.bits.test(17))

    def test_toggle_args(self):
        with self.assertRaises(ValueError):
            self.bits.toggle(-1)
        with self.assertRaises(ValueError):
            self.bits.toggle(36)
if __name__ == '__main__':
    unittest.main()
|
Packetslave/bitfield
|
bitfield_test.py
|
Python
|
apache-2.0
| 1,913
|
[
"Brian"
] |
6dcf0d60ce957438efd1350f9622548c09a7fb544cbfe2bafef26c10b983381d
|
"""
handhRL - table functions
This file contains the table functions needed for main.
"""
import libtcodpy as libtcod
import random
def rolldice(num, sides, highest=0):
    """Roll `num` dice with `sides` faces each and return their total.

    When `highest` is non-zero, only the `highest` largest rolls are
    summed.  Using H&H notation: 4d6 = rolldice(4, 6);
    3d6H2 = rolldice(3, 6, highest=2).
    """
    rolls = [libtcod.random_get_int(0, 1, sides) for _ in range(num)]
    if highest:
        # Keep only the top `highest` results.
        rolls.sort(reverse=True)
        return sum(rolls[:highest])
    return sum(rolls)
def make_monster_table(dungeon_level):
    # Generate the dict table for monster generation, filtered by level.
    # monster table layout:
    #   key = internal monster name
    #   value[0]: minimum dungeon level at which the monster can appear
    #   value[1]: list[display name, hit-dice (num, sides) tuple, libtcod color]
    # Note: the crewmember's hit dice scale with the current dungeon level.
    monster_table = {'crewman': [1, ['deranged crewmember', (dungeon_level, 8), libtcod.light_red]],
                     'felix': [1, ['felix', (1, 4), libtcod.light_azure]],
                     'skinless': [1, ['skinless', (1, 6), libtcod.darker_pink]],
                     'skeletal': [1, ['skeletal', (1, 10), libtcod.lightest_sepia]],
                     'lobsterman': [1, ['lobsterman', (1, 6), libtcod.red]],
                     'cave_mushroom': [1, ['cave mushroom', (1, 6), libtcod.lightest_han]],
                     'anthropophagi': [1, ['anthropophagi', (1, 8), libtcod.peach]],
                     'capyfolk': [1, ['capyfolk', (1, 6), libtcod.light_sepia]],
                     'nagahide': [3, ['nagahide', (2, 12), libtcod.dark_green]],
                     'clawman': [3, ['clawman', (2, 12), libtcod.black]],
                     'hiverbug': [5, ['hiverbug', (3, 8), libtcod.yellow]],
                     'seeker_drone': [5, ['seeker drone', (3, 12), libtcod.silver]],
                     'neurovore': [7, ['neurovore', (1, 6), libtcod.Color(130, 110, 50)]],
                     'paleworm': [7, ['paleworm', (5, 6), libtcod.dark_pink]],
                     'gulper': [9, ['gulper', (5, 8), libtcod.lightest_grey]],
                     'centipod': [9, ['centipod', (5, 6), libtcod.darkest_red]],
                     'blind_troll': [9, ['blind troll', (5, 10), libtcod.darkest_green]],
                     'scumsucker': [11, ['scumsucker', (6, 8), libtcod.peach]],
                     'living_weapon': [11, ['living weapon', (6, 12), libtcod.black]],
                     'megaworm': [13, ['megaworm', (8, 10), libtcod.silver]]}
    # Keep only monsters allowed at or below the current dungeon level.
    # (iteritems: this module targets Python 2.)
    adjust_table = {k: v for k, v in monster_table.iteritems() if v[0] <= dungeon_level}
    return adjust_table
def make_weapon():
    # Generate a random weapon and return its parameter dict:
    # {'char', 'name', 'damage', 'bonus', 'gun', 'ammo'}.
    # Modern table entries: [map character, name, damage spec].
    # Damage spec convention: a (num, sides) tuple is a plain roll; a
    # [num, sides, highest] list keeps only the highest dice (see rolldice).
    modern_weapon = [['-', 'shiv', (1, 3)],
                     ['-', 'combat knife', (1, 4)],
                     ['-', 'vibro-blade', (1, 6)],
                     ['/', 'cutlass', (1, 8)],
                     ['/', 'vibro-sword', (1, 10)],
                     ['/', 'laser sword', [2, 10, 1]],
                     [chr(14), 'The Axe', (1, 12)],
                     [')', 'laser pistol', [2, 6, 1]],
                     [')', 'slug pistol', (1, 8)],
                     [')', 'particle beamer', (1, 10)],
                     ['}', 'pulse rifle', [3, 6, 2]],
                     ['}', 'plasma rifle', (2, 6)],
                     ['}', 'bolt rifle', (2, 10)],
                     ['=', 'naval pumpgun', (2, 6)],
                     ['=', 'sonic wavegun', (2, 8)],
                     ['=', 'plasma burster', (2, 12)],
                     ['&', 'minigun', [4, 6, 3]],
                     # NOTE(review): 2-element list where other lists carry
                     # [num, sides, highest]; rolldice(*[1, 8]) still works,
                     # but confirm a 'highest' value wasn't dropped here.
                     ['&', 'flamethrower', [1, 8]],
                     ['&', 'microrocket gun', (3, 8)]]
    # Ancient (artifact) weapons: a type selects the character, a random
    # flavor name, and a damage entry from the per-type lists below.
    ancient_types = ['dagger', 'sword', 'pistol', 'rifle', 'shotgun', 'heavy']
    ancient_names = {'dagger': ['monomolecular', 'phasic', 'plasma', 'hard light', 'synthdiamond', 'chitin'],
                     'sword': ['monomolecular', 'phasic', 'plasma', 'hard light', 'synthdiamond', 'chitin'],
                     'pistol': ['neutron slug', 'disintegrator', 'electric arc', 'quark accelerator',
                                'pain ray', 'dark matter beam'],
                     'rifle': ['neutron slug', 'disintegrator', 'electric arc', 'quark accelerator',
                               'pain ray', 'dark matter beam'],
                     'shotgun': ['graviton wave gun', 'spatial distruptor', 'field projector', 'waveform collapser',
                                 'superfluid blast emitter', 'molecular vibrator'],
                     'heavy': ['existential dequantifier', 'remote fusion launcher', 'antimatter pod launcher',
                               'matter melter', 'uncertainty resolver', 'polarity reverser']}
    ancient_char = {'dagger': '-',
                    'sword': '/',
                    'pistol': ')',
                    'rifle': '}',
                    'shotgun': '=',
                    'heavy': '&'}
    ancient_damage = {'dagger': [(1, 4), (1, 6), (1, 8), (1, 10), [2, 10, 1]],
                      'sword': [(1, 8), (1, 10), (1, 12), [2, 12, 1], [3, 12, 1]],
                      'pistol': [(1, 8), (1, 10), (1, 12), (2, 8), (2, 10)],
                      'rifle': [(2, 8), (2, 10), [3, 10, 2], (2, 12), (3, 6)],
                      'shotgun': [(2, 6), (2, 8), (2, 10), (2, 12), [3, 10, 2]],
                      'heavy': [(3, 8), (3, 10), (3, 12), (4, 8), (4, 10)]}
    # determine if ancient or modern: 3-in-4 chance of a modern weapon
    age = libtcod.random_get_int(0, 1, 4)
    if age < 4:
        # return modern weapon
        char, name, damage = random.choice(modern_weapon)
    else:
        # choose type of ancient weapon
        type = random.choice(ancient_types)
        # get the weapon's character
        char = ancient_char[type]
        # name the weapon; heavy/shotgun flavor names already read as full names
        if type == 'heavy' or type == 'shotgun':
            name = random.choice(ancient_names[type])
        else:
            name = random.choice(ancient_names[type]) + ' ' + type
        # get the weapon's damage
        damage = random.choice(ancient_damage[type])
    # roll bonus (0..2 to-hit/damage bonus)
    bonus = libtcod.random_get_int(0, 1, 3) - 1
    # append bonus to name if non-zero
    if bonus > 0:
        name = name + ' +' + str(bonus)
    # determine if it's a gun (any ranged map character)
    if char in [')', '}', '=', '&']:
        gun = True
    else:
        gun = False
    # give it ammo if it is; heavy weapons ('&') carry less
    if gun:
        if char in [')', '}', '=']:
            ammo = rolldice(3, 10)
        else:
            ammo = rolldice(1, 10)
    else:
        ammo = None
    # put it together
    weapon = {'char': char,
              'name': name,
              'damage': damage,
              'bonus': bonus,
              'gun': gun,
              'ammo': ammo}
    return weapon
def make_armor():
    # Generate a suit of armor or shield and return its parameter dict:
    # {'char', 'name', 'ac', 'str_bonus', 'dex_bonus'}.
    # NOTE(review): 'ac' values are negative modifiers -- this looks like a
    # descending-AC system where lower is better; confirm against the caller.
    # Modern armors: [map character, name, ac modifier].
    modern_armor = [[']', 'envirosuit', -1],
                    [']', 'vacc suit', -2],
                    [']', 'fiberweave', -3],
                    ['{', 'EVA suit', -4],
                    ['{', 'carbon shell', -5],
                    ['{', '"Jump" suit', -6],
                    ['+', 'combat pod', -7],
                    ['+', '"mirror" suit', -8],
                    ['?', 'exo-armor', -9],
                    ['?', 'exo-jet suit', -10],
                    ['?', 'bioweapon suit', -5],
                    ['[', 'plexsteel shield', -1],
                    ['[', 'particle shield', -2]]
    # Ancient (artifact) armor: a type selects character, flavor name,
    # and the randomized AC formula below.
    ancient_types = ['light', 'medium', 'heavy', 'powered', 'shield']
    ancient_chars = {'light': ']',
                     'medium': '{',
                     'heavy': '+',
                     'powered': '?',
                     'shield': '['}
    ancient_names = {'light': ['hard light', 'chitin', 'steelskin', 'megafauna hide', 'titanium foil',
                               'uncertainty field'],
                     'medium': ['hard light', 'chitin', 'steelskin', 'megafauna hide', 'titanium foil',
                                'uncertainty field'],
                     'heavy': ['diamond weave', 'neutronium plate', 'Schrodinger state', 'crystal timber',
                               'labyrinthum', 'depleted uranium'],
                     'powered': ['diamond weave', 'neutronium plate', 'Schrodinger state', 'crystal timber',
                                 'labyrinthum', 'depleted uranium'],
                     'shield': ['hard light', 'Pauli field', 'smart', 'dark matter', 'micro-singularity',
                                'dephasic']}
    ancient_suffix = {'light': 'suit',
                      'medium': 'armor',
                      'heavy': 'shell',
                      'powered': 'exo-suit',
                      'shield': 'shield'}
    # check for modern or ancient: 3-in-4 chance of modern
    if rolldice(1, 4) < 4:
        # get modern details
        char, name, ac = random.choice(modern_armor)
        is_modern = True
    else:
        # generate ancient details
        type = random.choice(ancient_types)
        char = ancient_chars[type]
        name = random.choice(ancient_names[type]) + ' ' + ancient_suffix[type]
        is_modern = False
        # generate base AC on the descending scale, heavier classes rolling lower
        if type == 'light':
            ac = 10 - rolldice(1, 4)
        elif type == 'medium':
            ac = 7 - rolldice(1, 4)
        elif type == 'heavy':
            ac = 5 - rolldice(1, 4)
        elif type == 'powered':
            ac = 1 - rolldice(1, 2)
        elif type == 'shield':
            ac = 0 - rolldice(1, 2)
        # recompute ac as a bonus to base 10
        ac += -10
    # generate armor bonus: -2..0, where negative improves (lowers) AC
    bonus = rolldice(1, 3) - 3
    # if armor bonus, append to name and add to ac
    if bonus < 0:
        name += ' ' + str(bonus)
        ac += bonus
    # if powered armor, it provides a STR/DEX bonus if ancient, or a simply STR bonus if modern
    # because handhRL doesn't yet use the full stat line, we abstract this to to-hit and damage bonuses later
    if char == '?' and not is_modern:
        str_bonus = rolldice(1, 2)
        dex_bonus = rolldice(1, 2) - 1
    elif char == '?' and is_modern and name == 'bioweapon suit':
        # NOTE(review): a negative quality roll appends ' -N' to the name, so
        # this equality check misses penalized bioweapon suits -- confirm.
        str_bonus = 2
        dex_bonus = 2
    elif char == '?' and is_modern:
        str_bonus = 1
        dex_bonus = 0
    else:
        str_bonus = 0
        dex_bonus = 0
    armor = {'char': char, 'name': name, 'ac': ac, 'str_bonus': str_bonus, 'dex_bonus': dex_bonus}
    return armor
def make_heal_item():
    """Randomly select a healing item and return its parameter dict.

    Keys: 'name', 'roll' (rolldice (num, sides) tuple or None),
    'reuse' (multi-use flag), 'uses' (charge count), and 'heal_all'
    (full-heal flag).  Single-use items get a 'dose of ' name prefix.
    """
    catalog = [
        ['Opacaine', (1, 4), False, 1, False],
        ['first-aid kit', (1, 6), True, 3, False],
        ['Heal-X', None, False, 1, True],
        ['Panacea', None, True, 10, True]
    ]
    name, roll, reuse, uses, heal_all = random.choice(catalog)
    if not reuse:
        name = 'dose of ' + name
    return {
        'name': name,
        'roll': roll,
        'reuse': reuse,
        'uses': uses,
        'heal_all': heal_all
    }
def make_grenade():
    """Randomly select a grenade type and return its parameter dict.

    Keys: 'name', 'damage' (direct-hit dice or None), 'radius' (blast
    radius in tiles), 'radius_damage' (splash dice or None), 'kills'
    (auto-kill the target), 'kills_radius' (auto-kill everything in radius).
    """
    catalog = [
        ['frag', (3, 6), 3, (1, 6), False, False],
        ['incendiary', (1, 6), 3, (1, 6), False, False],
        ['plasma', (4, 6), 3, (4, 6), False, False],
        ['Thermex', (4, 6), 0, None, False, False],
        ['Compound S', (5, 6), 3, (3, 6), False, False],
        ['microfusion', None, 6, None, True, True],
        ['microfission', None, 6, (5, 6), True, False]
    ]
    name, damage, radius, radius_damage, kills, kills_radius = random.choice(catalog)
    return {
        'name': name + ' grenade',
        'damage': damage,
        'radius': radius,
        'radius_damage': radius_damage,
        'kills': kills,
        'kills_radius': kills_radius
    }
def make_buff():
    """Pick a random buff item and return {'name', 'args'}.

    args is an argument list [max_hp, to_hit, damage, ac, xp, dr, desc].
    For the nano-augment capsule the list is built here: coin-flip each
    slot in order, set the first winner to 1 and stop (all zeros is
    possible).  NOTE(review): the augment list has only 6 entries, one
    fewer than the other items' lists -- confirm the consumer tolerates
    the missing description slot.
    """
    augment = [0] * 6
    for slot in range(len(augment)):
        if libtcod.random_get_int(0, 0, 1) != 0:
            augment[slot] = 1
            break
    buffs = [
        ['Immunol', [1, 0, 0, 0, 0, 0, 'You feel more resilient!']],
        ['Clariphine', [0, 0, 0, 0, 0, 1, 'You feel like you could take on the world!']],
        ['cellular motility boost', [0, 1, 1, 0, 0, 0, 'You feel more agile.']],
        ['nano-augment capsule', augment]
    ]
    name, args = random.choice(buffs)
    return {'name': name, 'args': args}
|
jarcane/handhRL
|
hhtable.py
|
Python
|
gpl-3.0
| 13,115
|
[
"BLAST",
"CRYSTAL"
] |
cbd80280536df220580aa6c04b0e88f8c737ea326e5ea1aee8ec292b8889ea6b
|
# coding: utf-8
"""
Acceptance tests for Studio's Setting pages
"""
from __future__ import unicode_literals
import os
from mock import patch
from nose.plugins.attrib import attr
from base_studio_test import StudioCourseTest
from bok_choy.promise import EmptyPromise
from common.test.acceptance.fixtures.course import XBlockFixtureDesc
from common.test.acceptance.tests.helpers import create_user_partition_json, element_has_text
from common.test.acceptance.pages.studio.overview import CourseOutlinePage
from common.test.acceptance.pages.studio.settings import SettingsPage
from common.test.acceptance.pages.studio.settings_advanced import AdvancedSettingsPage
from common.test.acceptance.pages.studio.settings_group_configurations import GroupConfigurationsPage
from common.test.acceptance.pages.lms.courseware import CoursewarePage
from common.test.acceptance.pages.studio.utils import get_input_value
from textwrap import dedent
from xmodule.partitions.partitions import Group
@attr(shard=8)
class ContentGroupConfigurationTest(StudioCourseTest):
    """
    Tests for content groups in the Group Configurations Page.
    There are tests for the experiment groups in test_studio_split_test.
    """
    def setUp(self):
        # Build page objects for the two Studio pages these tests use:
        # the Group Configurations page (under test) and the course
        # outline page (target of the "outline" link test).
        super(ContentGroupConfigurationTest, self).setUp()
        self.group_configurations_page = GroupConfigurationsPage(
            self.browser,
            self.course_info['org'],
            self.course_info['number'],
            self.course_info['run']
        )
        self.outline_page = CourseOutlinePage(
            self.browser,
            self.course_info['org'],
            self.course_info['number'],
            self.course_info['run']
        )
    def populate_course_fixture(self, course_fixture):
        """
        Populates test course with chapter, sequential, and 1 problems.
        The problem is visible only to Group "alpha".
        """
        # Minimal outline: one section -> one subsection -> one unit.
        # Individual tests add partitions/problems on top of this.
        course_fixture.add_children(
            XBlockFixtureDesc('chapter', 'Test Section').add_children(
                XBlockFixtureDesc('sequential', 'Test Subsection').add_children(
                    XBlockFixtureDesc('vertical', 'Test Unit')
                )
            )
        )
    def create_and_verify_content_group(self, name, existing_groups):
        """
        Creates a new content group and verifies that it was properly created.

        `existing_groups` is the number of groups expected to exist before
        creation; it doubles as the index of the newly added group.
        Returns the page object of the created group.
        """
        self.assertEqual(existing_groups, len(self.group_configurations_page.content_groups))
        if existing_groups == 0:
            # Empty state exposes a dedicated "add your first group" button.
            self.group_configurations_page.create_first_content_group()
        else:
            self.group_configurations_page.add_content_group()
        config = self.group_configurations_page.content_groups[existing_groups]
        config.name = name
        # Save the content group
        self.assertEqual(config.get_text('.action-primary'), "Create")
        self.assertFalse(config.delete_button_is_present)
        config.save()
        self.assertIn(name, config.name)
        return config
    def test_no_content_groups_by_default(self):
        """
        Scenario: Ensure that message telling me to create a new content group is
        shown when no content groups exist.
        Given I have a course without content groups
        When I go to the Group Configuration page in Studio
        Then I see "You have not created any content groups yet." message
        """
        self.group_configurations_page.visit()
        self.assertTrue(self.group_configurations_page.no_content_groups_message_is_present)
        self.assertIn(
            "You have not created any content groups yet.",
            self.group_configurations_page.no_content_groups_message_text
        )
    def test_can_create_and_edit_content_groups(self):
        """
        Scenario: Ensure that the content groups can be created and edited correctly.
        Given I have a course without content groups
        When I click button 'Add your first Content Group'
        And I set new the name and click the button 'Create'
        Then I see the new content is added and has correct data
        And I click 'New Content Group' button
        And I set the name and click the button 'Create'
        Then I see the second content group is added and has correct data
        When I edit the second content group
        And I change the name and click the button 'Save'
        Then I see the second content group is saved successfully and has the new name
        """
        self.group_configurations_page.visit()
        self.create_and_verify_content_group("New Content Group", 0)
        second_config = self.create_and_verify_content_group("Second Content Group", 1)
        # Edit the second content group
        second_config.edit()
        second_config.name = "Updated Second Content Group"
        # In edit mode the primary action reads "Save" rather than "Create".
        self.assertEqual(second_config.get_text('.action-primary'), "Save")
        second_config.save()
        self.assertIn("Updated Second Content Group", second_config.name)
    def test_cannot_delete_used_content_group(self):
        """
        Scenario: Ensure that the user cannot delete used content group.
        Given I have a course with 1 Content Group
        And I go to the Group Configuration page
        When I try to delete the Content Group with name "New Content Group"
        Then I see the delete button is disabled.
        """
        # Inject a cohort content-group partition (id 0, group 'alpha')
        # directly into the course via the fixture's private update hook.
        self.course_fixture._update_xblock(self.course_fixture._course_location, {
            "metadata": {
                u"user_partitions": [
                    create_user_partition_json(
                        0,
                        'Configuration alpha,',
                        'Content Group Partition',
                        [Group("0", 'alpha')],
                        scheme="cohort"
                    )
                ],
            },
        })
        problem_data = dedent("""
            <problem markdown="Simple Problem" max_attempts="" weight="">
              <p>Choose Yes.</p>
              <choiceresponse>
                <checkboxgroup>
                  <choice correct="true">Yes</choice>
                </checkboxgroup>
              </choiceresponse>
            </problem>
        """)
        vertical = self.course_fixture.get_nested_xblocks(category="vertical")[0]
        # group_access metadata ties the problem to group 0 of partition 0,
        # which makes the content group "in use" and its delete button disabled.
        self.course_fixture.create_xblock(
            vertical.locator,
            XBlockFixtureDesc('problem', "VISIBLE TO ALPHA", data=problem_data, metadata={"group_access": {0: [0]}}),
        )
        self.group_configurations_page.visit()
        config = self.group_configurations_page.content_groups[0]
        self.assertTrue(config.delete_button_is_disabled)
    def test_can_delete_unused_content_group(self):
        """
        Scenario: Ensure that the user can delete unused content group.
        Given I have a course with 1 Content Group
        And I go to the Group Configuration page
        When I delete the Content Group with name "New Content Group"
        Then I see that there is no Content Group
        When I refresh the page
        Then I see that the content group has been deleted
        """
        self.group_configurations_page.visit()
        config = self.create_and_verify_content_group("New Content Group", 0)
        self.assertTrue(config.delete_button_is_present)
        self.assertEqual(len(self.group_configurations_page.content_groups), 1)
        # Delete content group
        config.delete()
        self.assertEqual(len(self.group_configurations_page.content_groups), 0)
        # Re-visit to prove the deletion was persisted server-side,
        # not just removed from the DOM.
        self.group_configurations_page.visit()
        self.assertEqual(len(self.group_configurations_page.content_groups), 0)
    def test_must_supply_name(self):
        """
        Scenario: Ensure that validation of the content group works correctly.
        Given I have a course without content groups
        And I create new content group without specifying a name click the button 'Create'
        Then I see error message "Content Group name is required."
        When I set a name and click the button 'Create'
        Then I see the content group is saved successfully
        """
        self.group_configurations_page.visit()
        self.group_configurations_page.create_first_content_group()
        config = self.group_configurations_page.content_groups[0]
        # Saving with an empty name must keep the form in edit mode
        # and surface a validation message.
        config.save()
        self.assertEqual(config.mode, 'edit')
        self.assertEqual("Group name is required", config.validation_message)
        config.name = "Content Group Name"
        config.save()
        self.assertIn("Content Group Name", config.name)
    def test_can_cancel_creation_of_content_group(self):
        """
        Scenario: Ensure that creation of a content group can be canceled correctly.
        Given I have a course without content groups
        When I click button 'Add your first Content Group'
        And I set new the name and click the button 'Cancel'
        Then I see that there is no content groups in the course
        """
        self.group_configurations_page.visit()
        self.group_configurations_page.create_first_content_group()
        config = self.group_configurations_page.content_groups[0]
        config.name = "Content Group"
        config.cancel()
        self.assertEqual(0, len(self.group_configurations_page.content_groups))
    def test_content_group_empty_usage(self):
        """
        Scenario: When content group is not used, ensure that the link to outline page works correctly.
        Given I have a course without content group
        And I create new content group
        Then I see a link to the outline page
        When I click on the outline link
        Then I see the outline page
        """
        self.group_configurations_page.visit()
        config = self.create_and_verify_content_group("New Content Group", 0)
        config.toggle()
        config.click_outline_anchor()
        # Waiting for the page load and verify that we've landed on course outline page
        self.outline_page.wait_for_page()
@attr(shard=8)
class AdvancedSettingsValidationTest(StudioCourseTest):
    """
    Tests for validation feature in Studio's advanced settings tab.

    Each test feeds deliberately wrong-typed values into one field of each
    supported settings type and verifies that the validation modal shows the
    correct errors and that invalid values are never persisted.
    """

    # One representative field per settings type (String, List, Dict,
    # Integer, Date), mapped to a deliberately wrong-typed value.  Shared by
    # set_wrong_inputs_to_fields() and test_manual_change() so the two can
    # never drift apart (they previously duplicated this mapping).
    WRONG_INPUTS = {
        "Course Display Name": 1,
        "Advanced Module List": 1,
        "Discussion Topic Mapping": 1,
        "Maximum Attempts": '"string"',
        "Course Announcement Date": '"string"',
    }

    def setUp(self):
        super(AdvancedSettingsValidationTest, self).setUp()
        self.advanced_settings = AdvancedSettingsPage(
            self.browser,
            self.course_info['org'],
            self.course_info['number'],
            self.course_info['run']
        )
        self.type_fields = ['Course Display Name', 'Advanced Module List', 'Discussion Topic Mapping',
                            'Maximum Attempts', 'Course Announcement Date']
        # Before every test, make sure to visit the page first
        self.advanced_settings.visit()

    def test_modal_shows_one_validation_error(self):
        """
        Test that advanced settings don't save if there's a single wrong input,
        and that it shows the correct error message in the modal.
        """
        # Feed an integer value for String field.
        # .set method saves automatically after setting a value
        course_display_name = self.advanced_settings.get('Course Display Name')
        self.advanced_settings.set('Course Display Name', 1)
        self.advanced_settings.wait_for_modal_load()
        # Test Modal
        self.check_modal_shows_correct_contents(['Course Display Name'])
        self.advanced_settings.refresh_and_wait_for_load()
        self.assertEqual(
            self.advanced_settings.get('Course Display Name'),
            course_display_name,
            'Wrong input for Course Display Name must not change its value'
        )

    def test_modal_shows_multiple_validation_errors(self):
        """
        Test that advanced settings don't save with multiple wrong inputs
        """
        # Save original values and feed wrong inputs
        original_values_map = self.get_settings_fields_of_each_type()
        self.set_wrong_inputs_to_fields()
        self.advanced_settings.wait_for_modal_load()
        # Test Modal
        self.check_modal_shows_correct_contents(self.type_fields)
        self.advanced_settings.refresh_and_wait_for_load()
        # After a refresh, every field must still hold its original value.
        for key, val in original_values_map.items():
            self.assertEqual(
                self.advanced_settings.get(key),
                val,
                'Wrong input for Advanced Settings Fields must not change its value'
            )

    def test_undo_changes(self):
        """
        Test that undo changes button in the modal resets all settings changes
        """
        # Save original values and feed wrong inputs
        original_values_map = self.get_settings_fields_of_each_type()
        self.set_wrong_inputs_to_fields()
        # Let modal popup
        self.advanced_settings.wait_for_modal_load()
        # Click Undo Changes button
        self.advanced_settings.undo_changes_via_modal()
        # Check that changes are undone
        for key, val in original_values_map.items():
            self.assertEqual(
                self.advanced_settings.get(key),
                val,
                'Undoing Should revert back to original value'
            )

    def test_manual_change(self):
        """
        Test that manual changes button in the modal keeps settings unchanged
        """
        self.set_wrong_inputs_to_fields()
        self.advanced_settings.wait_for_modal_load()
        self.advanced_settings.trigger_manual_changes()
        # Check that the validation modal went away.
        self.assertFalse(self.advanced_settings.is_validation_modal_present())
        # Iterate through the wrong values and make sure they're still displayed
        for key, val in self.WRONG_INPUTS.items():
            self.assertEqual(
                str(self.advanced_settings.get(key)),
                str(val),
                'manual change should keep: ' + str(val) + ', but is: ' + str(self.advanced_settings.get(key))
            )

    def check_modal_shows_correct_contents(self, wrong_settings_list):
        """
        Helper function that checks if the validation modal contains correct
        error messages.
        """
        # Check presence of modal
        self.assertTrue(self.advanced_settings.is_validation_modal_present())
        # List of wrong settings item & what is presented in the modal should be the same
        error_item_names = self.advanced_settings.get_error_item_names()
        self.assertEqual(set(wrong_settings_list), set(error_item_names))
        error_item_messages = self.advanced_settings.get_error_item_messages()
        self.assertEqual(len(error_item_names), len(error_item_messages))

    def get_settings_fields_of_each_type(self):
        """
        Get one of each field type:
           - String: Course Display Name
           - List: Advanced Module List
           - Dict: Discussion Topic Mapping
           - Integer: Maximum Attempts
           - Date: Course Announcement Date
        """
        return {
            "Course Display Name": self.advanced_settings.get('Course Display Name'),
            "Advanced Module List": self.advanced_settings.get('Advanced Module List'),
            "Discussion Topic Mapping": self.advanced_settings.get('Discussion Topic Mapping'),
            "Maximum Attempts": self.advanced_settings.get('Maximum Attempts'),
            "Course Announcement Date": self.advanced_settings.get('Course Announcement Date'),
        }

    def set_wrong_inputs_to_fields(self):
        """
        Set wrong values for the chosen fields
        """
        self.advanced_settings.set_values(self.WRONG_INPUTS)

    def test_only_expected_fields_are_displayed(self):
        """
        Scenario: The Advanced Settings screen displays settings/fields not specifically hidden from
        view by a developer.
        Given I have a set of CourseMetadata fields defined for the course
        When I view the Advanced Settings screen for the course
        The total number of fields displayed matches the number I expect
        And the actual fields displayed match the fields I expect to see
        """
        expected_fields = self.advanced_settings.expected_settings_names
        displayed_fields = self.advanced_settings.displayed_settings_names
        self.assertEqual(set(displayed_fields), set(expected_fields))
@attr(shard=1)
class ContentLicenseTest(StudioCourseTest):
    """
    Tests for course-level licensing (that is, setting the license,
    for an entire course's content, to All Rights Reserved or Creative Commons)
    """
    def setUp(self):  # pylint: disable=arguments-differ
        # Studio pages for setting the license, plus the LMS courseware
        # page used to verify the license that learners actually see.
        super(ContentLicenseTest, self).setUp()
        self.outline_page = CourseOutlinePage(
            self.browser,
            self.course_info['org'],
            self.course_info['number'],
            self.course_info['run']
        )
        self.settings_page = SettingsPage(
            self.browser,
            self.course_info['org'],
            self.course_info['number'],
            self.course_info['run']
        )
        self.lms_courseware = CoursewarePage(
            self.browser,
            self.course_id,
        )
        self.settings_page.visit()
    def test_empty_license(self):
        """
        When I visit the Studio settings page,
        I see that the course license is "All Rights Reserved" by default.
        Then I visit the LMS courseware page,
        and I see that the default course license is displayed.
        """
        self.assertEqual(self.settings_page.course_license, "All Rights Reserved")
        self.lms_courseware.visit()
        self.assertEqual(self.lms_courseware.course_license, "© All Rights Reserved")
    def test_arr_license(self):
        """
        When I visit the Studio settings page,
        and I set the course license to "All Rights Reserved",
        and I refresh the page,
        I see that the course license is "All Rights Reserved".
        Then I visit the LMS courseware page,
        and I see that the course license is "All Rights Reserved".
        """
        self.settings_page.course_license = "All Rights Reserved"
        self.settings_page.save_changes()
        # Refresh to prove the selection was persisted, not just rendered.
        self.settings_page.refresh_and_wait_for_load()
        self.assertEqual(self.settings_page.course_license, "All Rights Reserved")
        self.lms_courseware.visit()
        self.assertEqual(self.lms_courseware.course_license, "© All Rights Reserved")
    def test_cc_license(self):
        """
        When I visit the Studio settings page,
        and I set the course license to "Creative Commons",
        and I refresh the page,
        I see that the course license is "Creative Commons".
        Then I visit the LMS courseware page,
        and I see that the course license is "Some Rights Reserved".
        """
        self.settings_page.course_license = "Creative Commons"
        self.settings_page.save_changes()
        self.settings_page.refresh_and_wait_for_load()
        self.assertEqual(self.settings_page.course_license, "Creative Commons")
        self.lms_courseware.visit()
        # The course_license text will include a bunch of screen reader text to explain
        # the selected options
        self.assertIn("Some Rights Reserved", self.lms_courseware.course_license)
@attr('a11y')
class StudioSettingsA11yTest(StudioCourseTest):
    """
    Class to test Studio pages accessibility.
    """
    def setUp(self):  # pylint: disable=arguments-differ
        super(StudioSettingsA11yTest, self).setUp()
        self.settings_page = SettingsPage(self.browser, self.course_info['org'], self.course_info['number'],
                                          self.course_info['run'])
    def test_studio_settings_page_a11y(self):
        """
        Check accessibility of SettingsPage.
        """
        self.settings_page.visit()
        self.settings_page.wait_for_page()
        # Known failure excluded from the audit until the linked ticket lands.
        self.settings_page.a11y_audit.config.set_rules({
            "ignore": [
                'link-href',  # TODO: AC-590
            ],
        })
        self.settings_page.a11y_audit.check_for_accessibility_errors()
@attr('a11y')
class StudioSubsectionSettingsA11yTest(StudioCourseTest):
    """
    Class to test accessibility on the subsection settings modals.
    """
    def setUp(self):  # pylint: disable=arguments-differ
        browser = os.environ.get('SELENIUM_BROWSER', 'firefox')

        # This test will fail if run using phantomjs < 2.0, due to an issue with bind()
        # See https://github.com/ariya/phantomjs/issues/10522 for details.
        # The course_outline uses this function, and as such will not fully load when run
        # under phantomjs 1.9.8. So, to prevent this test from timing out at course_outline.visit(),
        # force the use of firefox vs the standard a11y test usage of phantomjs 1.9.8.

        # TODO: remove this block once https://openedx.atlassian.net/browse/TE-1047 is resolved.
        if browser == 'phantomjs':
            browser = 'firefox'

        # The env patch must wrap setUp so the base class picks up the
        # overridden browser choice.
        with patch.dict(os.environ, {'SELENIUM_BROWSER': browser}):
            super(StudioSubsectionSettingsA11yTest, self).setUp(is_staff=True)
            self.course_outline = CourseOutlinePage(
                self.browser,
                self.course_info['org'],
                self.course_info['number'],
                self.course_info['run']
            )
    def populate_course_fixture(self, course_fixture):
        """Enable proctored exams and add a minimal section/subsection/problem tree."""
        course_fixture.add_advanced_settings({
            "enable_proctored_exams": {"value": "true"}
        })
        course_fixture.add_children(
            XBlockFixtureDesc('chapter', 'Test Section 1').add_children(
                XBlockFixtureDesc('sequential', 'Test Subsection 1').add_children(
                    XBlockFixtureDesc('problem', 'Test Problem 1')
                )
            )
        )
    def test_special_exams_menu_a11y(self):
        """
        Given that I am a staff member
        And I am editing settings on the special exams menu
        Then that menu is accessible
        """
        self.course_outline.visit()
        self.course_outline.open_subsection_settings_dialog()
        self.course_outline.select_advanced_tab()

        # limit the scope of the audit to the special exams tab on the modal dialog
        self.course_outline.a11y_audit.config.set_scope(
            include=['section.edit-settings-timed-examination']
        )
        self.course_outline.a11y_audit.check_for_accessibility_errors()
@attr(shard=1)
class StudioSettingsImageUploadTest(StudioCourseTest):
    """
    Class to test course settings image uploads.

    Each test uploads the same fixture image through a different upload
    control and verifies that the resulting image path contains the
    uploaded filename.
    """

    # Fixture image used by every upload test.
    UPLOAD_FILE = 'image.jpg'

    def setUp(self):  # pylint: disable=arguments-differ
        super(StudioSettingsImageUploadTest, self).setUp()
        self.settings_page = SettingsPage(self.browser, self.course_info['org'], self.course_info['number'],
                                          self.course_info['run'])
        self.settings_page.visit()

        # Ensure jquery is loaded before running a jQuery
        self.settings_page.wait_for_ajax()
        # This text appears towards the end of the work that jQuery is performing on the page
        self.settings_page.wait_for_jquery_value('input#course-name:text', 'test_run')

    def _upload_and_verify(self, upload_selector, image_selector):
        """Upload the fixture image via `upload_selector` and assert that the
        displayed image at `image_selector` now points at it."""
        self.settings_page.upload_image(upload_selector, self.UPLOAD_FILE)
        self.assertIn(self.UPLOAD_FILE, self.settings_page.get_uploaded_image_path(image_selector))

    def test_upload_course_card_image(self):
        # upload image
        self._upload_and_verify('#upload-course-image', '#course-image')

    def test_upload_course_banner_image(self):
        # upload image
        self._upload_and_verify('#upload-banner-image', '#banner-image')

    def test_upload_course_video_thumbnail_image(self):
        # upload image
        self._upload_and_verify('#upload-video-thumbnail-image', '#video-thumbnail-image')
@attr(shard=1)
class CourseSettingsTest(StudioCourseTest):
    """
    Class to test course settings.
    """
    # CSS selectors for the date/time input fields on the
    # Schedule & Details settings page.
    COURSE_START_DATE_CSS = "#course-start-date"
    COURSE_END_DATE_CSS = "#course-end-date"
    ENROLLMENT_START_DATE_CSS = "#course-enrollment-start-date"
    ENROLLMENT_END_DATE_CSS = "#course-enrollment-end-date"
    COURSE_START_TIME_CSS = "#course-start-time"
    COURSE_END_TIME_CSS = "#course-end-time"
    ENROLLMENT_START_TIME_CSS = "#course-enrollment-start-time"
    ENROLLMENT_END_TIME_CSS = "#course-enrollment-end-time"
    # Fixed date/time values used by the tests (MM/DD/YYYY, HH:MM).
    course_start_date = '12/20/2013'
    course_end_date = '12/26/2013'
    enrollment_start_date = '12/01/2013'
    enrollment_end_date = '12/10/2013'
    dummy_time = "15:30"
    # NOTE(review): is_staff/test_xss are accepted for signature
    # compatibility but not forwarded to super() — confirm intentional.
    def setUp(self, is_staff=False, test_xss=True):
        super(CourseSettingsTest, self).setUp()
        self.settings_page = SettingsPage(
            self.browser,
            self.course_info['org'],
            self.course_info['number'],
            self.course_info['run']
        )
        # Before every test, make sure to visit the page first
        self.settings_page.visit()
        self.ensure_input_fields_are_loaded()
    def set_course_dates(self):
        """
        Set dates for the course.
        """
        dates_dictionary = {
            self.COURSE_START_DATE_CSS: self.course_start_date,
            self.COURSE_END_DATE_CSS: self.course_end_date,
            self.ENROLLMENT_START_DATE_CSS: self.enrollment_start_date,
            self.ENROLLMENT_END_DATE_CSS: self.enrollment_end_date
        }
        self.settings_page.set_element_values(dates_dictionary)
    def ensure_input_fields_are_loaded(self):
        """
        Ensures values in input fields are loaded.
        """
        # The page populates the inputs asynchronously; wait until the
        # organization field has a non-empty value before proceeding.
        EmptyPromise(
            lambda: self.settings_page.q(css='#course-organization').attrs('value')[0],
            "Waiting for input fields to be loaded"
        ).fulfill()
    def test_user_can_set_course_date(self):
        """
        Scenario: User can set course dates
        Given I have opened a new course in Studio
        When I select Schedule and Details
        And I set course dates
        And I press the "Save" notification button
        And I reload the page
        Then I see the set dates
        """
        # Set dates
        self.set_course_dates()
        # Set times
        time_dictionary = {
            self.COURSE_START_TIME_CSS: self.dummy_time,
            self.ENROLLMENT_END_TIME_CSS: self.dummy_time
        }
        self.settings_page.set_element_values(time_dictionary)
        # Save changes
        self.settings_page.save_changes()
        self.settings_page.refresh_and_wait_for_load()
        self.ensure_input_fields_are_loaded()
        css_selectors = [self.COURSE_START_DATE_CSS, self.COURSE_END_DATE_CSS,
                         self.ENROLLMENT_START_DATE_CSS, self.ENROLLMENT_END_DATE_CSS,
                         self.COURSE_START_TIME_CSS, self.ENROLLMENT_END_TIME_CSS]
        expected_values = [self.course_start_date, self.course_end_date,
                           self.enrollment_start_date, self.enrollment_end_date,
                           self.dummy_time, self.dummy_time]
        # Assert changes have been persistent.
        self.assertEqual(
            [get_input_value(self.settings_page, css_selector) for css_selector in css_selectors],
            expected_values
        )
    def test_clear_previously_set_course_dates(self):
        """
        Scenario: User can clear previously set course dates (except start date)
        Given I have set course dates
        And I clear all the dates except start
        And I press the "Save" notification button
        And I reload the page
        Then I see cleared dates
        """
        # Set dates
        self.set_course_dates()
        # Clear all dates except start date
        values_to_set = {
            self.COURSE_END_DATE_CSS: '',
            self.ENROLLMENT_START_DATE_CSS: '',
            self.ENROLLMENT_END_DATE_CSS: ''
        }
        self.settings_page.set_element_values(values_to_set)
        # Save changes and refresh the page
        self.settings_page.save_changes()
        self.settings_page.refresh_and_wait_for_load()
        self.ensure_input_fields_are_loaded()
        css_selectors = [self.COURSE_START_DATE_CSS, self.COURSE_END_DATE_CSS,
                         self.ENROLLMENT_START_DATE_CSS, self.ENROLLMENT_END_DATE_CSS]
        expected_values = [self.course_start_date, '', '', '']
        # Assert changes have been persistent.
        self.assertEqual(
            [get_input_value(self.settings_page, css_selector) for css_selector in css_selectors],
            expected_values
        )
    def test_cannot_clear_the_course_start_date(self):
        """
        Scenario: User cannot clear the course start date
        Given I have set course dates
        And I press the "Save" notification button
        And I clear the course start date
        Then I receive a warning about course start date
        And I reload the page
        And the previously set start date is shown
        """
        # Set dates
        self.set_course_dates()
        # Save changes
        self.settings_page.save_changes()
        # Get default start date
        default_start_date = get_input_value(self.settings_page, self.COURSE_START_DATE_CSS)
        # Set course start date to empty
        self.settings_page.set_element_values({self.COURSE_START_DATE_CSS: ''})
        # Make sure error message is show with appropriate message
        error_message_css = '.message-error'
        self.settings_page.wait_for_element_presence(error_message_css, 'Error message is present')
        self.assertEqual(element_has_text(self.settings_page, error_message_css,
                                          "The course must have an assigned start date."), True)
        # Refresh the page and assert start date has not changed.
        self.settings_page.refresh_and_wait_for_load()
        self.ensure_input_fields_are_loaded()
        self.assertEqual(
            get_input_value(self.settings_page, self.COURSE_START_DATE_CSS),
            default_start_date
        )
    def test_user_can_correct_course_start_date_warning(self):
        """
        Scenario: User can correct the course start date warning
        Given I have tried to clear the course start
        And I have entered a new course start date
        And I press the "Save" notification button
        Then The warning about course start date goes away
        And I reload the page
        Then my new course start date is shown
        """
        # Set course start date to empty
        self.settings_page.set_element_values({self.COURSE_START_DATE_CSS: ''})
        # Make sure we get error message
        error_message_css = '.message-error'
        self.settings_page.wait_for_element_presence(error_message_css, 'Error message is present')
        self.assertEqual(element_has_text(self.settings_page, error_message_css,
                                          "The course must have an assigned start date."), True)
        # Set new course start value
        self.settings_page.set_element_values({self.COURSE_START_DATE_CSS: self.course_start_date})
        # Blur the input so the page re-validates the corrected value.
        self.settings_page.un_focus_input_field()
        # Error message disappears
        self.settings_page.wait_for_element_absence(error_message_css, 'Error message is not present')
        # Save the changes and refresh the page.
        self.settings_page.save_changes()
        self.settings_page.refresh_and_wait_for_load()
        self.ensure_input_fields_are_loaded()
        # Assert changes are persistent.
        self.assertEqual(
            get_input_value(self.settings_page, self.COURSE_START_DATE_CSS),
            self.course_start_date
        )
    def test_settings_are_only_persisted_when_saved(self):
        """
        Scenario: Settings are only persisted when saved
        Given I have set course dates
        And I press the "Save" notification button
        When I change fields
        And I reload the page
        Then I do not see the changes
        """
        # Set course dates.
        self.set_course_dates()
        # Save changes.
        self.settings_page.save_changes()
        default_value_enrollment_start_date = get_input_value(self.settings_page,
                                                              self.ENROLLMENT_START_TIME_CSS)
        # Set the value of enrollment start time and
        # reload the page without saving.
        self.settings_page.set_element_values({self.ENROLLMENT_START_TIME_CSS: self.dummy_time})
        self.settings_page.refresh_and_wait_for_load()
        self.ensure_input_fields_are_loaded()
        css_selectors = [self.COURSE_START_DATE_CSS, self.COURSE_END_DATE_CSS,
                         self.ENROLLMENT_START_DATE_CSS, self.ENROLLMENT_END_DATE_CSS,
                         self.ENROLLMENT_START_TIME_CSS]
        expected_values = [self.course_start_date, self.course_end_date,
                           self.enrollment_start_date, self.enrollment_end_date,
                           default_value_enrollment_start_date]
        # Assert that value of enrolment start time
        # is not saved.
        self.assertEqual(
            [get_input_value(self.settings_page, css_selector) for css_selector in css_selectors],
            expected_values
        )
    def test_settings_are_reset_on_cancel(self):
        """
        Scenario: Settings are reset on cancel
        Given I have set course dates
        And I press the "Save" notification button
        When I change fields
        And I press the "Cancel" notification button
        Then I do not see the changes
        """
        # Set course date
        self.set_course_dates()
        # Save changes
        self.settings_page.save_changes()
        default_value_enrollment_start_date = get_input_value(self.settings_page,
                                                              self.ENROLLMENT_START_TIME_CSS)
        # Set value but don't save it.
        self.settings_page.set_element_values({self.ENROLLMENT_START_TIME_CSS: self.dummy_time})
        self.settings_page.click_button("cancel")
        # Make sure changes are not saved after cancel.
        css_selectors = [self.COURSE_START_DATE_CSS, self.COURSE_END_DATE_CSS,
                         self.ENROLLMENT_START_DATE_CSS, self.ENROLLMENT_END_DATE_CSS,
                         self.ENROLLMENT_START_TIME_CSS]
        expected_values = [self.course_start_date, self.course_end_date,
                           self.enrollment_start_date, self.enrollment_end_date,
                           default_value_enrollment_start_date]
        self.assertEqual(
            [get_input_value(self.settings_page, css_selector) for css_selector in css_selectors],
            expected_values
        )
    def test_confirmation_is_shown_on_save(self):
        """
        Scenario: Confirmation is shown on save
        Given I have opened a new course in Studio
        When I select Schedule and Details
        And I change the "<field>" field to "<value>"
        And I press the "Save" notification button
        Then I see a confirmation that my changes have been saved
        """
        # Set date
        self.settings_page.set_element_values({self.COURSE_START_DATE_CSS: self.course_start_date})
        # Confirmation is showed upon save.
        # Save_changes function ensures that save
        # confirmation is shown.
        self.settings_page.save_changes()
    def test_changes_in_course_overview_show_a_confirmation(self):
        """
        Scenario: Changes in Course Overview show a confirmation
        Given I have opened a new course in Studio
        When I select Schedule and Details
        And I change the course overview
        And I press the "Save" notification button
        Then I see a confirmation that my changes have been saved
        """
        # Change the value of course overview
        self.settings_page.change_course_description('Changed overview')
        # Save changes
        # Save_changes function ensures that save
        # confirmation is shown.
        self.settings_page.save_changes()
    def test_user_cannot_save_invalid_settings(self):
        """
        Scenario: User cannot save invalid settings
        Given I have opened a new course in Studio
        When I select Schedule and Details
        And I change the "Course Start Date" field to ""
        Then the save notification button is disabled
        """
        # Change the course start date to invalid date.
        self.settings_page.set_element_values({self.COURSE_START_DATE_CSS: ''})
        # Confirm that save button is disabled.
        self.assertEqual(self.settings_page.is_element_present(".action-primary.action-save.is-disabled"), True)
|
synergeticsedx/deployment-wipro
|
common/test/acceptance/tests/studio/test_studio_settings.py
|
Python
|
agpl-3.0
| 37,503
|
[
"VisIt"
] |
1cf1a125670299e613eece1ce0637ea0579202d1756f49688f48b07909d372ef
|
#!/usr/bin/env python
#
# Authors: James D. McClain <jmcclain@princeton.edu>
#
"""Module for running restricted closed-shell k-point ccsd(t)"""
import ctypes
import h5py
import itertools
import numpy as np
import pyscf.pbc.cc.kccsd_rhf
import time
from itertools import product
from pyscf import lib
from pyscf.cc import _ccsd
from pyscf.lib import logger
from pyscf.lib.misc import tril_product
from pyscf.lib.misc import flatten
from pyscf.lib.numpy_helper import cartesian_prod
from pyscf.lib.numpy_helper import pack_tril
from pyscf.lib.parameters import LARGE_DENOM
from pyscf.pbc import scf
from pyscf.pbc.lib import kpts_helper
from pyscf.pbc.mp.kmp2 import (get_frozen_mask, get_nocc, get_nmo,
padded_mo_coeff, padding_k_idx)
from pyscf import __config__
#einsum = np.einsum
einsum = lib.einsum
# CCSD(T) equations taken from Scuseria, JCP (94), 1991
#
# NOTE: As pointed out in cc/ccsd_t_slow.py, there is an error in this paper
# and the equation should read [ia] >= [jb] >= [kc] (since the only
# symmetry in spin-less operators is the exchange of a column of excitation
# ooperators).
def kernel(mycc, eris, t1=None, t2=None, max_memory=2000, verbose=logger.INFO):
'''Returns the CCSD(T) for restricted closed-shell systems with k-points.
Note:
Returns real part of the CCSD(T) energy, raises warning if there is
a complex part.
Args:
mycc (:class:`RCCSD`): Coupled-cluster object storing results of
a coupled-cluster calculation.
eris (:class:`_ERIS`): Integral object holding the relevant electron-
repulsion integrals and Fock matrix elements
t1 (:obj:`ndarray`): t1 coupled-cluster amplitudes
t2 (:obj:`ndarray`): t2 coupled-cluster amplitudes
max_memory (float): Maximum memory used in calculation (NOT USED)
verbose (int, :class:`Logger`): verbosity of calculation
Returns:
energy_t (float): The real-part of the k-point CCSD(T) energy.
'''
assert isinstance(mycc, pyscf.pbc.cc.kccsd_rhf.RCCSD)
cpu1 = cpu0 = (time.clock(), time.time())
if isinstance(verbose, logger.Logger):
log = verbose
else:
log = logger.Logger(mycc.stdout, verbose)
if t1 is None: t1 = mycc.t1
if t2 is None: t2 = mycc.t2
if eris is None:
raise TypeError('Electron repulsion integrals, `eris`, must be passed in '
'to the CCSD(T) kernel or created in the cc object for '
'the k-point CCSD(T) to run!')
if t1 is None or t2 is None:
raise TypeError('Must pass in t1/t2 amplitudes to k-point CCSD(T)! (Maybe '
'need to run `.ccsd()` on the ccsd object?)')
cell = mycc._scf.cell
kpts = mycc.kpts
# The dtype of any local arrays that will be created
dtype = t1.dtype
nkpts, nocc, nvir = t1.shape
mo_energy_occ = [eris.mo_energy[ki][:nocc] for ki in range(nkpts)]
mo_energy_vir = [eris.mo_energy[ki][nocc:] for ki in range(nkpts)]
mo_energy = np.asarray([eris.mo_energy[ki] for ki in range(nkpts)], dtype=np.float, order='C')
fov = eris.fock[:, :nocc, nocc:]
mo_e = mo_energy
mo_e_o = mo_energy_occ
mo_e_v = mo_energy_vir
# Set up class for k-point conservation
kconserv = kpts_helper.get_kconserv(cell, kpts)
# Create necessary temporary eris for fast read
feri_tmp, t2T, eris_vvop, eris_vooo_C = create_t3_eris(mycc, kconserv, [eris.vovv, eris.oovv, eris.ooov, t2])
t1T = np.array([x.T for x in t1], dtype=np.complex, order='C')
fvo = np.array([x.T for x in fov], dtype=np.complex, order='C')
cpu1 = log.timer_debug1('CCSD(T) tmp eri creation', *cpu1)
#def get_w_old(ki, kj, kk, ka, kb, kc, a0, a1, b0, b1, c0, c1, out=None):
# '''Wijkabc intermediate as described in Scuseria paper before Pijkabc acts'''
# km = kconserv[kc, kk, kb]
# kf = kconserv[kk, kc, kj]
# ret = einsum('kjcf,fiba->abcijk', t2[kk,kj,kc,:,:,c0:c1,:], eris.vovv[kf,ki,kb,:,:,b0:b1,a0:a1].conj())
# ret = ret - einsum('mkbc,jima->abcijk', t2[km,kk,kb,:,:,b0:b1,c0:c1], eris.ooov[kj,ki,km,:,:,:,a0:a1].conj())
# return ret
def get_w(ki, kj, kk, ka, kb, kc, a0, a1, b0, b1, c0, c1):
'''Wijkabc intermediate as described in Scuseria paper before Pijkabc acts
Uses tranposed eris for fast data access.'''
km = kconserv[kc, kk, kb]
kf = kconserv[kk, kc, kj]
out = einsum('cfjk,abif->abcijk', t2T[kc,kf,kj,c0:c1,:,:,:], eris_vvop[ka,kb,ki,a0:a1,b0:b1,:,nocc:])
out = out - einsum('cbmk,aijm->abcijk', t2T[kc,kb,km,c0:c1,b0:b1,:,:], eris_vooo_C[ka,ki,kj,a0:a1,:,:,:])
return out
def get_permuted_w(ki, kj, kk, ka, kb, kc, orb_indices):
'''Pijkabc operating on Wijkabc intermediate as described in Scuseria paper'''
a0, a1, b0, b1, c0, c1 = orb_indices
out = get_w(ki, kj, kk, ka, kb, kc, a0, a1, b0, b1, c0, c1)
out = out + get_w(kj, kk, ki, kb, kc, ka, b0, b1, c0, c1, a0, a1).transpose(2,0,1,5,3,4)
out = out + get_w(kk, ki, kj, kc, ka, kb, c0, c1, a0, a1, b0, b1).transpose(1,2,0,4,5,3)
out = out + get_w(ki, kk, kj, ka, kc, kb, a0, a1, c0, c1, b0, b1).transpose(0,2,1,3,5,4)
out = out + get_w(kk, kj, ki, kc, kb, ka, c0, c1, b0, b1, a0, a1).transpose(2,1,0,5,4,3)
out = out + get_w(kj, ki, kk, kb, ka, kc, b0, b1, a0, a1, c0, c1).transpose(1,0,2,4,3,5)
return out
def get_rw(ki, kj, kk, ka, kb, kc, orb_indices):
'''R operating on Wijkabc intermediate as described in Scuseria paper'''
a0, a1, b0, b1, c0, c1 = orb_indices
ret = (4. * get_permuted_w(ki,kj,kk,ka,kb,kc,orb_indices) +
1. * get_permuted_w(kj,kk,ki,ka,kb,kc,orb_indices).transpose(0,1,2,5,3,4) +
1. * get_permuted_w(kk,ki,kj,ka,kb,kc,orb_indices).transpose(0,1,2,4,5,3) -
2. * get_permuted_w(ki,kk,kj,ka,kb,kc,orb_indices).transpose(0,1,2,3,5,4) -
2. * get_permuted_w(kk,kj,ki,ka,kb,kc,orb_indices).transpose(0,1,2,5,4,3) -
2. * get_permuted_w(kj,ki,kk,ka,kb,kc,orb_indices).transpose(0,1,2,4,3,5))
return ret
#def get_v_old(ki, kj, kk, ka, kb, kc, a0, a1, b0, b1, c0, c1):
# '''Vijkabc intermediate as described in Scuseria paper'''
# km = kconserv[ki,ka,kj]
# kf = kconserv[ki,ka,kj]
# out = np.zeros((a1-a0,b1-b0,c1-c0) + (nocc,)*3, dtype=dtype)
# if kk == kc:
# out = out + einsum('kc,ijab->abcijk', 0.5*t1[kk,:,c0:c1], eris.oovv[ki,kj,ka,:,:,a0:a1,b0:b1].conj())
# out = out + einsum('kc,ijab->abcijk', 0.5*fov[kk,:,c0:c1], t2[ki,kj,ka,:,:,a0:a1,b0:b1])
# return out
def get_v(ki, kj, kk, ka, kb, kc, a0, a1, b0, b1, c0, c1):
'''Vijkabc intermediate as described in Scuseria paper'''
km = kconserv[ki,ka,kj]
kf = kconserv[ki,ka,kj]
out = np.zeros((a1-a0,b1-b0,c1-c0) + (nocc,)*3, dtype=dtype)
if kk == kc:
out = out + einsum('ck,baji->abcijk', 0.5*t1T[kk,c0:c1,:], eris_vvop[kb,ka,kj,b0:b1,a0:a1,:,:nocc])
# We see this is the same t2T term needed for the `w` contraction:
# einsum('cbmk,aijm->abcijk', t2T[kc,kb,km,c0:c1,b0:b1], eris_vooo_C[ka,ki,kj,a0:a1])
#
# For the kpoint indices [kk,ki,kj,kc,ka,kb] we have that we need
# t2T[kb,ka,km], where km = kconserv[kb,kj,ka]
# The remaining k-point not used in t2T, i.e. kc, has the condition kc == kk in the case of
# get_v. So, we have from 3-particle conservation
# (kk-kc) + ki + kj - ka - kb = 0,
# i.e. ki = km.
out = out + einsum('ck,baij->abcijk', 0.5*fvo[kk,c0:c1,:], t2T[kb,ka,ki,b0:b1,a0:a1,:,:])
return out
def get_permuted_v(ki, kj, kk, ka, kb, kc, orb_indices):
'''Pijkabc operating on Vijkabc intermediate as described in Scuseria paper'''
a0, a1, b0, b1, c0, c1 = orb_indices
tmp = np.zeros((a1-a0,b1-b0,c1-c0) + (nocc,)*3, dtype=dtype)
ret = get_v(ki, kj, kk, ka, kb, kc, a0, a1, b0, b1, c0, c1)
ret = ret + get_v(kj, kk, ki, kb, kc, ka, b0, b1, c0, c1, a0, a1).transpose(2,0,1,5,3,4)
ret = ret + get_v(kk, ki, kj, kc, ka, kb, c0, c1, a0, a1, b0, b1).transpose(1,2,0,4,5,3)
ret = ret + get_v(ki, kk, kj, ka, kc, kb, a0, a1, c0, c1, b0, b1).transpose(0,2,1,3,5,4)
ret = ret + get_v(kk, kj, ki, kc, kb, ka, c0, c1, b0, b1, a0, a1).transpose(2,1,0,5,4,3)
ret = ret + get_v(kj, ki, kk, kb, ka, kc, b0, b1, a0, a1, c0, c1).transpose(1,0,2,4,3,5)
return ret
def contract_t3Tv(kpt_indices, orb_indices, data):
'''Calculate t3T(ransposed) array using C driver.'''
ki, kj, kk, ka, kb, kc = kpt_indices
a0, a1, b0, b1, c0, c1 = orb_indices
slices = np.array([a0, a1, b0, b1, c0, c1], dtype=np.int32)
mo_offset = np.array([ki,kj,kk,ka,kb,kc], dtype=np.int32)
vvop_ab = np.asarray(data[0][0], dtype=np.complex, order='C')
vvop_ac = np.asarray(data[0][1], dtype=np.complex, order='C')
vvop_ba = np.asarray(data[0][2], dtype=np.complex, order='C')
vvop_bc = np.asarray(data[0][3], dtype=np.complex, order='C')
vvop_ca = np.asarray(data[0][4], dtype=np.complex, order='C')
vvop_cb = np.asarray(data[0][5], dtype=np.complex, order='C')
vooo_aj = np.asarray(data[1][0], dtype=np.complex, order='C')
vooo_ak = np.asarray(data[1][1], dtype=np.complex, order='C')
vooo_bi = np.asarray(data[1][2], dtype=np.complex, order='C')
vooo_bk = np.asarray(data[1][3], dtype=np.complex, order='C')
vooo_ci = np.asarray(data[1][4], dtype=np.complex, order='C')
vooo_cj = np.asarray(data[1][5], dtype=np.complex, order='C')
t2T_cj = np.asarray(data[2][0], dtype=np.complex, order='C')
t2T_bk = np.asarray(data[2][1], dtype=np.complex, order='C')
t2T_ci = np.asarray(data[2][2], dtype=np.complex, order='C')
t2T_ak = np.asarray(data[2][3], dtype=np.complex, order='C')
t2T_bi = np.asarray(data[2][4], dtype=np.complex, order='C')
t2T_aj = np.asarray(data[2][5], dtype=np.complex, order='C')
t2T_cb = np.asarray(data[3][0], dtype=np.complex, order='C')
t2T_bc = np.asarray(data[3][1], dtype=np.complex, order='C')
t2T_ca = np.asarray(data[3][2], dtype=np.complex, order='C')
t2T_ac = np.asarray(data[3][3], dtype=np.complex, order='C')
t2T_ba = np.asarray(data[3][4], dtype=np.complex, order='C')
t2T_ab = np.asarray(data[3][5], dtype=np.complex, order='C')
data = [vvop_ab, vvop_ac, vvop_ba, vvop_bc, vvop_ca, vvop_cb,
vooo_aj, vooo_ak, vooo_bi, vooo_bk, vooo_ci, vooo_cj,
t2T_cj, t2T_cb, t2T_bk, t2T_bc, t2T_ci, t2T_ca, t2T_ak,
t2T_ac, t2T_bi, t2T_ba, t2T_aj, t2T_ab]
data_ptrs = [x.ctypes.data_as(ctypes.c_void_p) for x in data]
data_ptrs = (ctypes.c_void_p*24)(*data_ptrs)
a0, a1, b0, b1, c0, c1 = task
t3Tw = np.empty((a1-a0,b1-b0,c1-c0) + (nocc,)*3, dtype=np.complex, order='C')
t3Tv = np.empty((a1-a0,b1-b0,c1-c0) + (nocc,)*3, dtype=np.complex, order='C')
drv = _ccsd.libcc.CCsd_zcontract_t3T
drv(t3Tw.ctypes.data_as(ctypes.c_void_p),
t3Tv.ctypes.data_as(ctypes.c_void_p),
mo_e.ctypes.data_as(ctypes.c_void_p),
t1T.ctypes.data_as(ctypes.c_void_p),
fvo.ctypes.data_as(ctypes.c_void_p),
ctypes.c_int(nocc), ctypes.c_int(nvir),
ctypes.c_int(nkpts),
mo_offset.ctypes.data_as(ctypes.c_void_p),
slices.ctypes.data_as(ctypes.c_void_p),
data_ptrs)
return t3Tw, t3Tv
def get_data(kpt_indices):
idx_args = get_data_slices(kpt_indices, task, kconserv)
vvop_indices, vooo_indices, t2T_vvop_indices, t2T_vooo_indices = idx_args
vvop_data = [eris_vvop[tuple(x)] for x in vvop_indices]
vooo_data = [eris_vooo_C[tuple(x)] for x in vooo_indices]
t2T_vvop_data = [t2T[tuple(x)] for x in t2T_vvop_indices]
t2T_vooo_data = [t2T[tuple(x)] for x in t2T_vooo_indices]
data = [vvop_data, vooo_data, t2T_vvop_data, t2T_vooo_data]
return data
energy_t = 0.0
# Get location of padded elements in occupied and virtual space
nonzero_opadding, nonzero_vpadding = padding_k_idx(mycc, kind="split")
mem_now = lib.current_memory()[0]
max_memory = max(0, mycc.max_memory - mem_now)
blkmin = 4
# temporary t3 array is size: 2 * nkpts**3 * blksize**3 * nocc**3 * 16
vir_blksize = min(nvir, max(blkmin, int((max_memory*.9e6/16/nocc**3/nkpts**3/2)**(1./3))))
tasks = []
log.debug('max_memory %d MB (%d MB in use)', max_memory, mem_now)
log.debug('virtual blksize = %d (nvir = %d)', nvir, vir_blksize)
for a0, a1 in lib.prange(0, nvir, vir_blksize):
for b0, b1 in lib.prange(0, nvir, vir_blksize):
for c0, c1 in lib.prange(0, nvir, vir_blksize):
tasks.append((a0,a1,b0,b1,c0,c1))
for ka in range(nkpts):
for kb in range(ka+1):
for task_id, task in enumerate(tasks):
a0,a1,b0,b1,c0,c1 = task
my_permuted_w = np.zeros((nkpts,)*3 + (a1-a0,b1-b0,c1-c0) + (nocc,)*3, dtype=dtype)
my_permuted_v = np.zeros((nkpts,)*3 + (a1-a0,b1-b0,c1-c0) + (nocc,)*3, dtype=dtype)
for ki, kj, kk in product(range(nkpts), repeat=3):
# Find momentum conservation condition for triples
# amplitude t3ijkabc
kc = kpts_helper.get_kconserv3(cell, kpts, [ki, kj, kk, ka, kb])
if not (ka >= kb and kb >= kc):
continue
kpt_indices = [ki,kj,kk,ka,kb,kc]
data = get_data(kpt_indices)
t3Tw, t3Tv = contract_t3Tv(kpt_indices, task, data)
my_permuted_w[ki,kj,kk] = t3Tw
my_permuted_v[ki,kj,kk] = t3Tv
#my_permuted_w[ki,kj,kk] = get_permuted_w(ki,kj,kk,ka,kb,kc,task)
#my_permuted_v[ki,kj,kk] = get_permuted_v(ki,kj,kk,ka,kb,kc,task)
for ki, kj, kk in product(range(nkpts), repeat=3):
# eigenvalue denominator: e(i) + e(j) + e(k)
eijk = _get_epqr([0,nocc,ki,mo_e_o,nonzero_opadding],
[0,nocc,kj,mo_e_o,nonzero_opadding],
[0,nocc,kk,mo_e_o,nonzero_opadding])
# Find momentum conservation condition for triples
# amplitude t3ijkabc
kc = kpts_helper.get_kconserv3(cell, kpts, [ki, kj, kk, ka, kb])
if not (ka >= kb and kb >= kc):
continue
if ka == kb and kb == kc:
symm_kpt = 1.
elif ka == kb or kb == kc:
symm_kpt = 3.
else:
symm_kpt = 6.
eabc = _get_epqr([a0,a1,ka,mo_e_v,nonzero_vpadding],
[b0,b1,kb,mo_e_v,nonzero_vpadding],
[c0,c1,kc,mo_e_v,nonzero_vpadding],
fac=[-1.,-1.,-1.])
eijkabc = (eijk[None,None,None,:,:,:] + eabc[:,:,:,None,None,None])
pwijk = my_permuted_w[ki,kj,kk] + my_permuted_v[ki,kj,kk]
rwijk = (4. * my_permuted_w[ki,kj,kk] +
1. * my_permuted_w[kj,kk,ki].transpose(0,1,2,5,3,4) +
1. * my_permuted_w[kk,ki,kj].transpose(0,1,2,4,5,3) -
2. * my_permuted_w[ki,kk,kj].transpose(0,1,2,3,5,4) -
2. * my_permuted_w[kk,kj,ki].transpose(0,1,2,5,4,3) -
2. * my_permuted_w[kj,ki,kk].transpose(0,1,2,4,3,5))
rwijk = rwijk / eijkabc
energy_t += symm_kpt * einsum('abcijk,abcijk', rwijk, pwijk.conj())
energy_t *= (1. / 3)
energy_t /= nkpts
if abs(energy_t.imag) > 1e-4:
log.warn('Non-zero imaginary part of CCSD(T) energy was found %s', energy_t.imag)
log.timer('CCSD(T)', *cpu0)
log.note('CCSD(T) correction per cell = %.15g', energy_t.real)
log.note('CCSD(T) correction per cell (imag) = %.15g', energy_t.imag)
return energy_t.real
###################################
# Helper function for t3 creation
###################################
def check_read_success(filename, **kwargs):
    '''Determine criterion for successfully reading a dataset based on its
    meta values.

    For now this is deliberately hard-wired to return False (the scratch
    file is always rebuilt), but the completeness check is kept so it can
    be enabled later.
    '''
    def check_write_complete(filename, **kwargs):
        '''Check for `completed` attr in file.'''
        import os
        # pop (not get): `mode` must be removed from kwargs, otherwise it
        # would be passed to h5py.File twice and raise a TypeError
        mode = kwargs.pop('mode', 'r')
        if not os.path.isfile(filename):
            return False
        f = h5py.File(filename, mode=mode, **kwargs)
        try:
            return f.attrs.get('completed', False)
        finally:
            # always release the handle; the original leaked it
            f.close()
    write_complete = check_write_complete(filename, **kwargs)
    return False and write_complete
def transpose_t2(t2, nkpts, nocc, nvir, kconserv, out=None):
'''Creates t2.transpose(2,3,1,0).'''
if out is None:
out = np.empty((nkpts,nkpts,nkpts,nvir,nvir,nocc,nocc), dtype=t2.dtype)
# Check if it's stored in lower triangular form
if len(t2.shape) == 7 and t2.shape[:2] == (nkpts, nkpts):
for ki, kj, ka in product(range(nkpts), repeat=3):
kb = kconserv[ki,ka,kj]
out[ka,kb,kj] = t2[ki,kj,ka].transpose(2,3,1,0)
elif len(t2.shape) == 6 and t2.shape[:2] == (nkpts*(nkpts+1)//2, nkpts):
for ki, kj, ka in product(range(nkpts), repeat=3):
kb = kconserv[ki,ka,kj]
# t2[ki,kj,ka] = t2[tril_index(ki,kj),ka] ki<kj
# t2[kj,ki,kb] = t2[ki,kj,ka].transpose(1,0,3,2) ki<kj
# = t2[tril_index(ki,kj),ka].transpose(1,0,3,2)
if ki <= kj:
tril_idx = (kj*(kj+1))//2 + ki
out[ka,kb,kj] = t2[tril_idx,ka].transpose(2,3,1,0).copy()
out[kb,ka,ki] = out[ka,kb,kj].transpose(1,0,3,2)
else:
raise ValueError('No known conversion for t2 shape %s' % t2.shape)
return out
def create_eris_vvop(vovv, oovv, nkpts, nocc, nvir, kconserv, out=None):
    '''Assemble the combined vvop array (physicist notation) from vovv and,
    optionally, oovv; the last axis packs [occupied | virtual] = nmo.'''
    nmo = nocc + nvir
    out_shape = (nkpts, nkpts, nkpts, nvir, nvir, nocc, nmo)
    assert vovv.shape == (nkpts, nkpts, nkpts, nvir, nocc, nvir, nvir)
    if out is None:
        out = np.empty(out_shape, dtype=vovv.dtype)
    else:
        assert out.shape == out_shape

    occ_cols = slice(None, nocc)
    vir_cols = slice(nocc, None)
    for kp, kq, kr in product(range(nkpts), repeat=3):
        ks = kconserv[kp, kr, kq]
        out[kp, kq, kr, :, :, :, vir_cols] = vovv[ks, kr, kq].conj().transpose(3, 2, 1, 0)
        if oovv is not None:
            # occupied part of the last axis is only filled when oovv is given
            out[kp, kq, kr, :, :, :, occ_cols] = oovv[ks, kr, kq].conj().transpose(3, 2, 1, 0)
    return out
def create_eris_vooo(ooov, nkpts, nocc, nvir, kconserv, out=None):
    '''Build vooo from the physicist-notation ooov array.

    Nearly chemist notation: a chemist vooo is formed from physicist ooov,
    then the last two indices are swapped.
    '''
    assert ooov.shape == (nkpts, nkpts, nkpts, nocc, nocc, nocc, nvir)
    if out is None:
        out = np.empty((nkpts, nkpts, nkpts, nvir, nocc, nocc, nocc), dtype=ooov.dtype)

    for kp, kq, kr in product(range(nkpts), repeat=3):
        ks = kconserv[kp, kq, kr]
        # <bj|ai> -> (ba|ji)   (Physicist -> Chemist)
        # (ij|ab) = (ba|ij)*   (Permutational symmetry)
        # out = (ij|ab).transpose(0,1,3,2)
        out[kp, kq, ks] = ooov[ks, kq, kr].conj().transpose(3, 1, 0, 2)
    return out
def create_t3_eris(mycc, kconserv, eris, tmpfile='tmp_t3_eris.h5'):
    '''Create/transpose necessary eri integrals needed for fast read-in by CCSD(T).

    Args:
        mycc: k-point RCCSD object; supplies nkpts, nocc, nmo and max_memory.
        kconserv: momentum-conservation table, kconserv[ki,ka,kj] -> kb.
        eris: sequence [vovv, oovv, ooov, t2] of integrals and amplitudes.
        tmpfile (str): name of the HDF5 scratch file holding the transposed arrays.

    Returns:
        (feri_tmp, t2T, eris_vvop, eris_vooo_C): the open read-mode HDF5
        handle (returned so the datasets stay valid for the caller's
        lifetime) plus the three transposed arrays — h5py datasets, or
        in-memory ndarrays when they fit in the remaining memory budget.
    '''
    eris_vovv, eris_oovv, eris_ooov, t2 = eris
    nkpts = mycc.nkpts
    nocc = mycc.nocc
    nmo = mycc.nmo
    nvir = nmo - nocc
    nmo = nocc + nvir
    feri_tmp = None
    h5py_kwargs = {}
    feri_tmp_filename = tmpfile
    # Promote over all inputs so complex integrals keep a complex dtype
    dtype = np.result_type(eris_vovv, eris_oovv, eris_ooov, t2)
    # NOTE: check_read_success is currently hard-wired to False, so the
    # scratch file is always rebuilt
    if not check_read_success(feri_tmp_filename):
        feri_tmp = lib.H5TmpFile(feri_tmp_filename, 'w', **h5py_kwargs)
        t2T_out = feri_tmp.create_dataset('t2T',
                      (nkpts,nkpts,nkpts,nvir,nvir,nocc,nocc), dtype=dtype)
        eris_vvop_out = feri_tmp.create_dataset('vvop',
                            (nkpts,nkpts,nkpts,nvir,nvir,nocc,nmo), dtype=dtype)
        eris_vooo_C_out = feri_tmp.create_dataset('vooo_C',
                              (nkpts,nkpts,nkpts,nvir,nocc,nocc,nocc), dtype=dtype)
        # Fill the datasets in place via the out= argument
        transpose_t2(t2, nkpts, nocc, nvir, kconserv, out=t2T_out)
        create_eris_vvop(eris_vovv, eris_oovv, nkpts, nocc, nvir, kconserv, out=eris_vvop_out)
        create_eris_vooo(eris_ooov, nkpts, nocc, nvir, kconserv, out=eris_vooo_C_out)
        # Mark the file complete so check_write_complete can recognize it later
        feri_tmp.attrs['completed'] = True
        feri_tmp.close()
    # Reopen read-only and hand out the datasets
    feri_tmp = lib.H5TmpFile(feri_tmp_filename, 'r', **h5py_kwargs)
    t2T = feri_tmp['t2T']
    eris_vvop = feri_tmp['vvop']
    eris_vooo_C = feri_tmp['vooo_C']
    mem_now = lib.current_memory()[0]
    max_memory = max(0, mycc.max_memory - mem_now)
    # Total element count of the three arrays; x16 bytes assumes complex128
    unit = nkpts**3 * (nvir**2 * nocc**2 + nvir**2 * nmo * nocc + nvir * nocc**3)
    if (unit*16 < max_memory):  # Store all in memory
        t2T = t2T[:]
        eris_vvop = eris_vvop[:]
        eris_vooo_C = eris_vooo_C[:]
    return feri_tmp, t2T, eris_vvop, eris_vooo_C
def _convert_to_int(kpt_indices):
    '''Convert all kpoint indices for 3-particle operator to integers.

    Zero-dimensional ndarrays are unwrapped to plain ints; ints, numpy
    integer scalars, lists and higher-dimensional arrays pass through
    unchanged.
    '''
    out_indices = [0] * 6
    for ix, x in enumerate(kpt_indices):
        # np.integer (the numpy scalar-int base class) replaces the old
        # `np.int` alias, which was removed in numpy >= 1.24 and, being an
        # alias of builtin int, never matched numpy scalars anyway.
        assert isinstance(x, (int, np.integer, np.ndarray, list))
        if isinstance(x, np.ndarray) and (x.ndim == 0):
            out_indices[ix] = int(x)
        else:
            out_indices[ix] = x
    return out_indices
def _tile_list(kpt_indices):
    '''Similar to a cartesian product but for a list of kpoint indices for
    a 3-particle operator.

    Scalars are broadcast to the length of the longest list entry; if no
    entry is a sequence the input is returned unchanged. Returns a list of
    per-sextet index lists.
    '''
    max_length = 0
    out_indices = [0] * 6
    for ix, x in enumerate(kpt_indices):
        if hasattr(x, '__len__'):
            max_length = max(max_length, len(x))

    if max_length == 0:
        return kpt_indices

    for ix, x in enumerate(kpt_indices):
        # np.integer instead of the np.int alias removed in numpy >= 1.24
        if isinstance(x, (int, np.integer)):
            out_indices[ix] = [x] * max_length
        else:
            out_indices[ix] = x
    # Materialize a list: under Python 3 the original returned a one-shot
    # `map` object, which downstream code subscripts (get_data_slices does
    # kpt_indices[0]) and would fail on.
    return [list(x) for x in zip(*out_indices)]
def zip_kpoints(kpt_indices):
    '''Similar to a cartesian product but for a list of kpoint indices for
    a 3-particle operator. Ensures all indices are integers.'''
    # Normalize 0-d arrays to ints, then broadcast scalars against any
    # list-valued entries.
    return _tile_list(_convert_to_int(kpt_indices))
def get_data_slices(kpt_indices, orb_indices, kconserv):
    '''Build the index tuples used to read slices of vvop, vooo and t2T for
    all six (a,b,c) permutations of each k-point sextet.

    Args:
        kpt_indices: [ki,kj,kk,ka,kb,kc]; entries may be ints or lists.
        orb_indices: (a0,a1,b0,b1,c0,c1) orbital slice boundaries.
        kconserv: momentum-conservation table.

    Returns:
        Four lists (vvop, vooo, t2T-for-vvop, t2T-for-vooo) of index tuples,
        six entries per k-point sextet, ordered by the permutations
        (0,1,2),(0,2,1),(1,0,2),(1,2,0),(2,0,1),(2,1,0) of (a,b,c).
    '''
    kpt_indices = zip_kpoints(kpt_indices)
    # np.integer replaces the np.int alias removed in numpy >= 1.24
    if isinstance(kpt_indices[0], (int, np.integer)):  # Ensure we are working
        kpt_indices = [kpt_indices]                    # with a list of lists
    a0,a1,b0,b1,c0,c1 = orb_indices
    length = len(kpt_indices)*6
    def _vijk_indices(kpt_indices, orb_indices, transpose=(0, 1, 2)):
        '''Get indices needed for t3 construction and a given transpose of (a,b,c).'''
        # Permute both the (i,j,k) and (a,b,c) k-points, and the matching
        # (start, stop) orbital pairs, by the same transpose
        kpt_indices = ([kpt_indices[x] for x in transpose] +
                       [kpt_indices[x+3] for x in transpose])
        orb_indices = lib.flatten([[orb_indices[2*x], orb_indices[2*x+1]]
                                   for x in transpose])
        ki, kj, kk, ka, kb, kc = kpt_indices
        a0, a1, b0, b1, c0, c1 = orb_indices
        kf = kconserv[ka,ki,kb]
        km = kconserv[kc,kk,kb]
        sl00 = slice(None, None)
        vvop_idx = [ka, kb, ki, slice(a0,a1), slice(b0,b1), sl00, sl00]
        vooo_idx = [ka, ki, kj, slice(a0,a1), sl00, sl00, sl00]
        t2T_vvop_idx = [kc, kf, kj, slice(c0,c1), sl00, sl00, sl00]
        t2T_vooo_idx = [kc, kb, km, slice(c0,c1), sl00, sl00, sl00]
        return vvop_idx, vooo_idx, t2T_vvop_idx, t2T_vooo_idx
    vvop_indices = [0] * length
    vooo_indices = [0] * length
    t2T_vvop_indices = [0] * length
    t2T_vooo_indices = [0] * length
    transpose = [(0, 1, 2), (0, 2, 1), (1, 0, 2),
                 (1, 2, 0), (2, 0, 1), (2, 1, 0)]
    count = 0
    for kpt in kpt_indices:
        for t in transpose:
            vvop_idx, vooo_idx, t2T_vvop_idx, t2T_vooo_idx = _vijk_indices(kpt, orb_indices, t)
            vvop_indices[count] = vvop_idx
            vooo_indices[count] = vooo_idx
            t2T_vvop_indices[count] = t2T_vvop_idx
            t2T_vooo_indices[count] = t2T_vooo_idx
            count += 1
    return vvop_indices, vooo_indices, t2T_vvop_indices, t2T_vooo_indices
def _get_epqr(pindices, qindices, rindices, fac=(1.0, 1.0, 1.0), large_num=LARGE_DENOM):
    '''Create a denominator

        fac[0]*e[kp,p0:p1] + fac[1]*e[kq,q0:q1] + fac[2]*e[kr,r0:r1]

    where padded elements have been replaced by a large number.

    Args:
        pindices (5-list of object):
            A list of p0, p1, kp, orbital values, and non-zero indices for the first
            denominator element.
        qindices (5-list of object):
            A list of q0, q1, kq, orbital values, and non-zero indices for the second
            denominator element.
        rindices (5-list of object):
            A list of r0, r1, kr, orbital values, and non-zero indices for the third
            denominator element.
        fac (3-sequence of float):
            Factors to multiply the three denominator elements.
        large_num (float):
            Number to replace the padded elements.

    Returns:
        ndarray of shape (p1-p0, q1-q0, r1-r0).
    '''
    def get_idx(x0, x1, kx, n0_p):
        # Mask of the non-padded orbitals of k-point kx lying in [x0, x1)
        return np.logical_and(n0_p[kx] >= x0, n0_p[kx] < x1)
    # Validate all three index descriptors (the original omitted rindices)
    assert all(len(x) == 5 for x in [pindices, qindices, rindices])
    p0,p1,kp,mo_e_p,nonzero_p = pindices
    q0,q1,kq,mo_e_q,nonzero_q = qindices
    r0,r1,kr,mo_e_r,nonzero_r = rindices
    fac_p, fac_q, fac_r = fac
    # Start with every entry "padded"; overwrite the valid sub-block below
    epqr = large_num * np.ones((p1-p0,q1-q0,r1-r0), dtype=mo_e_p[0].dtype)
    idxp = get_idx(p0,p1,kp,nonzero_p)
    idxq = get_idx(q0,q1,kq,nonzero_q)
    idxr = get_idx(r0,r1,kr,nonzero_r)
    n0_ovp_pqr = np.ix_(nonzero_p[kp][idxp]-p0, nonzero_q[kq][idxq]-q0, nonzero_r[kr][idxr]-r0)
    # Write the real orbital-energy sums only at non-padded positions
    epqr[n0_ovp_pqr] = lib.direct_sum('p,q,r->pqr', fac_p*mo_e_p[kp][p0:p1],
                                      fac_q*mo_e_q[kq][q0:q1],
                                      fac_r*mo_e_r[kr][r0:r1])[n0_ovp_pqr]
    return epqr
if __name__ == '__main__':
    from pyscf.pbc import gto
    from pyscf.pbc import scf
    from pyscf.pbc import cc
    # Diamond: two-atom carbon primitive cell with GTH pseudopotentials
    cell = gto.Cell()
    cell.atom = '''
C 0.000000000000 0.000000000000 0.000000000000
C 1.685068664391 1.685068664391 1.685068664391
'''
    cell.basis = 'gth-szv'
    cell.pseudo = 'gth-pade'
    cell.a = '''
0.000000000, 3.370137329, 3.370137329
3.370137329, 0.000000000, 3.370137329
3.370137329, 3.370137329, 0.000000000'''
    cell.unit = 'B'
    cell.verbose = 4
    cell.mesh = [24, 24, 24]
    cell.build()
    # 1x1x4 Monkhorst-Pack k-point mesh, shifted so Gamma is included
    nmp = [1,1,4]
    kpts = cell.make_kpts(nmp)
    kpts -= kpts[0]
    # k-point restricted Hartree-Fock, tight convergence, no exxdiv correction
    kmf = scf.KRHF(cell, kpts=kpts, exxdiv=None)
    kmf.conv_tol = 1e-12
    kmf.conv_tol_grad = 1e-12
    kmf.direct_scf_tol = 1e-16
    ehf = kmf.kernel()
    # k-point CCSD followed by the (T) correction implemented in this module
    mycc = cc.KRCCSD(kmf)
    eris = mycc.ao2mo()
    ecc, t1, t2 = mycc.kernel(eris=eris)
    energy_t = kernel(mycc, eris=eris, verbose=9)
    # Start of supercell calculations: a Gamma-point calculation on the
    # 1x1x4 supercell should reproduce the k-point result per unit cell
    from pyscf.pbc.tools.pbc import super_cell
    supcell = super_cell(cell, nmp)
    supcell.build()
    kmf = scf.RHF(supcell, exxdiv=None)
    kmf.conv_tol = 1e-12
    kmf.conv_tol_grad = 1e-12
    kmf.direct_scf_tol = 1e-16
    sup_ehf = kmf.kernel()
    myscc = cc.RCCSD(kmf)
    eris = myscc.ao2mo()
    sup_ecc, t1, t2 = myscc.kernel(eris=eris)
    sup_energy_t = myscc.ccsd_t(eris=eris)
    # Per-cell energies from the two routes should agree
    print("Kpoint CCSD: %20.16f" % ecc)
    print("Supercell CCSD: %20.16f" % (sup_ecc/np.prod(nmp)))
    print("Kpoint CCSD(T): %20.16f" % energy_t)
    print("Supercell CCSD(T): %20.16f" % (sup_energy_t/np.prod(nmp)))
|
gkc1000/pyscf
|
pyscf/pbc/cc/kccsd_t_rhf.py
|
Python
|
apache-2.0
| 28,391
|
[
"PySCF"
] |
0aa0179bce135e6f769dd0a0a13f05942b4bf1040908438c42245bc5bcefc6f7
|
# Copyright (c) Charl P. Botha, TU Delft
# All rights reserved.
# See COPYRIGHT for details.
import itk
import module_kits.itk_kit as itk_kit
from module_base import ModuleBase
from module_mixins import ScriptedConfigModuleMixin
class gradientMagnitudeGaussian(ScriptedConfigModuleMixin, ModuleBase):
def __init__(self, module_manager):
ModuleBase.__init__(self, module_manager)
self._config.gaussianSigma = 0.7
self._config.normaliseAcrossScale = False
configList = [
('Gaussian sigma', 'gaussianSigma', 'base:float', 'text',
'Sigma in terms of image spacing.'),
('Normalise across scale', 'normaliseAcrossScale', 'base:bool',
'checkbox', 'Determine normalisation factor.')]
# setup the pipeline
self._gradientMagnitude = None
img_type = itk.Image.F3
self._create_pipeline(img_type)
ScriptedConfigModuleMixin.__init__(
self, configList,
{'Module (self)' : self,
'itkGradientMagnitudeRecursiveGaussianImageFilter' :
self._gradientMagnitude})
self.sync_module_logic_with_config()
def close(self):
# we play it safe... (the graph_editor/module_manager should have
# disconnected us by now)
for input_idx in range(len(self.get_input_descriptions())):
self.set_input(input_idx, None)
# this will take care of all display thingies
ScriptedConfigModuleMixin.close(self)
# and the baseclass close
ModuleBase.close(self)
# remove all bindings
del self._gradientMagnitude
def execute_module(self):
self._gradientMagnitude.Update()
def get_input_descriptions(self):
return ('ITK Image (3D, float)',)
def set_input(self, idx, inputStream):
try:
self._gradientMagnitude.SetInput(inputStream)
except TypeError, e:
# deduce the type
itku = itk_kit.utils
ss = itku.get_img_type_and_dim_shortstring(inputStream)
img_type = getattr(itk.Image,ss)
# try to build a new pipeline (will throw exception if it
# can't)
self._create_pipeline(img_type)
# re-apply config
self.sync_module_logic_with_config()
# connect input and hope it works.
self._gradientMagnitude.SetInput(inputStream)
def get_output_descriptions(self):
return ('ITK Image',)
def get_output(self, idx):
return self._gradientMagnitude.GetOutput()
def config_to_logic(self):
self._gradientMagnitude.SetSigma(self._config.gaussianSigma)
self._gradientMagnitude.SetNormalizeAcrossScale(
self._config.normaliseAcrossScale)
def logic_to_config(self):
# durnit, there's no GetSigma(). Doh.
self._config.normaliseAcrossScale = self._gradientMagnitude.\
GetNormalizeAcrossScale()
def _create_pipeline(self, img_type):
"""Standard pattern to create ITK pipeline according to passed
image type.
"""
c = itk.GradientMagnitudeRecursiveGaussianImageFilter
try:
g = c[img_type, img_type].New()
except KeyError, e:
emsg = 'Could not create GradMag with input type %s. '\
'Please try a different input type.' % (ss,)
raise TypeError, emsg
# if successful, we can disconnect the old filter and store
# the instance (needed for the progress call!)
if self._gradientMagnitude:
self._gradientMagnitude.SetInput(None)
self._gradientMagnitude = g
itk_kit.utils.setupITKObjectProgress(
self, self._gradientMagnitude,
'itkGradientMagnitudeRecursiveGaussianImageFilter',
'Calculating gradient image')
|
nagyistoce/devide
|
modules/insight/gradientMagnitudeGaussian.py
|
Python
|
bsd-3-clause
| 4,029
|
[
"Gaussian"
] |
fb632b6e98ea2eff054164e3e7e8116432322c3c10947d2e6effe6330e7225a7
|
""" This is a test of the creation of the json dump file
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import unittest
import os
from diraccfg import CFG
from DIRAC.WorkloadManagementSystem.Utilities.PilotCStoJSONSynchronizer import PilotCStoJSONSynchronizer
from DIRAC.ConfigurationSystem.private.ConfigurationClient import ConfigurationClient
from DIRAC.ConfigurationSystem.Client.ConfigurationData import gConfigurationData
# pylint: disable=protected-access
class PilotCStoJSONSynchronizerTestCase(unittest.TestCase):
  """ Base class for the PilotCStoJSONSynchronizer test cases.

  Provides a throw-away dirac configuration file so the synchroniser runs
  against a fully known CS content, and restores pristine in-memory CFG
  objects around each test.
  """

  def setUp(self):
    """Write a temporary cfg file and point the configuration client at it."""
    # Creating test configuration file
    self.clearCFG()
    self.testCfgFileName = 'test.cfg'
    cfgContent = '''
DIRAC
{
  Setup=TestSetup
  Setups
  {
    TestSetup
    {
      WorkloadManagement=MyWM
    }
  }
}
Systems
{
  WorkloadManagement
  {
    MyWM
    {
      URLs
      {
        Service1 = dips://server1:1234/WorkloadManagement/Service1
        Service2 = dips://$MAINSERVERS$:5678/WorkloadManagement/Service2
      }
      FailoverURLs
      {
        Service2 = dips://failover1:5678/WorkloadManagement/Service2
      }
    }
  }
}
Operations
{
  Defaults
  {
    Pilot
    {
      Project = LHCb
      GenericPilotDN = /DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=doe/CN=111213/CN=Joe Doe
      GenericPilotGroup = xxx_pilot
    }
    MainServers = gw1, gw2
  }
}
Registry
{
  Users
  {
    ttester
    {
      DN = /DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=ttester/CN=696969/CN=Thomas Tester
      CA = /DC=ch/DC=cern/CN=CERN Grid Certification Authority
      Email = thomas.tester@cern.ch
    }
    franekbolek
    {
      DN = /DC=ch/DC=voodo/OU=Organic Units/OU=Users/CN=franekbolek/CN=111122/CN=Franek Bolek
      CA = /DC=ch/DC=voodo/CN=Voodo Grid Certification Authority
      Email = franek.bolek@voodo.pl
    }
  }
  Groups
  {
    lhcb_pilot
    {
      #@@-host - /DC=ch/DC=voodo/OU=computers/CN=brabra.voodo.pl
      Users = franekbolek
      Users += ttester
      Properties = GenericPilot
      Properties += LimitedDelegation
      VOMSRole = /lhcb/Role=pilot
      #@@-ggg@diracAdmin - 2015-07-07 13:40:55
      VO = lhcb
    }
  }
}
Resources
{
  Sites
  {
    Tests
    {
      Tests.Testing.tst
      {
        CEs
        {
          test1.Testing.tst
          {
            CEType = Tester
          }
        }
      }
    }
  }
}
'''
    with open(self.testCfgFileName, 'w') as f:
      f.write(cfgContent)
    # we replace the configuration by our own one.
    gConfig = ConfigurationClient(fileToLoadList=[self.testCfgFileName])
    self.setup = gConfig.getValue('/DIRAC/Setup', '')
    self.wm = gConfig.getValue('DIRAC/Setups/' + self.setup + '/WorkloadManagement', '')

  def tearDown(self):
    """Remove files produced by setUp/the synchroniser and reset the CFG."""
    for aFile in [self.testCfgFileName, 'pilot.json']:
      try:
        os.remove(aFile)
      except OSError:
        # the file may legitimately not have been created
        pass
    self.clearCFG()

  @staticmethod
  def clearCFG():
    """SUPER UGLY: one must recreate the CFG objects of gConfigurationData
    not to conflict with other tests that might be using a local dirac.cfg"""
    gConfigurationData.localCFG = CFG()
    gConfigurationData.remoteCFG = CFG()
    gConfigurationData.mergedCFG = CFG()
    gConfigurationData.generateNewVersion()
class Test_PilotCStoJSONSynchronizer_sync(PilotCStoJSONSynchronizerTestCase):
  """Tests of the CS-to-JSON synchronisation entry point."""

  def test_success(self):
    """getCSDict must succeed both with and without the master CS."""
    synchroniser = PilotCStoJSONSynchronizer()

    result = synchroniser.getCSDict()
    assert result['OK'], result['Message']

    result = synchroniser.getCSDict(includeMasterCS=False)
    assert result['OK'], result['Message']
if __name__ == '__main__':
  # Run the base-class tests first, then the synchronisation tests
  suite = unittest.defaultTestLoader.loadTestsFromTestCase(PilotCStoJSONSynchronizerTestCase)
  suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(Test_PilotCStoJSONSynchronizer_sync))
  testResult = unittest.TextTestRunner(verbosity=2).run(suite)
|
yujikato/DIRAC
|
src/DIRAC/WorkloadManagementSystem/Utilities/test/Test_PilotCStoJSONSynchronizer.py
|
Python
|
gpl-3.0
| 4,327
|
[
"DIRAC"
] |
7a21c56e393c98fd70bc0057763ce0c5f198bb6771965a5a7bc6a57a5c47ef9a
|
# -*- coding: utf-8 -*-
"""
discord.ext.colours.crayon
~~~~~~~~~~~~~~~~~~~~~
An extension class to extend discord.py's colour class with XKCD colour presets.
( From https://xkcd.com/color/rgb/ )
:license: GPL, see LICENSE for more details.
"""
from discord.colour import Colour
class XKCDColour(Colour):
"""Represents a Discord role colour with XKCD colour poll presets.
For a full list of color codes visit https://xkcd.com/color/rgb/
"""
@classmethod
def rust(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa83c09``."""
return cls(0xa83c09)
@classmethod
def jade(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x1fa774``."""
return cls(0x1fa774)
@classmethod
def ice(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xd6fffa``."""
return cls(0xd6fffa)
@classmethod
def burgundy(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x610023``."""
return cls(0x610023)
@classmethod
def pastel_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb0ff9d``."""
return cls(0xb0ff9d)
@classmethod
def caramel(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xaf6f09``."""
return cls(0xaf6f09)
@classmethod
def mauve(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xae7181``."""
return cls(0xae7181)
@classmethod
def nice_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x107ab0``."""
return cls(0x107ab0)
@classmethod
def pinkish_grey(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc8aca9``."""
return cls(0xc8aca9)
@classmethod
def purply_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x661aee``."""
return cls(0x661aee)
@classmethod
def sand_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfce166``."""
return cls(0xfce166)
@classmethod
def purplish_grey(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x7a687f``."""
return cls(0x7a687f)
@classmethod
def warm_grey(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x978a84``."""
return cls(0x978a84)
@classmethod
def dark_blue_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x005249``."""
return cls(0x005249)
@classmethod
def slate(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x516572``."""
return cls(0x516572)
@classmethod
def mid_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x50a747``."""
return cls(0x50a747)
@classmethod
def light_grass_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x9af764``."""
return cls(0x9af764)
@classmethod
def milk_chocolate(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x7f4e1e``."""
return cls(0x7f4e1e)
@classmethod
def neon_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfe019a``."""
return cls(0xfe019a)
@classmethod
def blue_with_a_hint_of_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x533cc6``."""
return cls(0x533cc6)
@classmethod
def bright_lime(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x87fd05``."""
return cls(0x87fd05)
@classmethod
def brownish_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc9b003``."""
return cls(0xc9b003)
@classmethod
def pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xff81c0``."""
return cls(0xff81c0)
@classmethod
def stormy_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x507b9c``."""
return cls(0x507b9c)
@classmethod
def piss_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xddd618``."""
return cls(0xddd618)
@classmethod
def gross_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa0bf16``."""
return cls(0xa0bf16)
@classmethod
def kiwi_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x8ee53f``."""
return cls(0x8ee53f)
@classmethod
def pistachio(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc0fa8b``."""
return cls(0xc0fa8b)
@classmethod
def pastel_orange(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xff964f``."""
return cls(0xff964f)
@classmethod
def claret(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x680018``."""
return cls(0x680018)
@classmethod
def shamrock_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x02c14d``."""
return cls(0x02c14d)
@classmethod
def azure(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x069af3``."""
return cls(0x069af3)
@classmethod
def bubble_gum_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xff69af``."""
return cls(0xff69af)
@classmethod
def greeny_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x42b395``."""
return cls(0x42b395)
@classmethod
def rust_orange(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc45508``."""
return cls(0xc45508)
@classmethod
def light_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xbf77f6``."""
return cls(0xbf77f6)
@classmethod
def toxic_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x61de2a``."""
return cls(0x61de2a)
@classmethod
def mustard(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xceb301``."""
return cls(0xceb301)
@classmethod
def light_light_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc8ffb0``."""
return cls(0xc8ffb0)
@classmethod
def cinnamon(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xac4f06``."""
return cls(0xac4f06)
@classmethod
def battleship_grey(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x6b7c85``."""
return cls(0x6b7c85)
@classmethod
def blood_orange(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfe4b03``."""
return cls(0xfe4b03)
@classmethod
def very_light_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xd3b683``."""
return cls(0xd3b683)
@classmethod
def dark_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xcb416b``."""
return cls(0xcb416b)
@classmethod
def denim(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x3b638c``."""
return cls(0x3b638c)
@classmethod
def brown_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x922b05``."""
return cls(0x922b05)
@classmethod
def dusty_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xd58a94``."""
return cls(0xd58a94)
@classmethod
def apricot(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xffb16d``."""
return cls(0xffb16d)
@classmethod
def red_orange(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfd3c06``."""
return cls(0xfd3c06)
@classmethod
def slate_grey(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x59656d``."""
return cls(0x59656d)
@classmethod
def vibrant_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xad03de``."""
return cls(0xad03de)
@classmethod
def murky_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x6c7a0e``."""
return cls(0x6c7a0e)
@classmethod
def booger_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x96b403``."""
return cls(0x96b403)
@classmethod
def purpleish_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xdf4ec8``."""
return cls(0xdf4ec8)
@classmethod
def chocolate_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x411900``."""
return cls(0x411900)
@classmethod
def chestnut(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x742802``."""
return cls(0x742802)
@classmethod
def burnt_siena(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb75203``."""
return cls(0xb75203)
@classmethod
def rust_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x8b3103``."""
return cls(0x8b3103)
@classmethod
def light_cyan(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xacfffc``."""
return cls(0xacfffc)
@classmethod
def greenish_beige(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc9d179``."""
return cls(0xc9d179)
@classmethod
def bright_lavender(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc760ff``."""
return cls(0xc760ff)
@classmethod
def aqua_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x12e193``."""
return cls(0x12e193)
@classmethod
def dark_indigo(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x1f0954``."""
return cls(0x1f0954)
@classmethod
def grey_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x826d8c``."""
return cls(0x826d8c)
@classmethod
def light_light_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xcafffb``."""
return cls(0xcafffb)
@classmethod
def dark_aqua(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x05696b``."""
return cls(0x05696b)
@classmethod
def light_eggplant(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x894585``."""
return cls(0x894585)
@classmethod
def baby_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xffb7ce``."""
return cls(0xffb7ce)
@classmethod
def true_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x089404``."""
return cls(0x089404)
@classmethod
def pea_soup_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x94a617``."""
return cls(0x94a617)
@classmethod
def vomit_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc7c10c``."""
return cls(0xc7c10c)
@classmethod
def dusty_lavender(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xac86a8``."""
return cls(0xac86a8)
@classmethod
def light_khaki(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xe6f2a2``."""
return cls(0xe6f2a2)
@classmethod
def light_mint_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa6fbb2``."""
return cls(0xa6fbb2)
@classmethod
def boring_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x63b365``."""
return cls(0x63b365)
@classmethod
def wintergreen(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x20f986``."""
return cls(0x20f986)
@classmethod
def wisteria(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa87dc2``."""
return cls(0xa87dc2)
@classmethod
def grey_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x7f7053``."""
return cls(0x7f7053)
@classmethod
def dark_lilac(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x9c6da5``."""
return cls(0x9c6da5)
@classmethod
def purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x7e1e9c``."""
return cls(0x7e1e9c)
@classmethod
def yellow_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc8fd3d``."""
return cls(0xc8fd3d)
@classmethod
def light_grey(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xd8dcd6``."""
return cls(0xd8dcd6)
@classmethod
def bluegreen(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x017a79``."""
return cls(0x017a79)
@classmethod
def deep_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x02590f``."""
return cls(0x02590f)
@classmethod
def leafy_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x51b73b``."""
return cls(0x51b73b)
@classmethod
def olive(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x6e750e``."""
return cls(0x6e750e)
@classmethod
def watermelon(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfd4659``."""
return cls(0xfd4659)
@classmethod
def orangey_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfdb915``."""
return cls(0xfdb915)
@classmethod
def mud_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x606602``."""
return cls(0x606602)
@classmethod
def flat_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x699d4c``."""
return cls(0x699d4c)
@classmethod
def greenish_teal(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x32bf84``."""
return cls(0x32bf84)
@classmethod
def orange_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfd411e``."""
return cls(0xfd411e)
@classmethod
def red_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfa2a55``."""
return cls(0xfa2a55)
@classmethod
def silver(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc5c9c7``."""
return cls(0xc5c9c7)
@classmethod
def seaweed_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x35ad6b``."""
return cls(0x35ad6b)
@classmethod
def barney(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xac1db8``."""
return cls(0xac1db8)
@classmethod
def bright_sea_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x05ffa6``."""
return cls(0x05ffa6)
@classmethod
def very_dark_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x062e03``."""
return cls(0x062e03)
@classmethod
def camo_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x526525``."""
return cls(0x526525)
@classmethod
def dusty_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x825f87``."""
return cls(0x825f87)
@classmethod
def dark_olive_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x3c4d03``."""
return cls(0x3c4d03)
@classmethod
def windows_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x3778bf``."""
return cls(0x3778bf)
@classmethod
def dark_tan(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xaf884a``."""
return cls(0xaf884a)
@classmethod
def nasty_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x70b23f``."""
return cls(0x70b23f)
@classmethod
def dark_cream(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfff39a``."""
return cls(0xfff39a)
@classmethod
def yellowgreen(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xbbf90f``."""
return cls(0xbbf90f)
@classmethod
def warm_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x952e8f``."""
return cls(0x952e8f)
@classmethod
def dirty_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x667e2c``."""
return cls(0x667e2c)
@classmethod
def camouflage_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x4b6113``."""
return cls(0x4b6113)
@classmethod
def orangered(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfe420f``."""
return cls(0xfe420f)
@classmethod
def brown_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x706c11``."""
return cls(0x706c11)
@classmethod
def very_dark_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x1d0200``."""
return cls(0x1d0200)
@classmethod
def grey_teal(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x5e9b8a``."""
return cls(0x5e9b8a)
@classmethod
def apple_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x76cd26``."""
return cls(0x76cd26)
@classmethod
def cadet_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x4e7496``."""
return cls(0x4e7496)
@classmethod
def midnight(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x03012d``."""
return cls(0x03012d)
@classmethod
def bright_lilac(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc95efb``."""
return cls(0xc95efb)
@classmethod
def bright_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x01ff07``."""
return cls(0x01ff07)
@classmethod
def taupe(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb9a281``."""
return cls(0xb9a281)
@classmethod
def powder_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xffb2d0``."""
return cls(0xffb2d0)
@classmethod
def light_pastel_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb2fba5``."""
return cls(0xb2fba5)
@classmethod
def puke_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x9aae07``."""
return cls(0x9aae07)
@classmethod
def canary_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfffe40``."""
return cls(0xfffe40)
@classmethod
def blood(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x770001``."""
return cls(0x770001)
@classmethod
def mocha(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x9d7651``."""
return cls(0x9d7651)
@classmethod
def dark(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x1b2431``."""
return cls(0x1b2431)
@classmethod
def dark_cyan(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x0a888a``."""
return cls(0x0a888a)
@classmethod
def dark_sand(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa88f59``."""
return cls(0xa88f59)
@classmethod
def lightish_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa552e6``."""
return cls(0xa552e6)
@classmethod
def sun_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xffdf22``."""
return cls(0xffdf22)
@classmethod
def cherry_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xf7022a``."""
return cls(0xf7022a)
@classmethod
def toupe(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc7ac7d``."""
return cls(0xc7ac7d)
@classmethod
def light_tan(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfbeeac``."""
return cls(0xfbeeac)
@classmethod
def pale_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb1916e``."""
return cls(0xb1916e)
@classmethod
def dark_sage(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x598556``."""
return cls(0x598556)
@classmethod
def golden_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfec615``."""
return cls(0xfec615)
@classmethod
def racing_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x014600``."""
return cls(0x014600)
@classmethod
def vivid_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x9900fa``."""
return cls(0x9900fa)
@classmethod
def fresh_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x69d84f``."""
return cls(0x69d84f)
@classmethod
def burnt_umber(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa0450e``."""
return cls(0xa0450e)
@classmethod
def deep_sea_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x015482``."""
return cls(0x015482)
@classmethod
def duck_egg_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc3fbf4``."""
return cls(0xc3fbf4)
@classmethod
def maize(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xf4d054``."""
return cls(0xf4d054)
@classmethod
def sunny_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfff917``."""
return cls(0xfff917)
@classmethod
def khaki(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xaaa662``."""
return cls(0xaaa662)
@classmethod
def dull_teal(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x5f9e8f``."""
return cls(0x5f9e8f)
@classmethod
def dark_coral(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xcf524e``."""
return cls(0xcf524e)
@classmethod
def baby_poop_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x8f9805``."""
return cls(0x8f9805)
@classmethod
def light_aquamarine(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x7bfdc7``."""
return cls(0x7bfdc7)
@classmethod
def lightish_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x3d7afd``."""
return cls(0x3d7afd)
@classmethod
def brownish_grey(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x86775f``."""
return cls(0x86775f)
@classmethod
def bluey_grey(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x89a0b0``."""
return cls(0x89a0b0)
@classmethod
def cornflower(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x6a79f7``."""
return cls(0x6a79f7)
@classmethod
def dirty_orange(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc87606``."""
return cls(0xc87606)
@classmethod
def straw(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfcf679``."""
return cls(0xfcf679)
@classmethod
def sage_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x88b378``."""
return cls(0x88b378)
@classmethod
def acid_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x8ffe09``."""
return cls(0x8ffe09)
@classmethod
def bluish_grey(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x748b97``."""
return cls(0x748b97)
@classmethod
def pale_rose(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfdc1c5``."""
return cls(0xfdc1c5)
@classmethod
def ultramarine_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x1805db``."""
return cls(0x1805db)
@classmethod
def neon_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xcfff04``."""
return cls(0xcfff04)
@classmethod
def light_neon_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x4efd54``."""
return cls(0x4efd54)
@classmethod
def bottle_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x044a05``."""
return cls(0x044a05)
@classmethod
def twilight(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x4e518b``."""
return cls(0x4e518b)
@classmethod
def poop_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x7a5901``."""
return cls(0x7a5901)
@classmethod
def eggplant(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x380835``."""
return cls(0x380835)
@classmethod
def fuchsia(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xed0dd9``."""
return cls(0xed0dd9)
@classmethod
def cool_grey(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x95a3a6``."""
return cls(0x95a3a6)
@classmethod
def sandstone(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc9ae74``."""
return cls(0xc9ae74)
@classmethod
def vomit(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa2a415``."""
return cls(0xa2a415)
@classmethod
def dark_aquamarine(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x017371``."""
return cls(0x017371)
@classmethod
def pinky_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc94cbe``."""
return cls(0xc94cbe)
@classmethod
def washed_out_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xbcf5a6``."""
return cls(0xbcf5a6)
@classmethod
def dark_orange(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc65102``."""
return cls(0xc65102)
    # NOTE(review): dead code — a second ``grey_blue`` is defined later in this
    # class (value ``0x647d8e``). In a class body the later ``def`` rebinds the
    # name, so this ``0x6b8ba4`` variant is unreachable and callers can never
    # obtain it. One of the two duplicates should be renamed (they likely came
    # from distinct source names, e.g. "grey blue" vs "grey/blue").
    @classmethod
    def grey_blue(cls):
        """A factory method that returns a :class:`Colour` with a value of ``0x6b8ba4``."""
        return cls(0x6b8ba4)
@classmethod
def dull_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xd5869d``."""
return cls(0xd5869d)
@classmethod
def very_dark_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x2a0134``."""
return cls(0x2a0134)
@classmethod
def tan_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xab7e4c``."""
return cls(0xab7e4c)
@classmethod
def sea(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x3c9992``."""
return cls(0x3c9992)
@classmethod
def slate_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x5b7c99``."""
return cls(0x5b7c99)
@classmethod
def mint(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x9ffeb0``."""
return cls(0x9ffeb0)
@classmethod
def dull_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x49759c``."""
return cls(0x49759c)
@classmethod
def lightblue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x7bc8f6``."""
return cls(0x7bc8f6)
@classmethod
def light_turquoise(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x7ef4cc``."""
return cls(0x7ef4cc)
@classmethod
def purply_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xf075e6``."""
return cls(0xf075e6)
@classmethod
def dark_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x341c02``."""
return cls(0x341c02)
@classmethod
def drab(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x828344``."""
return cls(0x828344)
@classmethod
def orangish_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb25f03``."""
return cls(0xb25f03)
@classmethod
def vivid_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x2fef10``."""
return cls(0x2fef10)
@classmethod
def light_grey_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x9dbcd4``."""
return cls(0x9dbcd4)
@classmethod
def mushroom(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xba9e88``."""
return cls(0xba9e88)
@classmethod
def royal_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x4b006e``."""
return cls(0x4b006e)
@classmethod
def pale_mauve(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfed0fc``."""
return cls(0xfed0fc)
@classmethod
def dark_fuchsia(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x9d0759``."""
return cls(0x9d0759)
@classmethod
def pastel_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xdb5856``."""
return cls(0xdb5856)
@classmethod
def golden_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb27a01``."""
return cls(0xb27a01)
@classmethod
def marigold(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfcc006``."""
return cls(0xfcc006)
@classmethod
def light_yellow_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xccfd7f``."""
return cls(0xccfd7f)
@classmethod
def greyish_teal(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x719f91``."""
return cls(0x719f91)
@classmethod
def pale_turquoise(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa5fbd5``."""
return cls(0xa5fbd5)
@classmethod
def cool_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x33b864``."""
return cls(0x33b864)
@classmethod
def dark_violet(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x34013f``."""
return cls(0x34013f)
@classmethod
def orangey_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb16002``."""
return cls(0xb16002)
@classmethod
def pale_salmon(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xffb19a``."""
return cls(0xffb19a)
@classmethod
def butterscotch(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfdb147``."""
return cls(0xfdb147)
    # NOTE(review): duplicate method name — this rebinds ``grey_blue``, which is
    # already defined earlier in this class with value ``0x6b8ba4``. This later
    # definition wins, so ``grey_blue()`` returns ``0x647d8e`` and the earlier
    # colour is silently lost. Rename one of the duplicates to expose both.
    @classmethod
    def grey_blue(cls):
        """A factory method that returns a :class:`Colour` with a value of ``0x647d8e``."""
        return cls(0x647d8e)
@classmethod
def viridian(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x1e9167``."""
return cls(0x1e9167)
@classmethod
def clay(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb66a50``."""
return cls(0xb66a50)
@classmethod
def light_gold(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfddc5c``."""
return cls(0xfddc5c)
@classmethod
def pale_orange(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xffa756``."""
return cls(0xffa756)
@classmethod
def violet_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfb5ffc``."""
return cls(0xfb5ffc)
@classmethod
def browny_orange(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xca6b02``."""
return cls(0xca6b02)
@classmethod
def greyblue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x77a1b5``."""
return cls(0x77a1b5)
@classmethod
def greyish_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x7a6a4f``."""
return cls(0x7a6a4f)
@classmethod
def indigo(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x380282``."""
return cls(0x380282)
@classmethod
def pale_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xd9544d``."""
return cls(0xd9544d)
@classmethod
def barf_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x94ac02``."""
return cls(0x94ac02)
@classmethod
def dusk(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x4e5481``."""
return cls(0x4e5481)
@classmethod
def dark_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xd5b60a``."""
return cls(0xd5b60a)
@classmethod
def coffee(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa6814c``."""
return cls(0xa6814c)
@classmethod
def really_light_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xd4ffff``."""
return cls(0xd4ffff)
@classmethod
def light_burgundy(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa8415b``."""
return cls(0xa8415b)
@classmethod
def leaf(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x71aa34``."""
return cls(0x71aa34)
@classmethod
def chartreuse(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc1f80a``."""
return cls(0xc1f80a)
@classmethod
def cream(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xffffc2``."""
return cls(0xffffc2)
@classmethod
def icky_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x8fae22``."""
return cls(0x8fae22)
@classmethod
def sandy_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfdee73``."""
return cls(0xfdee73)
@classmethod
def light_periwinkle(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc1c6fc``."""
return cls(0xc1c6fc)
@classmethod
def mango(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xffa62b``."""
return cls(0xffa62b)
@classmethod
def sand(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xe2ca76``."""
return cls(0xe2ca76)
@classmethod
def dark_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x033500``."""
return cls(0x033500)
@classmethod
def royal_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x0504aa``."""
return cls(0x0504aa)
@classmethod
def emerald_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x028f1e``."""
return cls(0x028f1e)
@classmethod
def khaki_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x728639``."""
return cls(0x728639)
@classmethod
def orangeish(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfd8d49``."""
return cls(0xfd8d49)
@classmethod
def dusty_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x5a86ad``."""
return cls(0x5a86ad)
@classmethod
def light_violet(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xd6b4fc``."""
return cls(0xd6b4fc)
@classmethod
def blood_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x980002``."""
return cls(0x980002)
@classmethod
def fluro_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x0aff02``."""
return cls(0x0aff02)
@classmethod
def dirt(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x8a6e45``."""
return cls(0x8a6e45)
@classmethod
def strong_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xff0789``."""
return cls(0xff0789)
@classmethod
def hunter_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x0b4008``."""
return cls(0x0b4008)
@classmethod
def charcoal_grey(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x3c4142``."""
return cls(0x3c4142)
@classmethod
def brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x653700``."""
return cls(0x653700)
@classmethod
def burnt_orange(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc04e01``."""
return cls(0xc04e01)
@classmethod
def rusty_orange(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xcd5909``."""
return cls(0xcd5909)
@classmethod
def baby_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa2cffe``."""
return cls(0xa2cffe)
@classmethod
def tomato_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xec2d01``."""
return cls(0xec2d01)
@classmethod
def steel_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x5a7d9a``."""
return cls(0x5a7d9a)
@classmethod
def celadon(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xbefdb7``."""
return cls(0xbefdb7)
@classmethod
def steel_grey(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x6f828a``."""
return cls(0x6f828a)
@classmethod
def aqua_marine(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x2ee8bb``."""
return cls(0x2ee8bb)
@classmethod
def medium_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x2c6fbb``."""
return cls(0x2c6fbb)
@classmethod
def puke(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa5a502``."""
return cls(0xa5a502)
@classmethod
def sandy(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xf1da7a``."""
return cls(0xf1da7a)
@classmethod
def dark_slate_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x214761``."""
return cls(0x214761)
@classmethod
def dusty_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x76a973``."""
return cls(0x76a973)
@classmethod
def deep_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x9a0200``."""
return cls(0x9a0200)
@classmethod
def bile(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb5c306``."""
return cls(0xb5c306)
@classmethod
def purpley(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x8756e4``."""
return cls(0x8756e4)
@classmethod
def light_teal(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x90e4c1``."""
return cls(0x90e4c1)
@classmethod
def pale_cyan(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb7fffa``."""
return cls(0xb7fffa)
@classmethod
def peach(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xffb07c``."""
return cls(0xffb07c)
@classmethod
def light_peach(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xffd8b1``."""
return cls(0xffd8b1)
@classmethod
def rose(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xcf6275``."""
return cls(0xcf6275)
@classmethod
def soft_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x6fc276``."""
return cls(0x6fc276)
@classmethod
def warm_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x964e02``."""
return cls(0x964e02)
@classmethod
def pinkish_tan(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xd99b82``."""
return cls(0xd99b82)
@classmethod
def bubblegum_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfe83cc``."""
return cls(0xfe83cc)
@classmethod
def buff(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfef69e``."""
return cls(0xfef69e)
@classmethod
def bright_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xff000d``."""
return cls(0xff000d)
@classmethod
def plum(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x580f41``."""
return cls(0x580f41)
@classmethod
def medium_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x9e43a2``."""
return cls(0x9e43a2)
@classmethod
def wheat(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfbdd7e``."""
return cls(0xfbdd7e)
@classmethod
def burnt_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x9f2305``."""
return cls(0x9f2305)
@classmethod
def crimson(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x8c000f``."""
return cls(0x8c000f)
@classmethod
def ugly_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xcd7584``."""
return cls(0xcd7584)
@classmethod
def turquoise_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x06b1c4``."""
return cls(0x06b1c4)
@classmethod
def blush(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xf29e8e``."""
return cls(0xf29e8e)
@classmethod
def orangey_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfa4224``."""
return cls(0xfa4224)
@classmethod
def key_lime(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xaeff6e``."""
return cls(0xaeff6e)
@classmethod
def lemon_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfdff38``."""
return cls(0xfdff38)
@classmethod
def kelley_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x009337``."""
return cls(0x009337)
@classmethod
def dark_hot_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xd90166``."""
return cls(0xd90166)
@classmethod
def baby_poop(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x937c00``."""
return cls(0x937c00)
@classmethod
def red_violet(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x9e0168``."""
return cls(0x9e0168)
@classmethod
def amethyst(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x9b5fc0``."""
return cls(0x9b5fc0)
@classmethod
def bruise(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x7e4071``."""
return cls(0x7e4071)
@classmethod
def baby_puke_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb6c406``."""
return cls(0xb6c406)
@classmethod
def beige(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xe6daa6``."""
return cls(0xe6daa6)
@classmethod
def ocre(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc69c04``."""
return cls(0xc69c04)
@classmethod
def muted_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x3b719f``."""
return cls(0x3b719f)
@classmethod
def puke_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc2be0e``."""
return cls(0xc2be0e)
@classmethod
def burple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x6832e3``."""
return cls(0x6832e3)
@classmethod
def lightish_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x61e160``."""
return cls(0x61e160)
@classmethod
def greenish_grey(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x96ae8d``."""
return cls(0x96ae8d)
@classmethod
def butter(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xffff81``."""
return cls(0xffff81)
@classmethod
def cerulean_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x056eee``."""
return cls(0x056eee)
@classmethod
def pinky(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfc86aa``."""
return cls(0xfc86aa)
@classmethod
def reddish_grey(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x997570``."""
return cls(0x997570)
@classmethod
def light_mustard(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xf7d560``."""
return cls(0xf7d560)
@classmethod
def faded_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x916e99``."""
return cls(0x916e99)
@classmethod
def wine(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x80013f``."""
return cls(0x80013f)
@classmethod
def bordeaux(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x7b002c``."""
return cls(0x7b002c)
@classmethod
def coral_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xff6163``."""
return cls(0xff6163)
@classmethod
def cool_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x4984b8``."""
return cls(0x4984b8)
@classmethod
def petrol(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x005f6a``."""
return cls(0x005f6a)
@classmethod
def hot_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xcb00f5``."""
return cls(0xcb00f5)
@classmethod
def violet_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x510ac9``."""
return cls(0x510ac9)
@classmethod
def iris(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x6258c4``."""
return cls(0x6258c4)
@classmethod
def light_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xff474c``."""
return cls(0xff474c)
@classmethod
def purpley_grey(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x947e94``."""
return cls(0x947e94)
@classmethod
def fire_engine_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfe0002``."""
return cls(0xfe0002)
@classmethod
def camel(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc69f59``."""
return cls(0xc69f59)
@classmethod
def vivid_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x152eff``."""
return cls(0x152eff)
@classmethod
def lightgreen(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x76ff7b``."""
return cls(0x76ff7b)
@classmethod
def sky(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x82cafc``."""
return cls(0x82cafc)
@classmethod
def pig_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xe78ea5``."""
return cls(0xe78ea5)
@classmethod
def ultramarine(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x2000b1``."""
return cls(0x2000b1)
@classmethod
def dark_gold(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb59410``."""
return cls(0xb59410)
@classmethod
def brick(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa03623``."""
return cls(0xa03623)
@classmethod
def electric_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xaa23ff``."""
return cls(0xaa23ff)
@classmethod
def diarrhea(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x9f8303``."""
return cls(0x9f8303)
@classmethod
def dark_maroon(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x3c0008``."""
return cls(0x3c0008)
@classmethod
def light_navy_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x2e5a88``."""
return cls(0x2e5a88)
@classmethod
def light_magenta(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfa5ff7``."""
return cls(0xfa5ff7)
@classmethod
def kelly_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x02ab2e``."""
return cls(0x02ab2e)
@classmethod
def mustard_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xac7e04``."""
return cls(0xac7e04)
@classmethod
def green_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x544e03``."""
return cls(0x544e03)
@classmethod
def pea_soup(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x929901``."""
return cls(0x929901)
@classmethod
def orange_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xffad01``."""
return cls(0xffad01)
@classmethod
def dull_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x84597e``."""
return cls(0x84597e)
@classmethod
def macaroni_and_cheese(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xefb435``."""
return cls(0xefb435)
@classmethod
def pale_lavender(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xeecffe``."""
return cls(0xeecffe)
@classmethod
def light_seafoam_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa7ffb5``."""
return cls(0xa7ffb5)
@classmethod
def auburn(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x9a3001``."""
return cls(0x9a3001)
@classmethod
def electric_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x21fc0d``."""
return cls(0x21fc0d)
@classmethod
def dark_rose(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb5485d``."""
return cls(0xb5485d)
@classmethod
def grass_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x3f9b0b``."""
return cls(0x3f9b0b)
@classmethod
def greenish_turquoise(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x00fbb0``."""
return cls(0x00fbb0)
@classmethod
def brown_orange(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb96902``."""
return cls(0xb96902)
@classmethod
def deep_sky_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x0d75f8``."""
return cls(0x0d75f8)
@classmethod
def dark_grey(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x363737``."""
return cls(0x363737)
@classmethod
def shit_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x7b5804``."""
return cls(0x7b5804)
@classmethod
def bluey_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x6241c7``."""
return cls(0x6241c7)
@classmethod
def bright_aqua(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x0bf9ea``."""
return cls(0x0bf9ea)
@classmethod
def off_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x6ba353``."""
return cls(0x6ba353)
@classmethod
def orange_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xff6f52``."""
return cls(0xff6f52)
@classmethod
def deep_turquoise(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x017374``."""
return cls(0x017374)
@classmethod
def blue(cls):
    """Return a :class:`Colour` whose raw value is ``0x0343df``."""
    raw = 0x0343df
    return cls(raw)
@classmethod
def sunflower(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xffc512``."""
return cls(0xffc512)
@classmethod
def dark_forest_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x002d04``."""
return cls(0x002d04)
@classmethod
def teal(cls):
    """Return a :class:`Colour` whose raw value is ``0x029386``."""
    raw = 0x029386
    return cls(raw)
@classmethod
def dirty_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xca7b80``."""
return cls(0xca7b80)
@classmethod
def french_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x436bad``."""
return cls(0x436bad)
@classmethod
def wine_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x7b0323``."""
return cls(0x7b0323)
@classmethod
def light_indigo(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x6d5acf``."""
return cls(0x6d5acf)
@classmethod
def bluish(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x2976bb``."""
return cls(0x2976bb)
@classmethod
def baby_shit_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x889717``."""
return cls(0x889717)
@classmethod
def squash(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xf2ab15``."""
return cls(0xf2ab15)
@classmethod
def cobalt_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x030aa7``."""
return cls(0x030aa7)
@classmethod
def greyish_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x5e819d``."""
return cls(0x5e819d)
@classmethod
def lime(cls):
    """Return a :class:`Colour` whose raw value is ``0xaaff32``."""
    raw = 0xaaff32
    return cls(raw)
@classmethod
def blue_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x137e6d``."""
return cls(0x137e6d)
@classmethod
def very_light_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xf6cefc``."""
return cls(0xf6cefc)
@classmethod
def blue_grey(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x607c8e``."""
return cls(0x607c8e)
@classmethod
def bright_teal(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x01f9c6``."""
return cls(0x01f9c6)
@classmethod
def tealish(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x24bca8``."""
return cls(0x24bca8)
@classmethod
def very_pale_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xcffdbc``."""
return cls(0xcffdbc)
@classmethod
def greeny_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc6f808``."""
return cls(0xc6f808)
@classmethod
def sand_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xcba560``."""
return cls(0xcba560)
@classmethod
def pine(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x2b5d34``."""
return cls(0x2b5d34)
@classmethod
def dandelion(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfedf08``."""
return cls(0xfedf08)
@classmethod
def pale_olive(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb9cc81``."""
return cls(0xb9cc81)
@classmethod
def swamp_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x748500``."""
return cls(0x748500)
@classmethod
def brick_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x8f1402``."""
return cls(0x8f1402)
@classmethod
def greenish_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xcdfd02``."""
return cls(0xcdfd02)
@classmethod
def tree_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x2a7e19``."""
return cls(0x2a7e19)
@classmethod
def poop(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x7f5e00``."""
return cls(0x7f5e00)
@classmethod
def blue_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x2242c7``."""
return cls(0x2242c7)
@classmethod
def brown_grey(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x8d8468``."""
return cls(0x8d8468)
@classmethod
def neon_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xbc13fe``."""
return cls(0xbc13fe)
@classmethod
def dark_olive(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x373e02``."""
return cls(0x373e02)
@classmethod
def bright_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfe01b1``."""
return cls(0xfe01b1)
@classmethod
def light_moss_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa6c875``."""
return cls(0xa6c875)
@classmethod
def lemon_lime(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xbffe28``."""
return cls(0xbffe28)
@classmethod
def deep_rose(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc74767``."""
return cls(0xc74767)
@classmethod
def dark_mauve(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x874c62``."""
return cls(0x874c62)
@classmethod
def purple_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x673a3f``."""
return cls(0x673a3f)
@classmethod
def dark_lime_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x7ebd01``."""
return cls(0x7ebd01)
@classmethod
def soft_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfdb0c0``."""
return cls(0xfdb0c0)
@classmethod
def chocolate(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x3d1c02``."""
return cls(0x3d1c02)
@classmethod
def grape_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x5d1451``."""
return cls(0x5d1451)
@classmethod
def purple_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x990147``."""
return cls(0x990147)
@classmethod
def greenish(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x40a368``."""
return cls(0x40a368)
@classmethod
def cyan(cls):
    """Return a :class:`Colour` whose raw value is ``0x00ffff``."""
    raw = 0x00ffff
    return cls(raw)
@classmethod
def dark_pastel_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x56ae57``."""
return cls(0x56ae57)
@classmethod
def pale_magenta(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xd767ad``."""
return cls(0xd767ad)
@classmethod
def shit_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x758000``."""
return cls(0x758000)
@classmethod
def faded_orange(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xf0944d``."""
return cls(0xf0944d)
@classmethod
def light_green_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x56fca2``."""
return cls(0x56fca2)
@classmethod
def pastel_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa2bffe``."""
return cls(0xa2bffe)
@classmethod
def terracotta(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xca6641``."""
return cls(0xca6641)
@classmethod
def purpleish_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x6140ef``."""
return cls(0x6140ef)
@classmethod
def ice_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xd7fffe``."""
return cls(0xd7fffe)
@classmethod
def dark_mint_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x20c073``."""
return cls(0x20c073)
@classmethod
def water_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x0e87cc``."""
return cls(0x0e87cc)
@classmethod
def light_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfffe7a``."""
return cls(0xfffe7a)
@classmethod
def pinkish_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb17261``."""
return cls(0xb17261)
@classmethod
def off_white(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xffffe4``."""
return cls(0xffffe4)
@classmethod
def greyish_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x82a67d``."""
return cls(0x82a67d)
@classmethod
def fluorescent_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x08ff08``."""
return cls(0x08ff08)
@classmethod
def deep_orange(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xdc4d01``."""
return cls(0xdc4d01)
@classmethod
def medium_grey(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x7d7f7c``."""
return cls(0x7d7f7c)
@classmethod
def white(cls):
    """Return a :class:`Colour` whose raw value is ``0xffffff``."""
    raw = 0xffffff
    return cls(raw)
@classmethod
def lime_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x89fe05``."""
return cls(0x89fe05)
@classmethod
def merlot(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x730039``."""
return cls(0x730039)
@classmethod
def desert(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xccad60``."""
return cls(0xccad60)
@classmethod
def lipstick_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc0022f``."""
return cls(0xc0022f)
@classmethod
def strawberry(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfb2943``."""
return cls(0xfb2943)
@classmethod
def pale_aqua(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb8ffeb``."""
return cls(0xb8ffeb)
@classmethod
def sandy_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc4a661``."""
return cls(0xc4a661)
@classmethod
def lemon(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfdff52``."""
return cls(0xfdff52)
@classmethod
def minty_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x0bf77d``."""
return cls(0x0bf77d)
@classmethod
def dark_lime(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x84b701``."""
return cls(0x84b701)
@classmethod
def gunmetal(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x536267``."""
return cls(0x536267)
@classmethod
def darkish_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x014182``."""
return cls(0x014182)
@classmethod
def periwinkle(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x8e82fe``."""
return cls(0x8e82fe)
@classmethod
def sky_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x75bbfd``."""
return cls(0x75bbfd)
@classmethod
def navy(cls):
    """Return a :class:`Colour` whose raw value is ``0x01153e``."""
    raw = 0x01153e
    return cls(raw)
@classmethod
def blue_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x5729ce``."""
return cls(0x5729ce)
@classmethod
def pale_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xffff84``."""
return cls(0xffff84)
@classmethod
def orangish_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xf43605``."""
return cls(0xf43605)
@classmethod
def dark_khaki(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x9b8f55``."""
return cls(0x9b8f55)
@classmethod
def powder_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb1d1fc``."""
return cls(0xb1d1fc)
@classmethod
def blue_violet(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x5d06e9``."""
return cls(0x5d06e9)
@classmethod
def sickly_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xd0e429``."""
return cls(0xd0e429)
@classmethod
def slime_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x99cc04``."""
return cls(0x99cc04)
@classmethod
def sickly_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x94b21c``."""
return cls(0x94b21c)
@classmethod
def brownish_orange(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xcb7723``."""
return cls(0xcb7723)
@classmethod
def aubergine(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x3d0734``."""
return cls(0x3d0734)
@classmethod
def forest(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x0b5509``."""
return cls(0x0b5509)
@classmethod
def light_olive_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa4be5c``."""
return cls(0xa4be5c)
@classmethod
def dirty_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x3f829d``."""
return cls(0x3f829d)
@classmethod
def purplish_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb0054b``."""
return cls(0xb0054b)
@classmethod
def parchment(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfefcaf``."""
return cls(0xfefcaf)
@classmethod
def cornflower_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x5170d7``."""
return cls(0x5170d7)
@classmethod
def yellow(cls):
    """Return a :class:`Colour` whose raw value is ``0xffff14``."""
    raw = 0xffff14
    return cls(raw)
@classmethod
def dark_taupe(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x7f684e``."""
return cls(0x7f684e)
@classmethod
def deep_violet(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x490648``."""
return cls(0x490648)
@classmethod
def dark_turquoise(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x045c5a``."""
return cls(0x045c5a)
@classmethod
def dirt_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x836539``."""
return cls(0x836539)
@classmethod
def indigo_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x3a18b1``."""
return cls(0x3a18b1)
@classmethod
def light_rose(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xffc5cb``."""
return cls(0xffc5cb)
@classmethod
def sea_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x53fca1``."""
return cls(0x53fca1)
@classmethod
def muted_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x5fa052``."""
return cls(0x5fa052)
@classmethod
def terra_cotta(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc9643b``."""
return cls(0xc9643b)
@classmethod
def candy_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xff63e9``."""
return cls(0xff63e9)
@classmethod
def ugly_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xd0c101``."""
return cls(0xd0c101)
@classmethod
def darkish_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x751973``."""
return cls(0x751973)
@classmethod
def stone(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xada587``."""
return cls(0xada587)
@classmethod
def pink_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xf5054f``."""
return cls(0xf5054f)
@classmethod
def seaweed(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x18d17b``."""
return cls(0x18d17b)
@classmethod
def reddish(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc44240``."""
return cls(0xc44240)
@classmethod
def earth(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa2653e``."""
return cls(0xa2653e)
@classmethod
def maroon(cls):
    """Return a :class:`Colour` whose raw value is ``0x650021``."""
    raw = 0x650021
    return cls(raw)
@classmethod
def putty(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xbeae8a``."""
return cls(0xbeae8a)
@classmethod
def muddy_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xbfac05``."""
return cls(0xbfac05)
@classmethod
def very_light_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xd1ffbd``."""
return cls(0xd1ffbd)
@classmethod
def mustard_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xd2bd0a``."""
return cls(0xd2bd0a)
@classmethod
def bright_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xbe03fd``."""
return cls(0xbe03fd)
@classmethod
def purplish(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x94568c``."""
return cls(0x94568c)
@classmethod
def kermit_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x5cb200``."""
return cls(0x5cb200)
@classmethod
def raw_sienna(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x9a6200``."""
return cls(0x9a6200)
@classmethod
def orchid(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc875c4``."""
return cls(0xc875c4)
@classmethod
def darkish_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x287c37``."""
return cls(0x287c37)
@classmethod
def yellowish(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfaee66``."""
return cls(0xfaee66)
@classmethod
def dark_yellow_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x728f02``."""
return cls(0x728f02)
@classmethod
def butter_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfffd74``."""
return cls(0xfffd74)
@classmethod
def celery(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc1fd95``."""
return cls(0xc1fd95)
@classmethod
def tan(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xd1b26f``."""
return cls(0xd1b26f)
@classmethod
def denim_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x3b5b92``."""
return cls(0x3b5b92)
@classmethod
def pale_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xffcfdc``."""
return cls(0xffcfdc)
@classmethod
def medium_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x7f5112``."""
return cls(0x7f5112)
@classmethod
def clay_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb2713d``."""
return cls(0xb2713d)
@classmethod
def leather(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xac7434``."""
return cls(0xac7434)
@classmethod
def shit(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x7f5f00``."""
return cls(0x7f5f00)
@classmethod
def adobe(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xbd6c48``."""
return cls(0xbd6c48)
@classmethod
def lavender_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x8b88f8``."""
return cls(0x8b88f8)
@classmethod
def slate_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x658d6d``."""
return cls(0x658d6d)
@classmethod
def very_dark_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x000133``."""
return cls(0x000133)
@classmethod
def midnight_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x020035``."""
return cls(0x020035)
@classmethod
def light_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x95d0fc``."""
return cls(0x95d0fc)
@classmethod
def canary(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfdff63``."""
return cls(0xfdff63)
@classmethod
def greyish(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa8a495``."""
return cls(0xa8a495)
@classmethod
def army_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x4b5d16``."""
return cls(0x4b5d16)
@classmethod
def sap_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x5c8b15``."""
return cls(0x5c8b15)
@classmethod
def ivory(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xffffcb``."""
return cls(0xffffcb)
@classmethod
def darkish_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa90308``."""
return cls(0xa90308)
@classmethod
def robin_egg_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x8af1fe``."""
return cls(0x8af1fe)
@classmethod
def light_bright_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x53fe5c``."""
return cls(0x53fe5c)
@classmethod
def deep_aqua(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x08787f``."""
return cls(0x08787f)
@classmethod
def pumpkin_orange(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfb7d07``."""
return cls(0xfb7d07)
@classmethod
def sage(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x87ae73``."""
return cls(0x87ae73)
@classmethod
def ochre(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xbf9005``."""
return cls(0xbf9005)
@classmethod
def gold(cls):
    """Return a new :class:`Colour` whose underlying value is ``0xdbb40c``."""
    return cls(14398476)  # 14398476 == 0xdbb40c
@classmethod
def dark_grey_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x29465b``."""
return cls(0x29465b)
@classmethod
def grey_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc3909b``."""
return cls(0xc3909b)
@classmethod
def dark_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x840000``."""
return cls(0x840000)
@classmethod
def orange_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xbe6400``."""
return cls(0xbe6400)
@classmethod
def teal_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x25a36f``."""
return cls(0x25a36f)
@classmethod
def greyish_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x887191``."""
return cls(0x887191)
@classmethod
def creme(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xffffb6``."""
return cls(0xffffb6)
@classmethod
def bright_light_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x2dfe54``."""
return cls(0x2dfe54)
@classmethod
def muted_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xd1768f``."""
return cls(0xd1768f)
@classmethod
def dark_teal(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x014d4e``."""
return cls(0x014d4e)
@classmethod
def faded_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xde9dac``."""
return cls(0xde9dac)
@classmethod
def apple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x6ecb3c``."""
return cls(0x6ecb3c)
@classmethod
def ocher(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xbf9b0c``."""
return cls(0xbf9b0c)
@classmethod
def dusky_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xcc7a8b``."""
return cls(0xcc7a8b)
@classmethod
def pale_peach(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xffe5ad``."""
return cls(0xffe5ad)
@classmethod
def ocean_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x3d9973``."""
return cls(0x3d9973)
@classmethod
def bright_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x0165fc``."""
return cls(0x0165fc)
@classmethod
def bright_olive(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x9cbb04``."""
return cls(0x9cbb04)
@classmethod
def bright_turquoise(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x0ffef9``."""
return cls(0x0ffef9)
@classmethod
def almost_black(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x070d0d``."""
return cls(0x070d0d)
@classmethod
def lavender(cls):
    """Build and return a :class:`Colour` for the lavender shade ``0xc79fef``."""
    raw = 0xC79FEF
    return cls(raw)
@classmethod
def cobalt(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x1e488f``."""
return cls(0x1e488f)
@classmethod
def pastel_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xffbacd``."""
return cls(0xffbacd)
@classmethod
def ugly_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa442a0``."""
return cls(0xa442a0)
@classmethod
def poison_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x40fd14``."""
return cls(0x40fd14)
@classmethod
def dark_peach(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xde7e5d``."""
return cls(0xde7e5d)
@classmethod
def aqua(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x13eac9``."""
return cls(0x13eac9)
@classmethod
def cement(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa5a391``."""
return cls(0xa5a391)
@classmethod
def eggshell_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc4fff7``."""
return cls(0xc4fff7)
@classmethod
def hot_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x25ff29``."""
return cls(0x25ff29)
@classmethod
def berry(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x990f4b``."""
return cls(0x990f4b)
@classmethod
def indian_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x850e04``."""
return cls(0x850e04)
@classmethod
def deep_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x410200``."""
return cls(0x410200)
@classmethod
def heather(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa484ac``."""
return cls(0xa484ac)
@classmethod
def fawn(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xcfaf7b``."""
return cls(0xcfaf7b)
@classmethod
def pear(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xcbf85f``."""
return cls(0xcbf85f)
@classmethod
def pure_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x0203e2``."""
return cls(0x0203e2)
@classmethod
def greeny_grey(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x7ea07a``."""
return cls(0x7ea07a)
@classmethod
def light_orange(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfdaa48``."""
return cls(0xfdaa48)
@classmethod
def light_lavender(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xdfc5fe``."""
return cls(0xdfc5fe)
@classmethod
def dried_blood(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x4b0101``."""
return cls(0x4b0101)
@classmethod
def banana_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfafe4b``."""
return cls(0xfafe4b)
@classmethod
def carnation(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfd798f``."""
return cls(0xfd798f)
@classmethod
def tiffany_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x7bf2da``."""
return cls(0x7bf2da)
@classmethod
def light_sage(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xbcecac``."""
return cls(0xbcecac)
@classmethod
def light_pea_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc4fe82``."""
return cls(0xc4fe82)
@classmethod
def grey_green(cls):
    # NOTE(review): duplicate method name — ``grey_green`` is defined a second
    # time further down in this class with value ``0x86a17d``; because Python
    # class bodies execute top-to-bottom, that later definition silently
    # replaces this one, making this method unreachable dead code. Confirm
    # which of the two colour values is intended and rename/remove one.
    """A factory method that returns a :class:`Colour` with a value of ``0x789b73``."""
    return cls(0x789b73)
@classmethod
def muddy_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x657432``."""
return cls(0x657432)
@classmethod
def hazel(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x8e7618``."""
return cls(0x8e7618)
@classmethod
def pink_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xef1de7``."""
return cls(0xef1de7)
@classmethod
def very_light_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xd5ffff``."""
return cls(0xd5ffff)
@classmethod
def lipstick(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xd5174e``."""
return cls(0xd5174e)
@classmethod
def dark_sky_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x448ee4``."""
return cls(0x448ee4)
@classmethod
def bright_orange(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xff5b00``."""
return cls(0xff5b00)
@classmethod
def carolina_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x8ab8fe``."""
return cls(0x8ab8fe)
@classmethod
def mulberry(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x920a4e``."""
return cls(0x920a4e)
@classmethod
def twilight_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x0a437a``."""
return cls(0x0a437a)
@classmethod
def kiwi(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x9cef43``."""
return cls(0x9cef43)
@classmethod
def algae(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x54ac68``."""
return cls(0x54ac68)
@classmethod
def light_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xffd1df``."""
return cls(0xffd1df)
@classmethod
def spearmint(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x1ef876``."""
return cls(0x1ef876)
@classmethod
def pale_gold(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfdde6c``."""
return cls(0xfdde6c)
@classmethod
def pale_light_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb1fc99``."""
return cls(0xb1fc99)
@classmethod
def bluish_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x10a674``."""
return cls(0x10a674)
@classmethod
def periwinkle_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x8f99fb``."""
return cls(0x8f99fb)
@classmethod
def green_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb5ce08``."""
return cls(0xb5ce08)
@classmethod
def sienna(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa9561e``."""
return cls(0xa9561e)
@classmethod
def carmine(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x9d0216``."""
return cls(0x9d0216)
@classmethod
def snot_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x9dc100``."""
return cls(0x9dc100)
@classmethod
def aqua_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x02d8e9``."""
return cls(0x02d8e9)
@classmethod
def purple_pink(cls):
    # NOTE(review): duplicate method name — ``purple_pink`` is redefined later
    # in this class with value ``0xe03fd8``; the later definition shadows this
    # one, so callers can never reach this value. Dead code: confirm which hex
    # value is correct and rename or delete one of the two definitions.
    """A factory method that returns a :class:`Colour` with a value of ``0xd725de``."""
    return cls(0xd725de)
@classmethod
def muted_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x805b87``."""
return cls(0x805b87)
@classmethod
def deep_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x040273``."""
return cls(0x040273)
@classmethod
def irish_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x019529``."""
return cls(0x019529)
@classmethod
def deep_teal(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x00555a``."""
return cls(0x00555a)
@classmethod
def pale_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xd0fefe``."""
return cls(0xd0fefe)
@classmethod
def deep_magenta(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa0025c``."""
return cls(0xa0025c)
@classmethod
def darkblue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x030764``."""
return cls(0x030764)
@classmethod
def seafoam(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x80f9ad``."""
return cls(0x80f9ad)
@classmethod
def muddy_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x886806``."""
return cls(0x886806)
@classmethod
def ocean_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x03719c``."""
return cls(0x03719c)
@classmethod
def bluey_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x2bb179``."""
return cls(0x2bb179)
@classmethod
def dark_sea_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x11875d``."""
return cls(0x11875d)
@classmethod
def fern_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x548d44``."""
return cls(0x548d44)
@classmethod
def green_grey(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x77926f``."""
return cls(0x77926f)
@classmethod
def lime_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xd0fe1d``."""
return cls(0xd0fe1d)
@classmethod
def electric_lime(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa8ff04``."""
return cls(0xa8ff04)
@classmethod
def pea(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa4bf20``."""
return cls(0xa4bf20)
@classmethod
def bluish_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x703be7``."""
return cls(0x703be7)
@classmethod
def poo_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x885f01``."""
return cls(0x885f01)
@classmethod
def old_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc77986``."""
return cls(0xc77986)
@classmethod
def fern(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x63a950``."""
return cls(0x63a950)
@classmethod
def eggplant_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x430541``."""
return cls(0x430541)
@classmethod
def drab_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x749551``."""
return cls(0x749551)
@classmethod
def baby_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x8cff9e``."""
return cls(0x8cff9e)
@classmethod
def moss_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x658b38``."""
return cls(0x658b38)
@classmethod
def spruce(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x0a5f38``."""
return cls(0x0a5f38)
@classmethod
def light_yellowish_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc2ff89``."""
return cls(0xc2ff89)
@classmethod
def light_beige(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfffeb6``."""
return cls(0xfffeb6)
@classmethod
def neon_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x04d9ff``."""
return cls(0x04d9ff)
@classmethod
def dusty_orange(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xf0833a``."""
return cls(0xf0833a)
@classmethod
def light_royal_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x3a2efe``."""
return cls(0x3a2efe)
@classmethod
def reddish_orange(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xf8481c``."""
return cls(0xf8481c)
@classmethod
def british_racing_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x05480d``."""
return cls(0x05480d)
@classmethod
def sea_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x047495``."""
return cls(0x047495)
@classmethod
def true_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x010fcc``."""
return cls(0x010fcc)
@classmethod
def brownish(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x9c6d57``."""
return cls(0x9c6d57)
@classmethod
def yellow_orange(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfcb001``."""
return cls(0xfcb001)
@classmethod
def light_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x96f97b``."""
return cls(0x96f97b)
@classmethod
def dark_seafoam_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x3eaf76``."""
return cls(0x3eaf76)
@classmethod
def light_maroon(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa24857``."""
return cls(0xa24857)
@classmethod
def pinkish_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xd648d7``."""
return cls(0xd648d7)
@classmethod
def bland(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xafa88b``."""
return cls(0xafa88b)
@classmethod
def brownish_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x6a6e09``."""
return cls(0x6a6e09)
@classmethod
def greenish_cyan(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x2afeb7``."""
return cls(0x2afeb7)
@classmethod
def pale_teal(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x82cbb2``."""
return cls(0x82cbb2)
@classmethod
def light_bluish_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x76fda8``."""
return cls(0x76fda8)
@classmethod
def mint_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x8fff9f``."""
return cls(0x8fff9f)
@classmethod
def amber(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfeb308``."""
return cls(0xfeb308)
@classmethod
def rusty_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xaf2f0d``."""
return cls(0xaf2f0d)
@classmethod
def rich_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x720058``."""
return cls(0x720058)
@classmethod
def sunshine_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfffd37``."""
return cls(0xfffd37)
@classmethod
def blue_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x0f9b8e``."""
return cls(0x0f9b8e)
@classmethod
def rich_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x021bf9``."""
return cls(0x021bf9)
@classmethod
def terracota(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xcb6843``."""
return cls(0xcb6843)
@classmethod
def purple_pink(cls):
    # NOTE(review): this redefines ``purple_pink`` — an earlier definition in
    # this class used value ``0xd725de``. Since class bodies bind names
    # top-to-bottom, this definition wins and the earlier one is silently
    # discarded. Verify ``0xe03fd8`` is the intended value for this name.
    """A factory method that returns a :class:`Colour` with a value of ``0xe03fd8``."""
    return cls(0xe03fd8)
@classmethod
def light_mint(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb6ffbb``."""
return cls(0xb6ffbb)
@classmethod
def green_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x01c08d``."""
return cls(0x01c08d)
@classmethod
def dark_lavender(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x856798``."""
return cls(0x856798)
@classmethod
def cherry(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xcf0234``."""
return cls(0xcf0234)
@classmethod
def saffron(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfeb209``."""
return cls(0xfeb209)
@classmethod
def greenish_tan(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xbccb7a``."""
return cls(0xbccb7a)
@classmethod
def dark_beige(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xac9362``."""
return cls(0xac9362)
@classmethod
def mid_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x276ab3``."""
return cls(0x276ab3)
@classmethod
def rosa(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfe86a4``."""
return cls(0xfe86a4)
@classmethod
def red_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x820747``."""
return cls(0x820747)
@classmethod
def magenta(cls):
    """Alternate constructor: a :class:`Colour` set to magenta (``0xc20078``)."""
    return cls(0xC2_00_78)
@classmethod
def dusk_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x26538d``."""
return cls(0x26538d)
@classmethod
def orangish(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfc824a``."""
return cls(0xfc824a)
@classmethod
def moss(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x769958``."""
return cls(0x769958)
@classmethod
def dark_navy(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x000435``."""
return cls(0x000435)
@classmethod
def lemon_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xadf802``."""
return cls(0xadf802)
@classmethod
def spring_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa9f971``."""
return cls(0xa9f971)
@classmethod
def jade_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x2baf6a``."""
return cls(0x2baf6a)
@classmethod
def purpleish(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x98568d``."""
return cls(0x98568d)
@classmethod
def hot_pink(cls):
    """Produce a :class:`Colour` instance holding the value ``0xff028d``."""
    hot_pink_value = 0xFF028D
    return cls(hot_pink_value)
@classmethod
def electric_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xff0490``."""
return cls(0xff0490)
@classmethod
def shocking_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfe02a2``."""
return cls(0xfe02a2)
@classmethod
def goldenrod(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfac205``."""
return cls(0xfac205)
@classmethod
def puce(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa57e52``."""
return cls(0xa57e52)
@classmethod
def dark_salmon(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc85a53``."""
return cls(0xc85a53)
@classmethod
def pale_lime_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb1ff65``."""
return cls(0xb1ff65)
@classmethod
def pale_lilac(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xe4cbff``."""
return cls(0xe4cbff)
@classmethod
def faded_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x658cbb``."""
return cls(0x658cbb)
@classmethod
def light_navy(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x155084``."""
return cls(0x155084)
@classmethod
def burnt_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xd5ab09``."""
return cls(0xd5ab09)
@classmethod
def dodger_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x3e82fc``."""
return cls(0x3e82fc)
@classmethod
def jungle_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x048243``."""
return cls(0x048243)
@classmethod
def red_wine(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x8c0034``."""
return cls(0x8c0034)
@classmethod
def primary_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x0804f9``."""
return cls(0x0804f9)
@classmethod
def grass(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x5cac2d``."""
return cls(0x5cac2d)
@classmethod
def ocean(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x017b92``."""
return cls(0x017b92)
@classmethod
def cranberry(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x9e003a``."""
return cls(0x9e003a)
@classmethod
def dark_blue_grey(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x1f3b4d``."""
return cls(0x1f3b4d)
@classmethod
def very_pale_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xd6fffe``."""
return cls(0xd6fffe)
@classmethod
def medium_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x39ad48``."""
return cls(0x39ad48)
@classmethod
def bright_sky_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x02ccfe``."""
return cls(0x02ccfe)
@classmethod
def grapefruit(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfd5956``."""
return cls(0xfd5956)
@classmethod
def camo(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x7f8f4e``."""
return cls(0x7f8f4e)
@classmethod
def turquoise_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x04f489``."""
return cls(0x04f489)
@classmethod
def dark_green_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x1f6357``."""
return cls(0x1f6357)
@classmethod
def royal(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x0c1793``."""
return cls(0x0c1793)
@classmethod
def tomato(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xef4026``."""
return cls(0xef4026)
@classmethod
def avocado_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x87a922``."""
return cls(0x87a922)
@classmethod
def seafoam_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x7af9ab``."""
return cls(0x7af9ab)
@classmethod
def dark_plum(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x3f012c``."""
return cls(0x3f012c)
@classmethod
def ruby(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xca0147``."""
return cls(0xca0147)
@classmethod
def brick_orange(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc14a09``."""
return cls(0xc14a09)
@classmethod
def greenblue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x23c48b``."""
return cls(0x23c48b)
@classmethod
def purplish_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x6b4247``."""
return cls(0x6b4247)
@classmethod
def mahogany(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x4a0100``."""
return cls(0x4a0100)
@classmethod
def medium_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xf36196``."""
return cls(0xf36196)
@classmethod
def greenish_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x0b8b87``."""
return cls(0x0b8b87)
@classmethod
def robins_egg_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x98eff9``."""
return cls(0x98eff9)
@classmethod
def greenish_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x696112``."""
return cls(0x696112)
@classmethod
def purplish_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x601ef9``."""
return cls(0x601ef9)
@classmethod
def baby_shit_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xad900d``."""
return cls(0xad900d)
@classmethod
def ugly_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x31668a``."""
return cls(0x31668a)
@classmethod
def yellowish_orange(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xffab0f``."""
return cls(0xffab0f)
@classmethod
def avocado(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x90b134``."""
return cls(0x90b134)
@classmethod
def frog_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x58bc08``."""
return cls(0x58bc08)
@classmethod
def grey_green(cls):
    # NOTE(review): this redefines ``grey_green`` — an earlier definition in
    # this class used value ``0x789b73`` and is silently discarded by this
    # one (last binding wins in a class body). Verify ``0x86a17d`` is the
    # intended value for this name, and rename or remove the other copy.
    """A factory method that returns a :class:`Colour` with a value of ``0x86a17d``."""
    return cls(0x86a17d)
@classmethod
def yellowy_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xbff128``."""
return cls(0xbff128)
@classmethod
def navy_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x001146``."""
return cls(0x001146)
@classmethod
def light_aqua(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x8cffdb``."""
return cls(0x8cffdb)
@classmethod
def pale_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc7fdb5``."""
return cls(0xc7fdb5)
@classmethod
def pale_violet(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xceaefa``."""
return cls(0xceaefa)
@classmethod
def faded_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xd3494e``."""
return cls(0xd3494e)
@classmethod
def reddish_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfe2c54``."""
return cls(0xfe2c54)
@classmethod
def light_seafoam(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa0febf``."""
return cls(0xa0febf)
@classmethod
def forrest_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x154406``."""
return cls(0x154406)
@classmethod
def very_light_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfff4f2``."""
return cls(0xfff4f2)
@classmethod
def prussian_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x004577``."""
return cls(0x004577)
@classmethod
def heliotrope(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xd94ff5``."""
return cls(0xd94ff5)
@classmethod
def pale(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfff9d0``."""
return cls(0xfff9d0)
@classmethod
def algae_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x21c36f``."""
return cls(0x21c36f)
@classmethod
def olive_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x645403``."""
return cls(0x645403)
@classmethod
def barbie_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfe46a5``."""
return cls(0xfe46a5)
@classmethod
def vomit_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x89a203``."""
return cls(0x89a203)
@classmethod
def soft_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x6488ea``."""
return cls(0x6488ea)
@classmethod
def vibrant_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x0339f8``."""
return cls(0x0339f8)
@classmethod
def brownish_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x9e3623``."""
return cls(0x9e3623)
@classmethod
def evergreen(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x05472a``."""
return cls(0x05472a)
@classmethod
def bright_cyan(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x41fdfe``."""
return cls(0x41fdfe)
@classmethod
def night_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x040348``."""
return cls(0x040348)
@classmethod
def deep_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xcb0162``."""
return cls(0xcb0162)
@classmethod
def tealish_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x0cdc73``."""
return cls(0x0cdc73)
@classmethod
def light_sky_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc6fcff``."""
return cls(0xc6fcff)
@classmethod
def neon_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x0cff0c``."""
return cls(0x0cff0c)
@classmethod
def blurple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x5539cc``."""
return cls(0x5539cc)
@classmethod
def weird_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x3ae57f``."""
return cls(0x3ae57f)
@classmethod
def dirty_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x734a65``."""
return cls(0x734a65)
@classmethod
def light_lime_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb9ff66``."""
return cls(0xb9ff66)
@classmethod
def dark_seafoam(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x1fb57a``."""
return cls(0x1fb57a)
@classmethod
def reddish_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x910951``."""
return cls(0x910951)
@classmethod
def bright_yellow_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x9dff00``."""
return cls(0x9dff00)
@classmethod
def rouge(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xab1239``."""
return cls(0xab1239)
@classmethod
def raw_umber(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa75e09``."""
return cls(0xa75e09)
@classmethod
def plum_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x4e0550``."""
return cls(0x4e0550)
@classmethod
def green_teal(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x0cb577``."""
return cls(0x0cb577)
@classmethod
def red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xe50000``."""
return cls(0xe50000)
@classmethod
def booger(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x9bb53c``."""
return cls(0x9bb53c)
@classmethod
def pumpkin(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xe17701``."""
return cls(0xe17701)
@classmethod
def purpley_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x5f34e7``."""
return cls(0x5f34e7)
@classmethod
def dull_orange(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xd8863b``."""
return cls(0xd8863b)
@classmethod
def dull_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xbb3f3f``."""
return cls(0xbb3f3f)
@classmethod
def pinkish(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xd46a7e``."""
return cls(0xd46a7e)
@classmethod
def purpley_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc83cb9``."""
return cls(0xc83cb9)
@classmethod
def light_blue_grey(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb7c9e2``."""
return cls(0xb7c9e2)
@classmethod
def deep_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x36013f``."""
return cls(0x36013f)
@classmethod
def faded_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfeff7f``."""
return cls(0xfeff7f)
@classmethod
def forest_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x06470c``."""
return cls(0x06470c)
@classmethod
def lighter_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x75fd63``."""
return cls(0x75fd63)
@classmethod
def dark_periwinkle(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x665fd1``."""
return cls(0x665fd1)
@classmethod
def dull_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x74a662``."""
return cls(0x74a662)
@classmethod
def black(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x000000``."""
return cls(0x000000)
@classmethod
def deep_lilac(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x966ebd``."""
return cls(0x966ebd)
@classmethod
def old_rose(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc87f89``."""
return cls(0xc87f89)
@classmethod
def light_forest_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x4f9153``."""
return cls(0x4f9153)
@classmethod
def seafoam_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x78d1b6``."""
return cls(0x78d1b6)
@classmethod
def bright_lime_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x65fe08``."""
return cls(0x65fe08)
@classmethod
def manilla(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfffa86``."""
return cls(0xfffa86)
@classmethod
def light_greenish_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x63f7b4``."""
return cls(0x63f7b4)
@classmethod
def perrywinkle(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x8f8ce7``."""
return cls(0x8f8ce7)
@classmethod
def bright_magenta(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xff08e8``."""
return cls(0xff08e8)
@classmethod
def marine_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x01386a``."""
return cls(0x01386a)
@classmethod
def green_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc9ff27``."""
return cls(0xc9ff27)
@classmethod
def mossy_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x638b27``."""
return cls(0x638b27)
@classmethod
def turtle_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x75b84f``."""
return cls(0x75b84f)
@classmethod
def yellowish_tan(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfcfc81``."""
return cls(0xfcfc81)
@classmethod
def coral(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfc5a50``."""
return cls(0xfc5a50)
@classmethod
def asparagus(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x77ab56``."""
return cls(0x77ab56)
@classmethod
def light_mauve(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc292a1``."""
return cls(0xc292a1)
@classmethod
def light_olive(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xacbf69``."""
return cls(0xacbf69)
@classmethod
def golden(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xf5bf03``."""
return cls(0xf5bf03)
@classmethod
def flat_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x3c73a8``."""
return cls(0x3c73a8)
@classmethod
def darkish_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xda467d``."""
return cls(0xda467d)
@classmethod
def green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x15b01a``."""
return cls(0x15b01a)
@classmethod
def sepia(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x985e2b``."""
return cls(0x985e2b)
@classmethod
def ecru(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfeffca``."""
return cls(0xfeffca)
@classmethod
def greeny_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x696006``."""
return cls(0x696006)
@classmethod
def foam_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x90fda9``."""
return cls(0x90fda9)
@classmethod
def military_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x667c3e``."""
return cls(0x667c3e)
@classmethod
def rose_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xf7879a``."""
return cls(0xf7879a)
@classmethod
def dark_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x00035b``."""
return cls(0x00035b)
@classmethod
def bubblegum(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xff6cb5``."""
return cls(0xff6cb5)
@classmethod
def azul(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x1d5dec``."""
return cls(0x1d5dec)
@classmethod
def leaf_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x5ca904``."""
return cls(0x5ca904)
@classmethod
def scarlet(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xbe0119``."""
return cls(0xbe0119)
@classmethod
def blue_grey(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x758da3``."""
return cls(0x758da3)
@classmethod
def yellowish_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb0dd16``."""
return cls(0xb0dd16)
@classmethod
def bright_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfffd01``."""
return cls(0xfffd01)
@classmethod
def grape(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x6c3461``."""
return cls(0x6c3461)
@classmethod
def banana(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xffff7e``."""
return cls(0xffff7e)
@classmethod
def barney_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa00498``."""
return cls(0xa00498)
@classmethod
def light_blue_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x7efbb3``."""
return cls(0x7efbb3)
@classmethod
def strong_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x0c06f7``."""
return cls(0x0c06f7)
@classmethod
def light_urple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb36ff6``."""
return cls(0xb36ff6)
@classmethod
def bright_violet(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xad0afd``."""
return cls(0xad0afd)
    @classmethod
    def purple_blue(cls):
        """A factory method that returns a :class:`Colour` with a value of ``0x632de9``."""
        # NOTE(review): this definition is silently shadowed — a second
        # ``purple_blue`` appears later in this class with value 0x5d21d0,
        # and the later one wins at class-creation time, making this method
        # unreachable dead code. The XKCD survey has distinct names
        # ("purple blue" vs "purple/blue"); confirm which hex belongs to
        # which name and rename one of the two methods.
        return cls(0x632de9)
@classmethod
def highlighter_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x1bfc06``."""
return cls(0x1bfc06)
@classmethod
def salmon_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfe7b7c``."""
return cls(0xfe7b7c)
@classmethod
def light_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xad8150``."""
return cls(0xad8150)
@classmethod
def bluegrey(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x85a3b2``."""
return cls(0x85a3b2)
@classmethod
def darkgreen(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x054907``."""
return cls(0x054907)
@classmethod
def lichen(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x8fb67b``."""
return cls(0x8fb67b)
@classmethod
def egg_shell(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfffcc4``."""
return cls(0xfffcc4)
@classmethod
def browny_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x6f6c0a``."""
return cls(0x6f6c0a)
@classmethod
def brownish_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x76424e``."""
return cls(0x76424e)
@classmethod
def pinkish_orange(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xff724c``."""
return cls(0xff724c)
@classmethod
def pale_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb790d4``."""
return cls(0xb790d4)
@classmethod
def clear_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x247afd``."""
return cls(0x247afd)
@classmethod
def raspberry(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb00149``."""
return cls(0xb00149)
@classmethod
def dusky_rose(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xba6873``."""
return cls(0xba6873)
@classmethod
def ugly_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x7a9703``."""
return cls(0x7a9703)
@classmethod
def cloudy_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xacc2d9``."""
return cls(0xacc2d9)
@classmethod
def bright_light_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x26f7fd``."""
return cls(0x26f7fd)
@classmethod
def dark_mint(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x48c072``."""
return cls(0x48c072)
@classmethod
def pinky_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfc2647``."""
return cls(0xfc2647)
@classmethod
def dusty_rose(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc0737a``."""
return cls(0xc0737a)
@classmethod
def lightish_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfe2f4a``."""
return cls(0xfe2f4a)
@classmethod
def yellow_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc0fb2d``."""
return cls(0xc0fb2d)
@classmethod
def pastel_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xcaa0ff``."""
return cls(0xcaa0ff)
@classmethod
def yellowy_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xae8b0c``."""
return cls(0xae8b0c)
@classmethod
def rust_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xaa2704``."""
return cls(0xaa2704)
@classmethod
def green_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x06b48b``."""
return cls(0x06b48b)
@classmethod
def light_salmon(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfea993``."""
return cls(0xfea993)
@classmethod
def olive_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc2b709``."""
return cls(0xc2b709)
@classmethod
def pale_lime(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xbefd73``."""
return cls(0xbefd73)
@classmethod
def radioactive_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x2cfa1f``."""
return cls(0x2cfa1f)
@classmethod
def light_lilac(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xedc8ff``."""
return cls(0xedc8ff)
@classmethod
def teal_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x01889f``."""
return cls(0x01889f)
@classmethod
def tea_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xbdf8a3``."""
return cls(0xbdf8a3)
@classmethod
def bronze(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa87900``."""
return cls(0xa87900)
@classmethod
def reddy_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x6e1005``."""
return cls(0x6e1005)
@classmethod
def dark_grass_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x388004``."""
return cls(0x388004)
@classmethod
def peachy_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xff9a8a``."""
return cls(0xff9a8a)
@classmethod
def dirty_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xcdc50a``."""
return cls(0xcdc50a)
@classmethod
def tangerine(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xff9408``."""
return cls(0xff9408)
@classmethod
def deep_lavender(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x8d5eb7``."""
return cls(0x8d5eb7)
@classmethod
def umber(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb26400``."""
return cls(0xb26400)
@classmethod
def olive_drab(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x6f7632``."""
return cls(0x6f7632)
@classmethod
def baby_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xca9bf7``."""
return cls(0xca9bf7)
@classmethod
def cerise(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xde0c62``."""
return cls(0xde0c62)
@classmethod
def melon(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xff7855``."""
return cls(0xff7855)
@classmethod
def burnt_sienna(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb04e0f``."""
return cls(0xb04e0f)
@classmethod
def vibrant_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x0add08``."""
return cls(0x0add08)
@classmethod
def yellowish_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x9b7a01``."""
return cls(0x9b7a01)
@classmethod
def shamrock(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x01b44c``."""
return cls(0x01b44c)
@classmethod
def brown_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb29705``."""
return cls(0xb29705)
@classmethod
def tan_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa9be70``."""
return cls(0xa9be70)
@classmethod
def dark_magenta(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x960056``."""
return cls(0x960056)
@classmethod
def purplish_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xce5dae``."""
return cls(0xce5dae)
@classmethod
def grey(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x929591``."""
return cls(0x929591)
@classmethod
def mud_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x60460f``."""
return cls(0x60460f)
@classmethod
def pea_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x8eab12``."""
return cls(0x8eab12)
@classmethod
def pink_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xdb4bda``."""
return cls(0xdb4bda)
@classmethod
def reddish_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x7f2b0a``."""
return cls(0x7f2b0a)
@classmethod
def blush_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfe828c``."""
return cls(0xfe828c)
@classmethod
def light_lime(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xaefd6c``."""
return cls(0xaefd6c)
@classmethod
def hot_magenta(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xf504c9``."""
return cls(0xf504c9)
@classmethod
def poop_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x6f7c00``."""
return cls(0x6f7c00)
@classmethod
def swamp(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x698339``."""
return cls(0x698339)
@classmethod
def faded_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x7bb274``."""
return cls(0x7bb274)
@classmethod
def yellow_ochre(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xcb9d06``."""
return cls(0xcb9d06)
@classmethod
def dust(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb2996e``."""
return cls(0xb2996e)
@classmethod
def soft_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa66fb5``."""
return cls(0xa66fb5)
@classmethod
def light_lavendar(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xefc0fe``."""
return cls(0xefc0fe)
@classmethod
def dark_royal_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x02066f``."""
return cls(0x02066f)
@classmethod
def violet_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa50055``."""
return cls(0xa50055)
@classmethod
def rosy_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xf6688e``."""
return cls(0xf6688e)
@classmethod
def lighter_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa55af4``."""
return cls(0xa55af4)
@classmethod
def eggshell(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xffffd4``."""
return cls(0xffffd4)
@classmethod
def greyish_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc88d94``."""
return cls(0xc88d94)
@classmethod
def russet(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa13905``."""
return cls(0xa13905)
@classmethod
def purply(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x983fb2``."""
return cls(0x983fb2)
@classmethod
def red_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x8b2e16``."""
return cls(0x8b2e16)
@classmethod
def off_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xf1f33f``."""
return cls(0xf1f33f)
@classmethod
def warm_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x4b57db``."""
return cls(0x4b57db)
@classmethod
def metallic_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x4f738e``."""
return cls(0x4f738e)
@classmethod
def golden_rod(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xf9bc08``."""
return cls(0xf9bc08)
@classmethod
def pale_olive_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb1d27b``."""
return cls(0xb1d27b)
@classmethod
def dusty_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb9484e``."""
return cls(0xb9484e)
@classmethod
def light_plum(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x9d5783``."""
return cls(0x9d5783)
@classmethod
def lilac(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xcea2fd``."""
return cls(0xcea2fd)
@classmethod
def dusky_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x895b7b``."""
return cls(0x895b7b)
@classmethod
def green_apple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x5edc1f``."""
return cls(0x5edc1f)
@classmethod
def hospital_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x9be5aa``."""
return cls(0x9be5aa)
@classmethod
def lavender_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xdd85d7``."""
return cls(0xdd85d7)
@classmethod
def light_grey_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb7e1a1``."""
return cls(0xb7e1a1)
@classmethod
def topaz(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x13bbaf``."""
return cls(0x13bbaf)
@classmethod
def dull_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x876e4b``."""
return cls(0x876e4b)
@classmethod
def steel(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x738595``."""
return cls(0x738595)
@classmethod
def rose_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xbe013c``."""
return cls(0xbe013c)
@classmethod
def aquamarine(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x04d8b2``."""
return cls(0x04d8b2)
@classmethod
def midnight_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x280137``."""
return cls(0x280137)
@classmethod
def grassy_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x419c03``."""
return cls(0x419c03)
@classmethod
def charcoal(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x343837``."""
return cls(0x343837)
@classmethod
def puke_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x947706``."""
return cls(0x947706)
@classmethod
def pinkish_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xf10c45``."""
return cls(0xf10c45)
@classmethod
def cocoa(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x875f42``."""
return cls(0x875f42)
@classmethod
def baby_poo(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xab9004``."""
return cls(0xab9004)
@classmethod
def orange(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xf97306``."""
return cls(0xf97306)
@classmethod
def salmon(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xff796c``."""
return cls(0xff796c)
@classmethod
def ugly_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x7d7103``."""
return cls(0x7d7103)
@classmethod
def purple_grey(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x866f85``."""
return cls(0x866f85)
@classmethod
def olive_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x677a04``."""
return cls(0x677a04)
@classmethod
def dull_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xeedc5b``."""
return cls(0xeedc5b)
@classmethod
def blueberry(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x464196``."""
return cls(0x464196)
@classmethod
def neon_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xff073a``."""
return cls(0xff073a)
@classmethod
def peacock_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x016795``."""
return cls(0x016795)
@classmethod
def snot(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xacbb0d``."""
return cls(0xacbb0d)
@classmethod
def tea(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x65ab7c``."""
return cls(0x65ab7c)
    @classmethod
    def purple_blue(cls):
        """A factory method that returns a :class:`Colour` with a value of ``0x5d21d0``."""
        # NOTE(review): duplicate definition — ``purple_blue`` is also defined
        # earlier in this class with value 0x632de9. This later definition
        # overrides the earlier one at class-creation time, so callers only
        # ever see 0x5d21d0. The XKCD survey lists both "purple blue" and
        # "purple/blue"; confirm the intended mapping and rename one method.
        return cls(0x5d21d0)
@classmethod
def liliac(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc48efd``."""
return cls(0xc48efd)
@classmethod
def easter_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc071fe``."""
return cls(0xc071fe)
@classmethod
def pale_grey(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfdfdfe``."""
return cls(0xfdfdfe)
@classmethod
def electric_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x0652ff``."""
return cls(0x0652ff)
@classmethod
def dark_mustard(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa88905``."""
return cls(0xa88905)
@classmethod
def pastel_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfffe71``."""
return cls(0xfffe71)
@classmethod
def off_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x5684ae``."""
return cls(0x5684ae)
@classmethod
def marine(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x042e60``."""
return cls(0x042e60)
@classmethod
def dark_navy_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x00022e``."""
return cls(0x00022e)
@classmethod
def blue_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x5a06ef``."""
return cls(0x5a06ef)
@classmethod
def pale_sky_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xbdf6fe``."""
return cls(0xbdf6fe)
@classmethod
def violet(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x9a0eea``."""
return cls(0x9a0eea)
@classmethod
def mustard_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa8b504``."""
return cls(0xa8b504)
@classmethod
def light_sea_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x98f6b0``."""
return cls(0x98f6b0)
@classmethod
def yellow_brown(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb79400``."""
return cls(0xb79400)
@classmethod
def pine_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x0a481e``."""
return cls(0x0a481e)
@classmethod
def velvet(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x750851``."""
return cls(0x750851)
@classmethod
def navy_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x35530a``."""
return cls(0x35530a)
@classmethod
def custard(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfffd78``."""
return cls(0xfffd78)
@classmethod
def yellow_tan(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xffe36e``."""
return cls(0xffe36e)
@classmethod
def poo(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x8f7303``."""
return cls(0x8f7303)
@classmethod
def mud(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x735c12``."""
return cls(0x735c12)
@classmethod
def vermillion(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xf4320c``."""
return cls(0xf4320c)
@classmethod
def copper(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xb66325``."""
return cls(0xb66325)
@classmethod
def easter_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x8cfd7e``."""
return cls(0x8cfd7e)
@classmethod
def sunflower_yellow(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xffda03``."""
return cls(0xffda03)
@classmethod
def dark_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x35063e``."""
return cls(0x35063e)
@classmethod
def brownish_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc27e79``."""
return cls(0xc27e79)
@classmethod
def emerald(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x01a049``."""
return cls(0x01a049)
@classmethod
def carnation_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xff7fa7``."""
return cls(0xff7fa7)
@classmethod
def dusky_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x475f94``."""
return cls(0x475f94)
@classmethod
def turquoise(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x06c2ac``."""
return cls(0x06c2ac)
@classmethod
def robins_egg(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x6dedfd``."""
return cls(0x6dedfd)
@classmethod
def sapphire(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x2138ab``."""
return cls(0x2138ab)
@classmethod
def dusty_teal(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x4c9085``."""
return cls(0x4c9085)
@classmethod
def lawn_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x4da409``."""
return cls(0x4da409)
@classmethod
def cerulean(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x0485d1``."""
return cls(0x0485d1)
@classmethod
def sick_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x9db92c``."""
return cls(0x9db92c)
@classmethod
def warm_pink(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xfb5581``."""
return cls(0xfb5581)
# American-spelling alias for :class:`XKCDColour`.
XKCDColor = XKCDColour
|
mgardne8/DiscordPyColours
|
discord/ext/colours/xkcd.py
|
Python
|
gpl-3.0
| 156,381
|
[
"Amber",
"VisIt"
] |
b086c06a107b0b10891d4cbd77e8ecb1ae99caa17564a4635fc99ea85b970a6e
|
"""
Tests for the Matomo template tags and filters.
"""
import pytest
from django.contrib.auth.models import User
from django.http import HttpRequest
from django.template import Context
from django.test.utils import override_settings
from utils import TagTestCase
from analytical.templatetags.matomo import MatomoNode
from analytical.utils import AnalyticalException
@override_settings(MATOMO_DOMAIN_PATH='example.com', MATOMO_SITE_ID='345')
class MatomoTagTestCase(TagTestCase):
    """
    Tests for the ``matomo`` template tag.

    The class-level ``override_settings`` provides a valid domain/site-id
    baseline; individual tests override these again to probe validation.
    """
    def test_tag(self):
        """Rendering the tag embeds the tracker URL, site id and noscript image."""
        r = self.render_tag('matomo', 'matomo')
        assert '"//example.com/"' in r
        assert "_paq.push(['setSiteId', 345]);" in r
        assert 'img src="//example.com/piwik.php?idsite=345"' in r
    def test_node(self):
        """Rendering the node directly produces the same tracking snippet."""
        r = MatomoNode().render(Context({}))
        assert '"//example.com/";' in r
        assert "_paq.push(['setSiteId', 345]);" in r
        assert 'img src="//example.com/piwik.php?idsite=345"' in r
    @override_settings(MATOMO_DOMAIN_PATH='example.com/matomo',
                       MATOMO_SITE_ID='345')
    def test_domain_path_valid(self):
        """A domain with a path component is accepted."""
        r = self.render_tag('matomo', 'matomo')
        assert '"//example.com/matomo/"' in r
    @override_settings(MATOMO_DOMAIN_PATH='example.com:1234',
                       MATOMO_SITE_ID='345')
    def test_domain_port_valid(self):
        """A domain with an explicit port is accepted."""
        r = self.render_tag('matomo', 'matomo')
        assert '"//example.com:1234/";' in r
    @override_settings(MATOMO_DOMAIN_PATH='example.com:1234/matomo',
                       MATOMO_SITE_ID='345')
    def test_domain_port_path_valid(self):
        """A domain with both a port and a path is accepted."""
        r = self.render_tag('matomo', 'matomo')
        assert '"//example.com:1234/matomo/"' in r
    @override_settings(MATOMO_DOMAIN_PATH=None)
    def test_no_domain(self):
        """A missing domain setting raises AnalyticalException."""
        with pytest.raises(AnalyticalException):
            MatomoNode()
    @override_settings(MATOMO_SITE_ID=None)
    def test_no_siteid(self):
        """A missing site id setting raises AnalyticalException."""
        with pytest.raises(AnalyticalException):
            MatomoNode()
    @override_settings(MATOMO_SITE_ID='x')
    def test_siteid_not_a_number(self):
        """A non-numeric site id raises AnalyticalException."""
        with pytest.raises(AnalyticalException):
            MatomoNode()
    @override_settings(MATOMO_DOMAIN_PATH='http://www.example.com')
    def test_domain_protocol_invalid(self):
        """The domain must not include a protocol prefix."""
        with pytest.raises(AnalyticalException):
            MatomoNode()
    @override_settings(MATOMO_DOMAIN_PATH='example.com/')
    def test_domain_slash_invalid(self):
        """The domain must not end with a slash."""
        with pytest.raises(AnalyticalException):
            MatomoNode()
    @override_settings(MATOMO_DOMAIN_PATH='example.com:123:456')
    def test_domain_multi_port(self):
        """More than one port separator is rejected."""
        with pytest.raises(AnalyticalException):
            MatomoNode()
    @override_settings(MATOMO_DOMAIN_PATH='example.com:')
    def test_domain_incomplete_port(self):
        """A trailing colon without a port number is rejected."""
        with pytest.raises(AnalyticalException):
            MatomoNode()
    @override_settings(MATOMO_DOMAIN_PATH='example.com:/matomo')
    def test_domain_uri_incomplete_port(self):
        """A colon followed directly by a path (no port) is rejected."""
        with pytest.raises(AnalyticalException):
            MatomoNode()
    @override_settings(MATOMO_DOMAIN_PATH='example.com:12df')
    def test_domain_port_invalid(self):
        """A non-numeric port is rejected."""
        with pytest.raises(AnalyticalException):
            MatomoNode()
    @override_settings(ANALYTICAL_INTERNAL_IPS=['1.1.1.1'])
    def test_render_internal_ip(self):
        """Tracking is replaced by an HTML comment for internal IP addresses."""
        req = HttpRequest()
        req.META['REMOTE_ADDR'] = '1.1.1.1'
        context = Context({'request': req})
        r = MatomoNode().render(context)
        assert r.startswith('<!-- Matomo disabled on internal IP address')
        assert r.endswith('-->')
    def test_uservars(self):
        """Custom variables from the context are pushed with their scope (default: page)."""
        context = Context({'matomo_vars': [(1, 'foo', 'foo_val'),
                                           (2, 'bar', 'bar_val', 'page'),
                                           (3, 'spam', 'spam_val', 'visit')]})
        r = MatomoNode().render(context)
        for var_code in ['_paq.push(["setCustomVariable", 1, "foo", "foo_val", "page"]);',
                         '_paq.push(["setCustomVariable", 2, "bar", "bar_val", "page"]);',
                         '_paq.push(["setCustomVariable", 3, "spam", "spam_val", "visit"]);']:
            assert var_code in r
    @override_settings(ANALYTICAL_AUTO_IDENTIFY=True)
    def test_default_usertrack(self):
        """With auto-identify on, the username of the user in context is tracked."""
        context = Context({
            'user': User(username='BDFL', first_name='Guido', last_name='van Rossum')
        })
        r = MatomoNode().render(context)
        var_code = '_paq.push(["setUserId", "BDFL"]);'
        assert var_code in r
    def test_matomo_usertrack(self):
        """An explicit ``matomo_identity`` context value is tracked."""
        context = Context({
            'matomo_identity': 'BDFL'
        })
        r = MatomoNode().render(context)
        var_code = '_paq.push(["setUserId", "BDFL"]);'
        assert var_code in r
    def test_analytical_usertrack(self):
        """The generic ``analytical_identity`` context value is tracked too."""
        context = Context({
            'analytical_identity': 'BDFL'
        })
        r = MatomoNode().render(context)
        var_code = '_paq.push(["setUserId", "BDFL"]);'
        assert var_code in r
    @override_settings(ANALYTICAL_AUTO_IDENTIFY=True)
    def test_disable_usertrack(self):
        """Setting ``matomo_identity`` to None disables user tracking."""
        context = Context({
            'user': User(username='BDFL', first_name='Guido', last_name='van Rossum'),
            'matomo_identity': None
        })
        r = MatomoNode().render(context)
        var_code = '_paq.push(["setUserId", "BDFL"]);'
        assert var_code not in r
    @override_settings(MATOMO_DISABLE_COOKIES=True)
    def test_disable_cookies(self):
        """MATOMO_DISABLE_COOKIES adds the disableCookies push."""
        r = MatomoNode().render(Context({}))
        assert "_paq.push(['disableCookies']);" in r
|
jcassee/django-analytical
|
tests/unit/test_tag_matomo.py
|
Python
|
mit
| 5,650
|
[
"VisIt"
] |
ead0de0fd3f3ad08dbf5161ec3bc5581aa03d42fbf1d393f0e816dbe265af12a
|
#!/usr/bin/python
# -- Content-Encoding: UTF-8 --
"""
iPOPO component factories repository
:author: Thomas Calmant
:license: Apache Software License 2.0
..
Copyright 2014 isandlaTech
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Standard library
import ast
import logging
import threading
# Pelix
from pelix.utilities import is_string
from pelix.ipopo.decorators import ComponentFactory, Provides, Invalidate, \
Property, Requires, Validate
# Repository beans
import cohorte.repositories
from cohorte.repositories.beans import Factory
# ------------------------------------------------------------------------------
# Bundle version
import cohorte.version
# PEP 8: spaces around the assignment operator (was __version__=...).
__version__ = cohorte.version.__version__
# ------------------------------------------------------------------------------
# Module-level logger for this repository implementation.
_logger = logging.getLogger(__name__)
# ------------------------------------------------------------------------------
class ComponentFactoryVisitor(ast.NodeVisitor):
    """
    AST visitor that extracts the iPOPO factory names declared in a module,
    without importing it.
    """
    # pylint: disable=invalid-name
    def __init__(self):
        """
        Sets up the visitor
        """
        ast.NodeVisitor.__init__(self)
        # Factory names found so far
        self.factories = set()
        # Module-level string constants (name -> value), used to resolve
        # factory names given via a constant instead of a literal
        self.values = {}
    def generic_visit(self, node):
        """
        Custom default visit method that avoids to visit further than the
        module level.
        """
        if type(node) is ast.Module:
            ast.NodeVisitor.generic_visit(self, node)
    def visit_ClassDef(self, node):
        """
        Found a class definition: look for a @ComponentFactory decorator and
        record the factory name it declares.
        """
        for decorator in node.decorator_list:
            try:
                if decorator.func.id != "ComponentFactory":
                    # Not a ComponentFactory decorator
                    continue
            except AttributeError:
                # Not our kind of decorator
                pass
            else:
                name = None
                if decorator.args:
                    # Name: First argument
                    argument = decorator.args[0]
                else:
                    argument = None
                    if hasattr(decorator, 'kwargs'):
                        # Before Python 3.5
                        if decorator.kwargs:
                            argument = decorator.kwargs.get('name')
                    elif hasattr(decorator, 'keywords'):
                        # Python 3.5: kwargs dictionary replaced by a list
                        # of keywords
                        for keyword in decorator.keywords:
                            if keyword.arg == 'name':
                                argument = keyword.value
                    if not argument:
                        # Default name
                        name = "{0}Factory".format(node.name)
                if name is None:
                    if hasattr(argument, 'id'):
                        # Constant: resolve via previously-seen assignments
                        try:
                            name = self.values[argument.id]
                        except KeyError:
                            _logger.debug("Factory name '%s' is unknown (%s)",
                                          argument.id, node.name)
                    else:
                        # Literal
                        try:
                            name = ast.literal_eval(argument)
                        except (ValueError, SyntaxError) as ex:
                            _logger.debug(
                                "Invalid factory name for class %s: %s",
                                node.name, ex)
                if name is not None:
                    # Store the factory name
                    self.factories.add(name)
    def visit_Assign(self, node):
        """
        Found an assignment: remember module-level string constants so that
        factory names given by name can be resolved later.
        """
        field = getattr(node.targets[0], 'id', None)
        if field:
            try:
                value = ast.literal_eval(node.value)
                if is_string(value):
                    self.values[field] = value
            except (ValueError, SyntaxError):
                # Ignore errors
                pass
def _extract_module_factories(filename):
    """
    Extract the names of the iPOPO component factories declared in the given
    Python source file (static AST parsing, the module is never imported).

    :param filename: Path to the file to parse
    :return: The set of factory names found in the module
    :raise ValueError: Unreadable or unparsable file
    """
    visitor = ComponentFactoryVisitor()
    try:
        with open(filename) as filep:
            source = filep.read()
    except (OSError, IOError) as ex:
        raise ValueError("Error reading {0}: {1}".format(filename, ex))
    try:
        module = ast.parse(source, filename, 'exec')
    except (ValueError, SyntaxError) as ex:
        raise ValueError("Error parsing {0}: {1}".format(filename, ex))
    try:
        visitor.visit(module)
    except Exception as ex:
        # The visitor may fail in many ways on exotic AST nodes: normalize
        # any error to ValueError for the caller
        raise ValueError("Error visiting {0}: {1}".format(filename, ex))
    return visitor.factories
# ------------------------------------------------------------------------------
@ComponentFactory("cohorte-repository-factories-ipopo-factory")
@Provides(cohorte.repositories.SERVICE_REPOSITORY_FACTORIES,
          controller="_controller")
@Requires('_repositories', cohorte.repositories.SERVICE_REPOSITORY_ARTIFACTS,
          True, False,
          "({0}=python)".format(cohorte.repositories.PROP_REPOSITORY_LANGUAGE))
@Property('_model', cohorte.repositories.PROP_FACTORY_MODEL, "ipopo")
@Property('_language', cohorte.repositories.PROP_REPOSITORY_LANGUAGE, "python")
class IPopoRepository(object):
    """
    Represents a repository of iPOPO component factories, indexed both by
    factory name and by providing artifact.
    """
    def __init__(self):
        """
        Sets up the repository
        """
        # Properties
        self._model = 'ipopo'
        self._language = 'python'
        # Service controller: the service is only published once the initial
        # loading thread has finished (see validate / __initial_loading)
        self._controller = False
        # Injected service
        self._repositories = []
        # Name -> [Factories]
        self._factories = {}
        # Artifact -> [Factories]
        self._artifacts = {}
        # Some locking
        self.__lock = threading.RLock()
    def __contains__(self, item):
        """
        Tests if the given item is in the repository

        :param item: Item to be tested (a Factory bean or a factory name)
        :return: True if the item is in the repository
        """
        if isinstance(item, Factory):
            # Test artifact model
            if item.model != self._model:
                return False
            # Test if the name is in the factories
            return item.name in self._factories
        elif item in self._factories:
            # Item matches a factory name
            return True
        # No match
        return False
    def __len__(self):
        """
        Length of a repository <=> number of individual factories
        """
        return sum((len(factories) for factories in self._factories.values()))
    def add_artifact(self, artifact):
        """
        Adds the factories provided by the given artifact

        :param artifact: A Python Module artifact
        :raise ValueError: Unreadable file
        """
        with self.__lock:
            # Extract factories
            names = _extract_module_factories(artifact.file)
            artifact_list = self._artifacts.setdefault(artifact, [])
            for name in names:
                # Make the bean
                factory = Factory(name, self._language, self._model, artifact)
                # Factory (avoid duplicates in both indexes)
                factory_list = self._factories.setdefault(name, [])
                if factory not in factory_list:
                    factory_list.append(factory)
                # Artifact
                if factory not in artifact_list:
                    artifact_list.append(factory)
    def clear(self):
        """
        Clears the repository content
        """
        with self.__lock:
            self._artifacts.clear()
            self._factories.clear()
    def find_factories(self, factories):
        """
        Returns the list of artifacts that provides the given factories

        :param factories: A list of iPOPO factory names
        :return: A tuple ({Name -> [Artifacts]}, [Not found factories])
        """
        with self.__lock:
            factories_set = set(factories)
            resolution = {}
            unresolved = set()
            if not factories:
                # Nothing to do...
                return resolution, factories_set
            for name in factories_set:
                try:
                    # Get the list of factories for this name
                    factories = self._factories[name]
                    providers = resolution.setdefault(name, [])
                    providers.extend(factory.artifact for factory in factories)
                except KeyError:
                    # Factory name not found
                    unresolved.add(name)
            # Sort the artifacts
            for artifacts in resolution.values():
                artifacts.sort(reverse=True)
            return resolution, unresolved
    def find_factory(self, factory, artifact_name=None, artifact_version=None):
        """
        Find the artifacts that provides the given factory, filtered by name
        and version.

        :return: The list of artifacts providing the factory, sorted by name
                 and version
        :raise KeyError: Unknown factory, or no artifact matches the filter
        """
        with self.__lock:
            # Copy the list of artifacts for this factory
            artifacts = [factory.artifact
                         for factory in self._factories[factory]]
            if artifact_name is not None:
                # Artifact must be selected
                # Prepare the version bean
                version = cohorte.repositories.beans.Version(artifact_version)
                # Filter results
                artifacts = [artifact for artifact in artifacts
                             if artifact.name == artifact_name and
                             version.matches(artifact.version)]
                if not artifacts:
                    # No match found
                    raise KeyError("No matching artifact for {0} -> {1} {2}"
                                   .format(factory, artifact_name, version))
            # Sort results
            artifacts.sort(reverse=True)
            return artifacts
    def get_language(self):
        """
        Retrieves the language of the artifacts stored in this repository
        """
        return self._language
    def get_model(self):
        """
        Retrieves the component model that can handle the factories of this
        repository
        """
        return self._model
    def load_repositories(self):
        """
        Loads the factories according to the repositories
        """
        with self.__lock:
            if not self._repositories:
                # No repository
                return
            # Walk through artifacts
            for repository in self._repositories:
                for artifact in repository.walk():
                    try:
                        self.add_artifact(artifact)
                    except ValueError as ex:
                        # Log the exception instead of stopping here
                        _logger.warning("Error reading artifact: %s",
                                        ex, exc_info=True)
    def __initial_loading(self):
        """
        Initial repository loading (runs in a dedicated thread); publishes the
        service once done by flipping the controller.
        """
        self.load_repositories()
        self._controller = True
    @Validate
    def validate(self, context):
        """
        Component validated
        """
        self._controller = False
        # Load repositories in another thread
        threading.Thread(target=self.__initial_loading,
                         name="iPOPO-repository-loader").start()
    @Invalidate
    def invalidate(self, context):
        """
        Component invalidated
        """
        self.clear()
|
isandlaTech/cohorte-devtools
|
qualifier/deploy/cohorte-home/repo/cohorte/repositories/python/ipopo.py
|
Python
|
apache-2.0
| 12,465
|
[
"VisIt"
] |
afa4125144338b84082e38e2fa8bf23c63fc2ee90e8d516c643515d137704ea4
|
#!env python
"""
Applies the masks from a netCDF file to another and saves it in a new file.
"""
import sys
import numpy as np
from netCDF4 import Dataset
# --- Command-line handling -------------------------------------------------
if len(sys.argv) < 4:
    # Parenthesised print works under both Python 2 and Python 3.
    print("Usage: " + sys.argv[0] + " inputfile maskedfile outputfile")
    # sys.exit is always available; the bare exit() builtin is a `site`
    # convenience meant for interactive use.
    sys.exit(1)
infile = sys.argv[1]
maskedfile = sys.argv[2]
outfile = sys.argv[3]
with Dataset(infile, 'r') as src, \
        Dataset(maskedfile, 'r') as masked, \
        Dataset(outfile, 'w', format='NETCDF3_CLASSIC') as dst:
    # Copy every dimension; unlimited dimensions keep their unlimited status.
    for name, dimension in src.dimensions.items():
        dst.createDimension(
            name,
            len(dimension) if not dimension.isunlimited() else None
        )
    for name, variable in src.variables.items():
        print(name)
        dst.createVariable(name, variable.datatype, variable.dimensions)
        addMask = False
        # Copy all attributes of the source variable.
        for attrname in variable.ncattrs():
            dst.variables[name].setncattr(
                attrname,
                variable.getncattr(attrname)
            )
        # Borrow missing_value/_FillValue from the masked file when the
        # source variable does not define them itself.
        for attrname in ['missing_value', '_FillValue']:
            if attrname not in masked.variables[name].ncattrs():
                continue
            if attrname in dst.variables[name].ncattrs():
                continue
            dst.variables[name].setncattr(
                attrname,
                masked.variables[name].getncattr(attrname)
            )
            addMask = True
        if addMask:
            print("Adding mask to %s" % name)
            result = np.ma.masked_array(variable[:])
            # NOTE(review): assumes masked.variables[name][0, :] is a masked
            # array whose mask matches the source variable's shape -- confirm
            # with the actual input files.
            result.mask = masked.variables[name][0, :].mask
            dst.variables[name][:] = result
        else:
            dst.variables[name][:] = variable[:]
|
DFO-Ocean-Navigator/Ocean-Data-Map-Project
|
scripts/apply_mask.py
|
Python
|
gpl-3.0
| 1,675
|
[
"NetCDF"
] |
a10c46a850f7a32967a4c0114129a8a629d5e565550e3c9608f1b460b2c8f4b4
|
"""
Logging Root
"""
__RCSID__ = "$Id$"
import logging
import time
import sys
from DIRAC.FrameworkSystem.private.standardLogging.LogLevels import LogLevels
from DIRAC.FrameworkSystem.private.standardLogging.Logging import Logging
from DIRAC.Resources.LogBackends.StdoutBackend import StdoutBackend
from DIRAC.Core.Utilities import DIRACSingleton
class LoggingRoot(Logging):
"""
LoggingRoot is a Logging object and it is particular because it is the first parent of the chain.
In this context, it has more possibilities because it is the one that initializes the logger of the
standard logging library and it configures it with the configuration.
There is a difference between the parent Logging and the other because the parent defines the behaviour
of all the Logging objects, so it needs a specific class.
LoggingRoot has to be unique, because we want one and only one parent on the top of the chain: that is why
we created a singleton to keep it unique.
"""
__metaclass__ = DIRACSingleton.DIRACSingleton
# Boolean preventing that the LoggingRoot be configured more than one time
__configuredLogging = False
def __init__(self):
"""
Initialization of the LoggingRoot object.
LoggingRoot :
- initialize the UTC time
- set the correct level defines by the user, or the default
- add the custom level to logging: verbose, notice, always
- register a default backend: stdout : all messages will be displayed here
- update the format according to the command line argument
"""
super(LoggingRoot, self).__init__()
# this line removes some useless information from log records and improves
# the performances
logging._srcfile = None # pylint: disable=protected-access
# initialize the root logger
# actually a child of the root logger to avoid conflicts with other
# libraries which used 'logging'
self._logger = logging.getLogger('dirac')
# prevent propagation to the root logger to avoid conflicts with external libraries
# which want to use the root logger
self._logger.propagate = False
# here we redefine the custom name to the empty string to remove the "\"
# in the display
self._customName = ""
# this level is not the Logging level, it is only used to send all log messages to the central logging system
# to do such an operation, we need to let pass all log messages to the root logger, so all logger needs to be
# at debug. Then, all the backends have a level associated to a Logging level, which can be changed with the
# setLevel method of Logging, and these backends will choose to send the
# log messages or not.
self._logger.setLevel(LogLevels.DEBUG)
# initialization of the UTC time
# Actually, time.gmtime is equal to UTC time because it has its DST flag to 0
# which means there is no clock advance
logging.Formatter.converter = time.gmtime
# initialization of levels
levels = LogLevels.getLevels()
for level in levels:
logging.addLevelName(levels[level], level)
# initialization of the default backend
self._setLevel(LogLevels.NOTICE)
# use the StdoutBackend directly to avoid dependancy loop with ObjectLoader
self._addBackend(StdoutBackend())
# configuration of the level and update of the format
self.__configureLevel()
self._generateBackendFormat()
def initialize(self, systemName, cfgPath, forceInit= False):
"""
Configure the root Logging.
It can be possible to :
- attach it some backends : LogBackends = stdout,stderr,file,server
- attach backend options : BackendOptions { FileName = /tmp/file.log }
- add colors and the path of the call : LogColor = True, LogShowLine = True
- precise a level : LogLevel = DEBUG
:params systemName: string represented as "system name/component name"
:params cfgPath: string of the configuration path
:params forceInit: Force the initialization even if it had already happened.
This should not be used !! The only case is LocalConfiguration.enableCS
In order to take into account extensions' backends
"""
# we have to put the import line here to avoid a dependancy loop
from DIRAC import gConfig
self._lockConfig.acquire()
try:
if not LoggingRoot.__configuredLogging or forceInit:
Logging._componentName = systemName
# Prepare to remove all the backends from the root Logging as in the old gLogger.
# store them in a list handlersToRemove.
# we will remove them later, because some components as ObjectLoader need a backend.
# this can be useful to have logs only in a file for instance.
handlersToRemove = []
for backend in self._backendsList:
handlersToRemove.append(backend.getHandler())
del self._backendsList[:]
# get the backends, the backend options and add them to the root
# Logging
desiredBackends = self.__getBackendsFromCFG(cfgPath)
for backend in desiredBackends:
desiredOptions = self.__getBackendOptionsFromCFG(cfgPath, backend)
self.registerBackend(desiredOptions.get(
'Plugin', backend), desiredOptions)
# Format options
self._options['Color'] = gConfig.getValue(
"%s/LogColor" % cfgPath, False)
# Remove the old backends
for handler in handlersToRemove:
self._logger.removeHandler(handler)
levelName = gConfig.getValue("%s/LogLevel" % cfgPath, None)
if levelName is not None:
self.setLevel(levelName)
LoggingRoot.__configuredLogging = True
finally:
self._lockConfig.release()
def __getBackendsFromCFG(self, cfgPath):
"""
Get backends from the configuration and register them in LoggingRoot.
This is the new way to get the backends providing a general configuration.
:params cfgPath: string of the configuration path
"""
# We have to put the import line here to avoid a dependancy loop
from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
from DIRAC import gConfig
# get the second last string representing the component type in the configuration
# example : 'Agents', 'Services'
component = cfgPath.split("/")[-2]
operation = Operations()
# Search desired backends in the component
desiredBackends = gConfig.getValue("%s/%s" % (cfgPath, 'LogBackends'), [])
if not desiredBackends:
# Search desired backends in the operation section according to the
# component type
desiredBackends = operation.getValue(
"Logging/Default%sBackends" % component, [])
if not desiredBackends:
# Search desired backends in the operation section
desiredBackends = operation.getValue("Logging/DefaultBackends", [])
if not desiredBackends:
# Default value
desiredBackends = ['stdout']
return desiredBackends
def __getBackendOptionsFromCFG(self, cfgPath, backend):
"""
Get backend options from the configuration.
:params cfgPath: string of the configuration path
:params backend: string representing a backend identifier: stdout, file, f04
"""
# We have to put the import lines here to avoid a dependancy loop
from DIRAC import gConfig
from DIRAC.ConfigurationSystem.Client.Helpers.Resources import getBackendConfig
backendOptions = {}
# Search backends config in the resources section
retDictRessources = getBackendConfig(backend)
if retDictRessources['OK']:
backendOptions = retDictRessources['Value']
# Search backends config in the component to update some options
retDictConfig = gConfig.getOptionsDict(
"%s/%s/%s" % (cfgPath, 'LogBackendsConfig', backend))
if retDictConfig['OK']:
backendOptions.update(retDictConfig['Value'])
else:
# Search backends config in the component with the old option
# 'BackendsOptions'
retDictOptions = gConfig.getOptionsDict("%s/BackendsOptions" % cfgPath)
if retDictOptions['OK']:
# We have to write the deprecated message with the print method because we are changing
# the backends, so we can not be sure of the display using a log.
print "WARNING: Use of a deprecated cfg section: BackendsOptions. Please replace it by BackendConfig."
backendOptions.update(retDictOptions['Value'])
return backendOptions
def __configureLevel(self):
"""
Configure the log level of the root Logging according to the argv parameter
It can be : -d, -dd, -ddd
Work only for clients, scripts and tests
Configuration/Client/LocalConfiguration manages services,agents and executors
"""
debLevs = 0
for arg in sys.argv:
if arg.find("-d") == 0:
debLevs += arg.count("d")
if debLevs == 1:
self._setLevel(LogLevels.VERBOSE)
elif debLevs == 2:
self._setLevel(LogLevels.VERBOSE)
self.showHeaders(True)
elif debLevs >= 3:
self._setLevel(LogLevels.DEBUG)
self.showHeaders(True)
self.showThreadIDs(True)
def enableLogsFromExternalLibs(self):
"""
Enable the display of the logs coming from external libraries
"""
self.__enableLogsFromExternalLibs()
def disableLogsFromExternalLibs(self):
"""
Disable the display of the logs coming from external libraries
"""
self.__enableLogsFromExternalLibs(False)
@staticmethod
def __enableLogsFromExternalLibs(isEnabled=True):
"""
Configure the root logger from 'logging' for an external library use.
By default the root logger is configured with:
- debug level,
- stderr output
- custom format close to the DIRAC format
:params isEnabled: boolean value. True allows the logs in the external lib,
False do not.
"""
rootLogger = logging.getLogger()
rootLogger.handlers = []
if isEnabled:
logging.basicConfig(level=logging.DEBUG,
format='%(asctime)s UTC ExternalLibrary/%(name)s %(levelname)s: %(message)s',
datefmt='%Y-%m-%d %H:%M:%S')
else:
rootLogger.addHandler(logging.NullHandler())
|
andresailer/DIRAC
|
FrameworkSystem/private/standardLogging/LoggingRoot.py
|
Python
|
gpl-3.0
| 10,258
|
[
"DIRAC"
] |
b18d062436656421842fb4e3cc04839f6f3bcb6753c0daef4971bf177621008c
|
from os import environ as env
import re
# This is a config file for deploying to Heroku.
# Set these values as environment variables in your Heroku environment with:
# $ heroku config:add SOME_VAR=some_value
# NOTE(review): DEBUG enabled in a deployment config -- confirm this is
# intentional before shipping; debug pages can leak settings and stack traces.
DEBUG = True
CSRF_ENABLED = True
SECRET_KEY = env['SECRET_KEY']
UPLOAD_FOLDER = '/tmp/'
# Set literal instead of set([...]): same value, no intermediate list.
ALLOWED_EXTENSIONS = {'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'}
# Using MongoHQ on Heroku.
# Ref: https://devcenter.heroku.com/articles/mongohq
# Visit your application on Heroku's dashboard and navigate
# to the MongoHQ dashboard. Under Admin > Users you can manage credentials.
# You don't have to set this env var, MongoHQ sets it for you.
MONGO_URL = env['MONGOHQ_URL']
# Verbose pattern splitting a mongodb:// URL into credentials, host, port
# and database name (whitespace is ignored under re.VERBOSE).
mongo_re = re.compile('''
    mongodb://
    (?P<username>[^:]+)
    :
    (?P<password>[^@]+)
    @
    (?P<host>[^:]+)
    :
    (?P<port>[0-9]+)
    /
    (?P<db>[a-z0-9]+)
''', re.VERBOSE)
mongo = mongo_re.match(MONGO_URL)
MONGODB_SETTINGS = {
    'DB': mongo.group('db'),
    'USERNAME': mongo.group('username'),
    'PASSWORD': mongo.group('password'),
    'HOST': mongo.group('host'),
    'PORT': int(mongo.group('port'))
}
AUTH_USER = env['AUTH_USER']
AUTH_PASS = env['AUTH_PASS']
MAIL_HOST = 'smtp.gmail.com'
MAIL_PORT = 587
MAIL_USER = env['MAIL_USER']
MAIL_PASS = env['MAIL_PASS']
MAIL_TARGETS = ['ftzeng@gmail.com']
GOOGLE_CLIENT_ID = env['GOOGLE_CLIENT_ID']
GOOGLE_CLIENT_SECRET = env['GOOGLE_CLIENT_SECRET']
GOOGLE_REDIRECT_URI = '/oauth2callback'
GITHUB_CLIENT_ID = env['GITHUB_CLIENT_ID']
GITHUB_CLIENT_SECRET = env['GITHUB_CLIENT_SECRET']
GITHUB_REDIRECT_URI = '/github_auth'
|
publicscience/hive
|
config_heroku.py
|
Python
|
mit
| 1,617
|
[
"VisIt"
] |
130c7a2b69e6f57b6e86acab3dc18139da18afc8535107b7b872f18b6e8c2ff1
|
########################################################################
# File: RequestValidator.py
# Author: Krzysztof.Ciba@NOSPAMgmail.com
# Date: 2012/09/18 07:55:16
########################################################################
""" :mod: RequestValidator
======================
.. module: RequestValidator
:synopsis: request validator
.. moduleauthor:: Krzysztof.Ciba@NOSPAMgmail.com
A general and simple request validator checking for required attributes and logic.
It checks if required attributes are set/unset but not for their values.
RequestValidator class implements the DIRACSingleton pattern, no global object is
required to keep a single instance.
If you need to extend this one with your own specific checks consider:
* for adding Operation or Files required attributes use :any:`addReqAttrsCheck` function::
RequestValidator().addReqAttrsCheck( "FooOperation", operationAttrs = [ "Bar", "Buzz"], filesAttrs = [ "LFN" ] )
* for adding generic check define a new callable object ( function or functor ) which takes only one argument,
say for functor::
class MyValidator( RequestValidator ):
@staticmethod
def hasFoo( request ):
if not request.Foo:
return S_ERROR("Foo not set")
return S_OK()
* or function::
def hasBar( request ):
if not request.Bar:
return S_ERROR("Bar not set")
return S_OK()
and add this one to the validators set by calling `RequestValidator().addValidator`, i.e.::
RequestValidator().addValidator( MyValidator.hasFoo )
RequestValidator().addValidator( hasFoo )
Notice that all validators should always return S_ERROR/S_OK, no exceptions from that whatsoever!
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
__RCSID__ = "$Id$"
# #
# @file RequestValidator.py
# @author Krzysztof.Ciba@NOSPAMgmail.com
# @date 2012/09/18 07:55:37
# @brief Definition of RequestValidator class.
# # import
import inspect
import six
# # from DIRAC
from DIRAC import S_OK, S_ERROR, gConfig, gLogger
from DIRAC.Core.Security.Properties import FULL_DELEGATION, LIMITED_DELEGATION
from DIRAC.Core.Utilities.DIRACSingleton import DIRACSingleton
from DIRAC.ConfigurationSystem.Client import PathFinder
########################################################################
@six.add_metaclass(DIRACSingleton)
class RequestValidator(object):
"""
.. class:: RequestValidator
This class validates newly created requests (before saving them in RequestDB) for
required attributes.
"""
# # dict with required attrs
reqAttrs = {
"ForwardDISET": {
"Operation": ["Arguments"], "Files": []
},
"PutAndRegister": {
"Operation": ["TargetSE"],
"Files": ["LFN", "PFN"]
},
"ReplicateAndRegister": {
"Operation": ["TargetSE"], "Files": ["LFN"]
},
"PhysicalRemoval": {
"Operation": ["TargetSE"], "Files": ["PFN"]
},
"RemoveFile": {
"Operation": [], "Files": ["LFN"]
},
"RemoveReplica": {
"Operation": ["TargetSE"],
"Files": ["LFN"]
},
"ReTransfer": {
"Operation": ["TargetSE"],
"Files": ["LFN", "PFN"],
},
"RegisterFile": {
"Operation": [],
"Files": ["LFN", "PFN", "ChecksumType", "Checksum", "GUID"],
},
"RegisterReplica": {
"Operation": ["TargetSE"],
"Files": ["LFN", "PFN"],
}
}
# All the operationHandlers defined in the CS
opHandlers = set()
  def __init__(self):
    """ c'tor

    just setting validation order
    """
    # Validators run in this exact order; _hasRequestName comes first so the
    # later error messages can safely reference request.RequestName.
    self.validator = (self._hasRequestName,
                      self._hasOwner,
                      self._hasOperations,
                      self._hasType,
                      self._hasFiles,
                      self._hasRequiredAttrs,
                      self._hasChecksumAndChecksumType)
    configPath = PathFinder.getAgentSection("RequestManagement/RequestExecutingAgent")
    # # operation handlers over here
    opHandlersPath = "%s/%s" % (configPath, "OperationHandlers")
    opHandlers = gConfig.getSections(opHandlersPath)
    if not opHandlers["OK"]:
      # keep the class-level default (empty set) and just log the CS error
      gLogger.error(opHandlers["Message"])
    else:
      self.opHandlers = set(opHandlers["Value"])
@classmethod
def addReqAttrsCheck(cls, operationType, operationAttrs=None, filesAttrs=None):
""" add required attributes of Operation of type :operationType:
:param str operationType: Operation.Type
:param operationAttrs: required Operation attributes
:type operationAttrs: python:list
:param filesAttrs: required Files attributes
:type filesAttrs: python:list
"""
toUpdate = {"Operation": operationAttrs if operationAttrs else [],
"Files": filesAttrs if filesAttrs else []}
if operationType not in cls.reqAttrs:
cls.reqAttrs[operationType] = {"Operation": [], "Files": []}
for key, attrList in cls.reqAttrs[operationType].items():
cls.reqAttrs[operationType][key] = list(set(attrList + toUpdate[key]))
@classmethod
def addValidator(cls, fcnObj):
""" add `fcnObj` validator """
if not callable(fcnObj):
return S_ERROR("supplied argument is not callable")
args = inspect.getargspec(fcnObj).args
if len(args) not in (1, 2):
return S_ERROR("wrong number of arguments for supplied function object")
cls.validator = cls.validator + tuple(fcnObj, )
return S_OK()
def validate(self, request):
""" validation of a given `request`
:param ~Request.Request request: Request instance
"""
for validator in self.validator:
isValid = validator(request)
if not isValid["OK"]:
return isValid
# # if we're here request is more or less valid
return S_OK()
@staticmethod
def _hasDIRACSetup(request):
""" required attribute - DIRACSetup """
if not request.DIRACSetup:
return S_ERROR("DIRACSetup not set")
return S_OK()
@staticmethod
def _hasOwner(request):
    """ required attributes OwnerDN and OwnerGroup """
    # Fast path: both ownership attributes are present.
    if request.OwnerDN and request.OwnerGroup:
        return S_OK()
    if not request.OwnerDN:
        return S_ERROR("Request '%s' is missing OwnerDN value" % request.RequestName)
    return S_ERROR("Request '%s' is missing OwnerGroup value" % request.RequestName)
@staticmethod
def _hasRequestName(request):
    """ required attribute: RequestName """
    return S_OK() if request.RequestName else S_ERROR("RequestName not set")
@staticmethod
def _hasOperations(request):
    """ at least one operation is in """
    if len(request):
        return S_OK()
    return S_ERROR("Operations not present in request '%s'" % request.RequestName)
@staticmethod
def _hasType(request):
    """ operation type is set """
    for operation in request:
        if operation.Type:
            continue
        return S_ERROR("Operation #%d in request '%s' hasn't got Type set" %
                       (request.indexOf(operation), request.RequestName))
    return S_OK()
@classmethod
def _hasFiles(cls, request):
    """ check for files presence

    Operations whose type is registered in cls.reqAttrs must have files
    if (and only if) file-level attributes are required for that type.
    """
    for operation in request:
        required = cls.reqAttrs.get(operation.Type)
        if required is None:
            # NOTE(review): this returns immediately on the first type with no
            # registered requirements, leaving later operations unchecked --
            # behaviour preserved from the original implementation.
            return S_OK()
        hasFiles = bool(len(operation))
        if required["Files"] and not hasFiles:
            return S_ERROR(
                "Operation #%d of type '%s' hasn't got files to process." %
                (request.indexOf(operation), operation.Type))
        if not required["Files"] and hasFiles:
            return S_ERROR(
                "Operation #%d of type '%s' has got files to process." %
                (request.indexOf(operation), operation.Type))
    return S_OK()
@classmethod
def _hasRequiredAttrs(cls, request):
    """ check required attributes for operations and files

    Only operation types registered via addReqAttrsCheck carry extra
    requirements; all other types pass unchecked.
    """
    for operation in request:
        if operation.Type in cls.reqAttrs:
            # A falsy attribute value (None, '', 0) counts as missing.
            opAttrs = cls.reqAttrs[operation.Type]["Operation"]
            for opAttr in opAttrs:
                if not getattr(operation, opAttr):
                    return S_ERROR("Operation #%d of type '%s' is missing %s attribute." %
                                   (request.indexOf(operation), operation.Type, opAttr))
            # Same check, but for every file attached to the operation.
            fileAttrs = cls.reqAttrs[operation.Type]["Files"]
            for opFile in operation:
                for fileAttr in fileAttrs:
                    if not getattr(opFile, fileAttr):
                        return S_ERROR("Operation #%d of type '%s' is missing %s attribute for file." %
                                       (request.indexOf(operation), operation.Type, fileAttr))
    return S_OK()
@classmethod
def _hasChecksumAndChecksumType(cls, request):
    """ Checksum and ChecksumType should be specified together or not at all """
    for operation in request:
        for opFile in operation:
            checksumPair = (opFile.Checksum, opFile.ChecksumType)
            # Exactly one of the pair being set is the inconsistent case.
            if any(checksumPair) and not all(checksumPair):
                return S_ERROR("File in operation #%d is missing Checksum (%s) or ChecksumType (%s)" %
                               (request.indexOf(operation), opFile.Checksum, opFile.ChecksumType))
    return S_OK()
def _hasExistingOperationTypes(self, request):
    """ Check that there is a handler defined in the CS for each operation type"""
    missingHandlers = {op.Type for op in request} - self.opHandlers
    if missingHandlers:
        return S_ERROR(
            "The following operation type(s) have no handlers defined in the CS: %s" %
            missingHandlers)
    return S_OK()
@staticmethod
def setAndCheckRequestOwner(request, remoteCredentials):
    """
    CAUTION: meant to be called on the server side.
    (does not make much sense otherwise)

    Sets the ownerDN and ownerGroup of the Request from
    the client's credentials.
    If they are already set, make sure the client is allowed to do so
    (FULL_DELEGATION or LIMITED_DELEGATION). This is the case of pilots or
    the RequestExecutingAgent

    :param request: the request to test
    :param remoteCredentials: credentials from the clients

    :returns: True if everything is fine, False otherwise
    """
    credDN = remoteCredentials['DN']
    credGroup = remoteCredentials['group']
    credProperties = remoteCredentials['properties']

    # If the owner or the group was not set, we use the one of the credentials
    if not request.OwnerDN or not request.OwnerGroup:
        request.OwnerDN = credDN
        request.OwnerGroup = credGroup
        return True

    # From here onward, we expect the ownerDN/group to already have a value
    # If the credentials in the Request match those from the credentials, it's OK
    if request.OwnerDN == credDN and request.OwnerGroup == credGroup:
        return True

    # From here, something/someone is putting a request on behalf of someone else
    # Only allow this if the credentials have Full or Limited delegation properties
    if FULL_DELEGATION in credProperties or LIMITED_DELEGATION in credProperties:
        return True

    return False
|
yujikato/DIRAC
|
src/DIRAC/RequestManagementSystem/private/RequestValidator.py
|
Python
|
gpl-3.0
| 11,219
|
[
"DIRAC"
] |
0dd79e553b317d9c153a3b144d3c625473269801d13a8689467cce3ab0d11548
|
import numpy as np
import copy
import numpy.linalg as la
import summary_output as SUMMARY
import robust as ROBUST
import user_output as USER
from utils import spdot, sphstack, RegressionPropsY, RegressionPropsVM
__author__ = "Luc Anselin luc.anselin@asu.edu, David C. Folch david.folch@asu.edu, Jing Yao jingyao@asu.edu"
__all__ = ["TSLS"]
class BaseTSLS(RegressionPropsY, RegressionPropsVM):

    """
    Two stage least squares (2SLS) (note: no consistency checks,
    diagnostics or constant added)

    Parameters
    ----------
    y            : array
                   nx1 array for dependent variable
    x            : array
                   Two dimensional array with n rows and one column for each
                   independent (exogenous) variable, excluding the constant
    yend         : array
                   Two dimensional array with n rows and one column for each
                   endogenous variable
    q            : array
                   Two dimensional array with n rows and one column for each
                   external exogenous variable to use as instruments (note:
                   this should not contain any variables from x); cannot be
                   used in combination with h
    h            : array
                   Two dimensional array with n rows and one column for each
                   exogenous variable to use as instruments (note: this
                   can contain variables from x); cannot be used in
                   combination with q
    robust       : string
                   If 'white', then a White consistent estimator of the
                   variance-covariance matrix is given. If 'hac', then a
                   HAC consistent estimator of the variance-covariance
                   matrix is given. Default set to None.
    gwk          : pysal W object
                   Kernel spatial weights needed for HAC estimation. Note:
                   matrix must have ones along the main diagonal.
    sig2n_k      : boolean
                   If True, then use n-k to estimate sigma^2. If False, use n.

    Attributes
    ----------
    betas        : array
                   kx1 array of estimated coefficients
    u            : array
                   nx1 array of residuals
    predy        : array
                   nx1 array of predicted y values
    n            : integer
                   Number of observations
    k            : integer
                   Number of variables for which coefficients are estimated
                   (including the constant)
    kstar        : integer
                   Number of endogenous variables.
    y            : array
                   nx1 array for dependent variable
    x            : array
                   Two dimensional array with n rows and one column for each
                   independent (exogenous) variable, including the constant
    yend         : array
                   Two dimensional array with n rows and one column for each
                   endogenous variable
    q            : array
                   Two dimensional array with n rows and one column for each
                   external exogenous variable used as instruments
    z            : array
                   nxk array of variables (combination of x and yend)
    h            : array
                   nxl array of instruments (combination of x and q)
    mean_y       : float
                   Mean of dependent variable
    std_y        : float
                   Standard deviation of dependent variable
    vm           : array
                   Variance covariance matrix (kxk)
    utu          : float
                   Sum of squared residuals
    sig2         : float
                   Sigma squared used in computations
    sig2n        : float
                   Sigma squared (computed with n in the denominator)
    sig2n_k      : float
                   Sigma squared (computed with n-k in the denominator)
    hth          : float
                   H'H
    hthi         : float
                   (H'H)^-1
    varb         : array
                   (Z'H (H'H)^-1 H'Z)^-1
    zthhthi      : array
                   Z'H(H'H)^-1
    pfora1a2     : array
                   n(zthhthi)'varb

    Examples
    --------

    >>> import numpy as np
    >>> import pysal
    >>> db = pysal.open(pysal.examples.get_path("columbus.dbf"),'r')
    >>> y = np.array(db.by_col("CRIME"))
    >>> y = np.reshape(y, (49,1))
    >>> X = []
    >>> X.append(db.by_col("INC"))
    >>> X = np.array(X).T
    >>> X = np.hstack((np.ones(y.shape),X))
    >>> yd = []
    >>> yd.append(db.by_col("HOVAL"))
    >>> yd = np.array(yd).T
    >>> q = []
    >>> q.append(db.by_col("DISCBD"))
    >>> q = np.array(q).T
    >>> reg = BaseTSLS(y, X, yd, q=q)
    >>> print reg.betas
    [[ 88.46579584]
     [  0.5200379 ]
     [ -1.58216593]]
    >>> reg = BaseTSLS(y, X, yd, q=q, robust="white")

    """

    def __init__(self, y, x, yend, q=None, h=None,
                 robust=None, gwk=None, sig2n_k=False):

        # FIX: the Python-2-only ``raise Exception, "msg"`` syntax has been
        # replaced by the call form, which is valid on both Python 2 and 3
        # and raises the identical exception.
        if issubclass(type(q), np.ndarray) and issubclass(type(h), np.ndarray):
            raise Exception("Please do not provide 'q' and 'h' together")
        if q is None and h is None:
            raise Exception("Please provide either 'q' or 'h'")

        self.y = y
        self.n = y.shape[0]
        self.x = x

        self.kstar = yend.shape[1]
        # including exogenous and endogenous variables
        z = sphstack(self.x, yend)
        if type(h).__name__ not in ['ndarray', 'csr_matrix']:
            # including exogenous variables and instrument
            h = sphstack(self.x, q)
        self.z = z
        self.h = h
        self.q = q
        self.yend = yend
        # k = number of exogenous variables and endogenous variables
        self.k = z.shape[1]
        hth = spdot(h.T, h)
        hthi = la.inv(hth)
        zth = spdot(z.T, h)
        hty = spdot(h.T, y)

        # 2SLS estimator: betas = [Z'H (H'H)^-1 H'Z]^-1 Z'H (H'H)^-1 H'y
        factor_1 = np.dot(zth, hthi)
        factor_2 = np.dot(factor_1, zth.T)
        # this one needs to be in cache to be used in AK
        varb = la.inv(factor_2)
        factor_3 = np.dot(varb, factor_1)
        betas = np.dot(factor_3, hty)
        self.betas = betas
        self.varb = varb
        self.zthhthi = factor_1

        # predicted values
        self.predy = spdot(z, betas)

        # residuals
        u = y - self.predy
        self.u = u

        # attributes used in property
        self.hth = hth     # Required for condition index
        self.hthi = hthi   # Used in error models
        self.htz = zth.T

        if robust:
            # goes through the vm setter below, which initialises self._cache
            self.vm = ROBUST.robust_vm(reg=self, gwk=gwk, sig2n_k=sig2n_k)

        if sig2n_k:
            self.sig2 = self.sig2n_k
        else:
            self.sig2 = self.sig2n

    @property
    def pfora1a2(self):
        # n * (Z'H(H'H)^-1)' varb -- cached, reused by the AK test.
        if 'pfora1a2' not in self._cache:
            self._cache['pfora1a2'] = self.n * \
                np.dot(self.zthhthi.T, self.varb)
        return self._cache['pfora1a2']

    @property
    def vm(self):
        # Lazily computed classic variance-covariance matrix; the robust
        # branch of __init__ overrides it through the setter instead.
        try:
            return self._cache['vm']
        except AttributeError:
            self._cache = {}
            self._cache['vm'] = np.dot(self.sig2, self.varb)
        except KeyError:
            self._cache['vm'] = np.dot(self.sig2, self.varb)
        return self._cache['vm']

    @vm.setter
    def vm(self, val):
        try:
            self._cache['vm'] = val
        except AttributeError:
            self._cache = {}
            self._cache['vm'] = val
        except KeyError:
            self._cache['vm'] = val
class TSLS(BaseTSLS):

    """
    Two stage least squares with results and diagnostics.

    Parameters
    ----------
    y            : array
                   nx1 array for dependent variable
    x            : array
                   Two dimensional array with n rows and one column for each
                   independent (exogenous) variable, excluding the constant
    yend         : array
                   Two dimensional array with n rows and one column for each
                   endogenous variable
    q            : array
                   Two dimensional array with n rows and one column for each
                   external exogenous variable to use as instruments (note:
                   this should not contain any variables from x)
    w            : pysal W object
                   Spatial weights object (required if running spatial
                   diagnostics)
    robust       : string
                   If 'white', then a White consistent estimator of the
                   variance-covariance matrix is given. If 'hac', then a
                   HAC consistent estimator of the variance-covariance
                   matrix is given. Default set to None.
    gwk          : pysal W object
                   Kernel spatial weights needed for HAC estimation. Note:
                   matrix must have ones along the main diagonal.
    sig2n_k      : boolean
                   If True, then use n-k to estimate sigma^2. If False, use n.
    spat_diag    : boolean
                   If True, then compute Anselin-Kelejian test (requires w)
    vm           : boolean
                   If True, include variance-covariance matrix in summary
                   results
    name_y       : string
                   Name of dependent variable for use in output
    name_x       : list of strings
                   Names of independent variables for use in output
    name_yend    : list of strings
                   Names of endogenous variables for use in output
    name_q       : list of strings
                   Names of instruments for use in output
    name_w       : string
                   Name of weights matrix for use in output
    name_gwk     : string
                   Name of kernel weights matrix for use in output
    name_ds      : string
                   Name of dataset for use in output

    Attributes
    ----------
    summary      : string
                   Summary of regression results and diagnostics (note: use in
                   conjunction with the print command)
    betas        : array
                   kx1 array of estimated coefficients
    u            : array
                   nx1 array of residuals
    predy        : array
                   nx1 array of predicted y values
    n            : integer
                   Number of observations
    k            : integer
                   Number of variables for which coefficients are estimated
                   (including the constant)
    kstar        : integer
                   Number of endogenous variables.
    y            : array
                   nx1 array for dependent variable
    x            : array
                   Two dimensional array with n rows and one column for each
                   independent (exogenous) variable, including the constant
    yend         : array
                   Two dimensional array with n rows and one column for each
                   endogenous variable
    q            : array
                   Two dimensional array with n rows and one column for each
                   external exogenous variable used as instruments
    z            : array
                   nxk array of variables (combination of x and yend)
    h            : array
                   nxl array of instruments (combination of x and q)
    robust       : string
                   Adjustment for robust standard errors
    mean_y       : float
                   Mean of dependent variable
    std_y        : float
                   Standard deviation of dependent variable
    vm           : array
                   Variance covariance matrix (kxk)
    pr2          : float
                   Pseudo R squared (squared correlation between y and ypred)
    utu          : float
                   Sum of squared residuals
    sig2         : float
                   Sigma squared used in computations
    std_err      : array
                   1xk array of standard errors of the betas
    z_stat       : list of tuples
                   z statistic; each tuple contains the pair (statistic,
                   p-value), where each is a float
    ak_test      : tuple
                   Anselin-Kelejian test; tuple contains the pair (statistic,
                   p-value)
    name_y       : string
                   Name of dependent variable for use in output
    name_x       : list of strings
                   Names of independent variables for use in output
    name_yend    : list of strings
                   Names of endogenous variables for use in output
    name_z       : list of strings
                   Names of exogenous and endogenous variables for use in
                   output
    name_q       : list of strings
                   Names of external instruments
    name_h       : list of strings
                   Names of all instruments used in ouput
    name_w       : string
                   Name of weights matrix for use in output
    name_gwk     : string
                   Name of kernel weights matrix for use in output
    name_ds      : string
                   Name of dataset for use in output
    title        : string
                   Name of the regression method used
    sig2n        : float
                   Sigma squared (computed with n in the denominator)
    sig2n_k      : float
                   Sigma squared (computed with n-k in the denominator)
    hth          : float
                   H'H
    hthi         : float
                   (H'H)^-1
    varb         : array
                   (Z'H (H'H)^-1 H'Z)^-1
    zthhthi      : array
                   Z'H(H'H)^-1
    pfora1a2     : array
                   n(zthhthi)'varb

    Examples
    --------

    We first need to import the needed modules, namely numpy to convert the
    data we read into arrays that ``spreg`` understands and ``pysal`` to
    perform all the analysis.

    >>> import numpy as np
    >>> import pysal

    Open data on Columbus neighborhood crime (49 areas) using pysal.open().
    This is the DBF associated with the Columbus shapefile.  Note that
    pysal.open() also reads data in CSV format; since the actual class
    requires data to be passed in as numpy arrays, the user can read their
    data in using any method.

    >>> db = pysal.open(pysal.examples.get_path("columbus.dbf"),'r')

    Extract the CRIME column (crime rates) from the DBF file and make it the
    dependent variable for the regression. Note that PySAL requires this to be
    an numpy array of shape (n, 1) as opposed to the also common shape of (n, )
    that other packages accept.

    >>> y = np.array(db.by_col("CRIME"))
    >>> y = np.reshape(y, (49,1))

    Extract INC (income) vector from the DBF to be used as
    independent variables in the regression.  Note that PySAL requires this to
    be an nxj numpy array, where j is the number of independent variables (not
    including a constant). By default this model adds a vector of ones to the
    independent variables passed in, but this can be overridden by passing
    constant=False.

    >>> X = []
    >>> X.append(db.by_col("INC"))
    >>> X = np.array(X).T

    In this case we consider HOVAL (home value) is an endogenous regressor.
    We tell the model that this is so by passing it in a different parameter
    from the exogenous variables (x).

    >>> yd = []
    >>> yd.append(db.by_col("HOVAL"))
    >>> yd = np.array(yd).T

    Because we have endogenous variables, to obtain a correct estimate of the
    model, we need to instrument for HOVAL. We use DISCBD (distance to the
    CBD) for this and hence put it in the instruments parameter, 'q'.

    >>> q = []
    >>> q.append(db.by_col("DISCBD"))
    >>> q = np.array(q).T

    We are all set with the preliminars, we are good to run the model. In this
    case, we will need the variables (exogenous and endogenous) and the
    instruments. If we want to have the names of the variables printed in the
    output summary, we will have to pass them in as well, although this is optional.

    >>> reg = TSLS(y, X, yd, q, name_x=['inc'], name_y='crime', name_yend=['hoval'], name_q=['discbd'], name_ds='columbus')
    >>> print reg.betas
    [[ 88.46579584]
     [  0.5200379 ]
     [ -1.58216593]]

    """

    def __init__(self, y, x, yend, q,
                 w=None,
                 robust=None, gwk=None, sig2n_k=False,
                 spat_diag=False,
                 vm=False, name_y=None, name_x=None,
                 name_yend=None, name_q=None,
                 name_w=None, name_gwk=None, name_ds=None):

        # Sanity checks on the user-supplied arrays before any computation.
        n = USER.check_arrays(y, x, yend, q)
        USER.check_y(y, n)
        USER.check_weights(w, y)
        USER.check_robust(robust, gwk)
        USER.check_spat_diag(spat_diag, w)
        # Prepend a constant column; BaseTSLS does not add one itself.
        x_constant = USER.check_constant(x)
        BaseTSLS.__init__(self, y=y, x=x_constant, yend=yend, q=q,
                          robust=robust, gwk=gwk, sig2n_k=sig2n_k)
        self.title = "TWO STAGE LEAST SQUARES"
        # Human-readable labels consumed by the summary writer below.
        self.name_ds = USER.set_name_ds(name_ds)
        self.name_y = USER.set_name_y(name_y)
        self.name_x = USER.set_name_x(name_x, x)
        self.name_yend = USER.set_name_yend(name_yend, yend)
        self.name_z = self.name_x + self.name_yend
        self.name_q = USER.set_name_q(name_q, q)
        self.name_h = USER.set_name_h(self.name_x, self.name_q)
        self.robust = USER.set_robust(robust)
        self.name_w = USER.set_name_w(name_w, w)
        self.name_gwk = USER.set_name_w(name_gwk, gwk)
        # Build the printable summary (and spatial diagnostics when requested).
        SUMMARY.TSLS(reg=self, vm=vm, w=w, spat_diag=spat_diag)
def _test():
import doctest
start_suppress = np.get_printoptions()['suppress']
np.set_printoptions(suppress=True)
doctest.testmod()
np.set_printoptions(suppress=start_suppress)
if __name__ == '__main__':
    _test()

    import numpy as np
    import pysal

    # Worked example: Columbus crime data with INC exogenous, HOVAL
    # endogenous and DISCBD as its instrument.
    db = pysal.open(pysal.examples.get_path("columbus.dbf"), 'r')
    y_var = 'CRIME'
    y = np.array([db.by_col(y_var)]).reshape(49, 1)
    x_var = ['INC']
    x = np.array([db.by_col(name) for name in x_var]).T
    yd_var = ['HOVAL']
    yd = np.array([db.by_col(name) for name in yd_var]).T
    q_var = ['DISCBD']
    q = np.array([db.by_col(name) for name in q_var]).T
    # Row-standardised rook contiguity weights for the spatial diagnostics.
    w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
    w.transform = 'r'
    tsls = TSLS(y, x, yd, q, w=w, spat_diag=True, name_y=y_var, name_x=x_var,
                name_yend=yd_var, name_q=q_var, name_ds='columbus', name_w='columbus.gal')
    print tsls.summary
|
schmidtc/pysal
|
pysal/spreg/twosls.py
|
Python
|
bsd-3-clause
| 19,084
|
[
"COLUMBUS"
] |
2e08fafdd4610edd9516cc36a8553d0bafc2d871c5147865f2ef0a725108684f
|
import math

# Grid dimensions of the maze.
ROWS = 8                      # rows in maze
COLS = 8                      # columns in maze
GRID_DX = 20.0                # x-dimension of one grid cell in world units
GRID_DY = 20.0                # y-dimension of one grid cell in world units
MAX_STEPS = ROWS * COLS * 40  # max number of steps - no need to visit each cell more then twice! (turns count now!)
STEP_DELAY = 3.0              # number of seconds to wait between the sense-act-step repeats
NUDGE_X = 20.0                # shift the island in +x by ...
NUDGE_Y = 20.0                # shift the island in +y by ...
WALL_TEMPLATE = "data/shapes/wall/BrickWall.xml"
INITIAL_EPSILON = 0.1         # starting exploration rate
HISTORY_LENGTH = 5            # number of state-action pairs used to determine if the agent is stuck

# Bit masks used to tag cell contents.
OBSTACLE_MASK = 1  # 0b0001
AGENT_MASK = 2     # 0b0010

# maze environment: the four cardinal moves plus an explicit null move.
MAZE_MOVES = [(1,0), (-1,0), (0,1), (0,-1)]
MAZE_NULL_MOVE = len(MAZE_MOVES)

# continuous environment
CONT_MAZE_TURN_BY = 90        # how many degrees to turn by every time
CONT_MAZE_WALK_BY = GRID_DX   # how many units to advance by every step forward
CONT_MAZE_ACTIONS = {'FWD':0, 'CW':3, 'CCW':2, 'BCK':1}  # in Granular, FWD is N, BCK is S, CW is E and CCW is W
CONT_MAZE_N_ACTIONS = 4       # number of actions
CONT_MAZE_N_RAYS = 4          # number of rays around the agent, starting from the front
CONT_MAZE_MAX_DISTANCE = math.hypot(ROWS*GRID_DX, COLS*GRID_DY)  # max distance within the maze (the diagonal)
|
JiahuiGuo/AIOpenNERO
|
Maze/constants.py
|
Python
|
mit
| 1,254
|
[
"VisIt"
] |
dfdf2ea5e94dbf9b9d68358587a298b67ac5186c4069644035d6c8605c3f4118
|
# !/bin/python
# -*- coding: latin-1 -*-
# Copyright (C) 2009-2014 CEA/DEN, EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
#
# Hexa : Creation d'hexaedres
import hexablock
import os
#---+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8
# Build the working document and the reference axis vectors used throughout
# the construction below.
doc  = hexablock.addDocument ("default")

vx = doc.addVector (1,0,0)
vy = doc.addVector (0,1,0)
vz = doc.addVector (0,0,1)
vxy = doc.addVector (1,1,0)   # diagonal used as the pipe's base direction

nbr_files = 0   # counter for the numbered VTK snapshots (see save_vtk)
# ======================================================= save_vtk
def save_vtk () :
    """Dump the current document to the next numbered VTK snapshot file."""
    global nbr_files
    snapshot_name = "lecas%d.vtk" % nbr_files
    nbr_files = nbr_files + 1
    doc.saveVtk (snapshot_name)
# ======================================================= carre
def carre (x) :
    """Return the square of x."""
    return x ** 2
# ======================================================= get_center
def get_center (quad) :
    """Return the [x, y, z] barycenter of a quad's four corner vertices."""
    cx = cy = cz = 0
    for corner in (quad.getVertex(idx) for idx in range(4)):
        # Accumulate quarter contributions (same division pattern as the
        # original, so integer/float division behaviour is unchanged).
        cx += corner.getX() / 4
        cy += corner.getY() / 4
        cz += corner.getZ() / 4
    return [cx, cy, cz]
# ======================================================= nearest
# Return the grid vertex closest (squared euclidean distance) to `vertex`.
# NOTE: this script targets the SALOME Python 2 runtime (print statements).
def nearest (grid, vertex) :
    nbre = grid.countVertex()
    # NOTE(review): vertices farther than sqrt(1e6) from `vertex` are never
    # matched and `result` stays None -- confirm this bound is intended.
    dmin = 1e+6
    result = None
    px = vertex.getX()
    py = vertex.getY()
    pz = vertex.getZ()
    for nro in range (nbre) :
        v1 = grid.getVertex (nro)
        d2 = carre(px-v1.getX()) + carre(py-v1.getY()) + carre(pz-v1.getZ())
        if (d2 < dmin) :
            result = v1
            dmin = d2
    # Trace the match (crashes with AttributeError if nothing matched).
    print vertex.getName () , px, py, pz, " -> ", result.getName()
    return result
# ======================================================= nearest_quad
# Return the quad of `grid` whose barycenter is closest to that of `quad`.
def nearest_quad (grid, quad) :
    dmin = 1e+16
    result = None
    [ox, oy, oz] = get_center (quad)
    nbre = grid.countQuad ()
    for nro in range (nbre) :
        q1 = grid.getQuad (nro)
        if q1 != None :
            [px, py, pz] = get_center (q1)
            d2 = carre(px-ox) + carre(py-oy) + carre(pz-oz)
            if (d2 < dmin) :
                result = q1
                dmin = d2
    # NOTE(review): px, py, pz here belong to the LAST quad inspected, not to
    # the selected `result`; also crashes if the grid had no quads.
    print quad.getName () , px, py, pz, " -> ", result.getName()
    return result
# ======================================================= insert_cylinder
# Replace the hexahedron at (nx, ny, 0) of `plaque` by a quarter-pipe and
# stitch the pipe's outer quads onto the surrounding plate.
def insert_cylinder (plaque, nx, ny) :
    hexa = plaque.getHexaIJK (nx, ny, 0)

    # Bounding box of the hexa to be removed (666 is just a sentinel larger
    # than any coordinate in this model).
    xmin = 666 ; ymin = xmin ; zmin = xmin
    xmax = -666 ; ymax = xmax ; zmax = xmax

    tabv1 = []
    for nv in range (8) :
        node = hexa.getVertex (nv)
        xmin = min (xmin, node.getX()) ; xmax = max (xmax, node.getX())
        ymin = min (ymin, node.getY()) ; ymax = max (ymax, node.getY())
        zmin = min (zmin, node.getZ()) ; zmax = max (zmax, node.getZ())
        tabv1.append (node)

    doc.removeHexa (hexa)
    save_vtk ()

    # Pipe dimensioned from the removed cell: centered in x/y, base at zmin.
    dx = (xmax - xmin)/2
    dz = (zmax - zmin)/2

    xorig = (xmin + xmax)/2
    yorig = (ymin + ymax)/2
    zorig = (zmin + zmax)/2 - dz

    orig = doc.addVertex (xorig, yorig, zorig)
    nr = 1
    na = 4
    nh = 1
    rext = dx
    rint = rext/2
    haut = 1
    angle = 360
    pipe = doc.makePipeUni (orig, vxy,vz, rint,rext,angle,haut, nr,na,nh)
    hexablock.what ()

    # Collect the pipe's outer quads to be joined with the plate.
    tabquad = []
    tabv0 = []
    for nq in range (4) :
        quad = pipe.getQuadJK (1, nq, 0)
        tabquad.append (quad)

    print " .. tabquad[0] = ", tabquad[0].getName ()
    cible = nearest_quad (plaque, tabquad[0])
    tabquad[0]. setColor (5)
    cible . setColor (5)
    save_vtk ()

    va1 = tabquad[0].getVertex (0)
    va2 = tabquad[0].getVertex (1)
    vb1 = cible.nearestVertex (va1)
    vb2 = cible.nearestVertex (va2)

    doc.setLevel (1)
    doc.joinQuadsUni (tabquad, cible, va1, vb1, va2, vb2, 1)
    hexablock.what ()
    save_vtk ()
    return

    # NOTE(review): everything below the bare `return` above is unreachable
    # (and `tabv0` is always empty); it looks like a disabled merge-vertices
    # variant kept for reference -- confirm before deleting.
    doc.setLevel (1)
    for nv in range (8) :
        ier = doc.mergeVertices (tabv0[nv], tabv1[nv])
        print "ier = ", ier
    save_vtk ()
# ======================================================= test_2013
def test_2013 () :
    """Build a 3x3x1 cartesian plate and carve a pipe into its center cell."""
    origin = doc.addVertex (0, 0, 0)
    # Plate extent (world units) and subdivision (cells per axis).
    size_x = 3
    size_y = size_x
    size_z = 1
    cells_x = 3
    cells_y = cells_x
    cells_z = 1
    plaque = doc.makeCartesianUni (origin, vx, vy, vz,
                                   size_x, size_y, size_z,
                                   cells_x, cells_y, cells_z)
    save_vtk ()
    insert_cylinder (plaque, 1, 1)
    return doc
# ================================================================= Begin
# Script entry point: build the model, attach the meshing laws and mesh it.
doc = test_2013 ()

doc .addLaws (0.1, True)
mesh_hexas = hexablock.mesh (doc)
|
FedoraScientific/salome-hexablock
|
src/TEST_PY/cas_2013/cas_2013.py
|
Python
|
lgpl-2.1
| 5,166
|
[
"VTK"
] |
6cf081c6322f30c6ffec0ec2b457552f61a1b9e540315e494fb2f86603db014e
|
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PyPybedtools(PythonPackage):
    """pybedtools wraps and extends BEDTools and offers
    feature-level manipulations from within Python."""

    homepage = "http://daler.github.io/pybedtools"
    url = "https://pypi.io/packages/source/p/pybedtools/pybedtools-0.7.10.tar.gz"

    version('0.7.10', 'f003c67e22c48b77f070538368ece70c')
    version('0.6.9', 'b7df049036422d8c6951412a90e83dca')

    # Build-time only.
    depends_on('py-setuptools', type='build')
    # The BEDTools binaries are invoked at run time by pybedtools.
    depends_on('bedtools2', type=('build', 'run'))
    depends_on('py-numpy', type=('build', 'run'))
    depends_on('py-pandas', type=('build', 'run'))
    # The required pysam version differs between pybedtools releases.
    depends_on('py-pysam@0.8.1:', type=('build', 'run'), when='@0.7.0:')
    depends_on('py-pysam@0.7.7', type=('build', 'run'), when='@0.6.9')
    depends_on('py-six', type=('build', 'run'))
|
EmreAtes/spack
|
var/spack/repos/builtin/packages/py-pybedtools/package.py
|
Python
|
lgpl-2.1
| 2,071
|
[
"pysam"
] |
4d2c8998bbe25214fd5fdc88514e0615b841fca97884911b92752ad9c5c60dbc
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''Exception types for GRIT.
'''
class Base(Exception):
  '''A base exception that uses the class's docstring in addition to any
  user-provided message as the body of the Base.
  '''

  def __init__(self, msg=''):
    # FIX: in the flattened copy of this file the `else` had become attached
    # to the inner `if self.__doc__` test, which replaced a caller-supplied
    # message with None for docstring-less subclasses and left the message
    # empty when none was given. Upstream GRIT binds the `else` to the outer
    # test: an empty message falls back to the class docstring.
    if len(msg):
      if self.__doc__:
        msg = self.__doc__ + ': ' + msg
    else:
      msg = self.__doc__
    Exception.__init__(self, msg)


# The subclasses below previously each carried a boilerplate __init__ that
# only forwarded to the parent; those are redundant (the inherited
# Base.__init__ behaves identically) and have been removed.

class Parsing(Base):
  '''An error occurred parsing a GRD or XTB file.'''


class UnknownElement(Parsing):
  '''An unknown node type was encountered.'''


class MissingElement(Parsing):
  '''An expected element was missing.'''


class UnexpectedChild(Parsing):
  '''An unexpected child element was encountered (on a leaf node).'''


class UnexpectedAttribute(Parsing):
  '''The attribute was not expected'''


class UnexpectedContent(Parsing):
  '''This element should not have content'''


class MissingMandatoryAttribute(Parsing):
  '''This element is missing a mandatory attribute'''


class MutuallyExclusiveMandatoryAttribute(Parsing):
  '''This element has 2 mutually exclusive mandatory attributes'''


class DuplicateKey(Parsing):
  '''A duplicate key attribute was found.'''


class TooManyExamples(Parsing):
  '''Only one <ex> element is allowed for each <ph> element.'''


class GotPathExpectedFilenameOnly(Parsing):
  '''The 'filename' attribute of an <output> node must not be a path, only
  a filename.
  '''


class InvalidMessage(Base):
  '''The specified message failed validation.'''


class InvalidTranslation(Base):
  '''Attempt to add an invalid translation to a clique.'''


class NoSuchTranslation(Base):
  '''Requested translation not available'''


class NotReady(Base):
  '''Attempt to use an object before it is ready, or attempt to translate
  an empty document.'''


class TooManyPlaceholders(Base):
  '''Too many placeholders for elements of the same type.'''


class MismatchingPlaceholders(Base):
  '''Placeholders do not match.'''


class InvalidPlaceholderName(Base):
  '''Placeholder name can only contain A-Z, a-z, 0-9 and underscore.'''


class BlockTagInTranslateableChunk(Base):
  '''A block tag was encountered where it wasn't expected.'''


class SectionNotFound(Base):
  '''The section you requested was not found in the RC file. Make
  sure the section ID is correct (matches the section's ID in the RC file).
  Also note that you may need to specify the RC file's encoding (using the
  encoding="" attribute) if it is not in the default Windows-1252 encoding.
  '''


class IdRangeOverlap(Base):
  '''ID range overlap.'''
|
JoKaWare/WTL-DUI
|
tools/grit/grit/exception.py
|
Python
|
bsd-3-clause
| 3,985
|
[
"xTB"
] |
5fd136be4e599ed1a94bbd7fcdb18a57a9ae064dd7d2d8fbb46a6adb6c94f5c1
|
# This file is now deprecated and slated to be deleted.
# Don't add code here anymore. put it in punkin_chunker instead.
import sys
import os
import subprocess
from subprocess import check_output
from os.path import split, join
#import random
#import math
from Bio import SeqIO
#from Bio import SeqRecord
from Bio.Blast.Applications import NcbiblastnCommandline
from Blast_Result import Blast_Result
from Blast_Result_Set import Blast_Result_Set
import operator
# Build a nucleotide BLAST database from the given HLA reference FASTA by
# shelling out to NCBI's makeblastdb (must be on PATH).
def CreateBlastDatabase(HLAReferenceFilename):
    print ('Creating a blast database.')
    makeBlastDB_cline = ('makeblastdb'
        + ' -in ' + HLAReferenceFilename
        # + ' -parse_seqids -dbtype nucl')
        + ' -dbtype nucl')
    print ('MakeDB Commandline:\n' + makeBlastDB_cline)
    # NOTE(review): shell=True with a string-built command line -- a filename
    # containing spaces or shell metacharacters breaks this (or worse, gets
    # interpreted by the shell); prefer an argument list with shell=False.
    subprocess.call(makeBlastDB_cline, shell=True)
# This method is a directory-safe way to open up a write file.
def createOutputFile(outputfileName):
    """Open `outputfileName` for writing, creating its parent directory first
    if it does not exist yet, and return the open file handle."""
    parentDir = split(outputfileName)[0]
    if not os.path.isdir(parentDir):
        os.makedirs(parentDir)
    return open(outputfileName, 'w')
def printSortedFastaFiles(ResultSets, outputDirectory, finalBlastSummaryOutput):
    """Write each read to per-gene and per-allele-group output files.

    Reads with an assigned gene go to 'HLA-<gene>.<fmt>' (and, when an
    allele group is also assigned, additionally to 'HLA-<gene>_<group>.<fmt>');
    unassigned reads go to 'UnsortedReads.<fmt>'.  Per-file read counts are
    appended to *finalBlastSummaryOutput*, sorted by read count descending.
    """
    # Fixed: the global declaration previously appeared AFTER the first use
    # of FileOutputFormat, which is a SyntaxError on Python 3.
    global FileOutputFormat
    sortedOutputDirectory = join(outputDirectory, 'SortedReads')
    # gene -> [output file, read count]
    geneLevelOutputFiles = {}
    # (gene, group) -> [output file, read count]
    groupLevelOutputFiles = {}
    unsortedReadOutputFile = createOutputFile(
        join(sortedOutputDirectory, 'UnsortedReads.' + FileOutputFormat))
    unsortedReadCount = 0
    for currentResultSet in ResultSets:
        currentGene = currentResultSet.AssignedGene
        currentGroup = currentResultSet.AssignedAlleleGroup
        geneAssigned = len(currentGene) > 0 and currentGene != '-1'
        groupAssigned = len(currentGroup) > 0 and currentGroup != '-1'
        if geneAssigned:
            if groupAssigned:
                # Find or create the group-level output, then write the read.
                # (The original's 'foundGroupLevelOutput != None' check
                # compared a bool to None and was therefore always true;
                # the write is unconditional by design.)
                key = (currentGene, currentGroup)
                if key not in groupLevelOutputFiles:
                    groupLevelOutputFiles[key] = [createOutputFile(join(
                        sortedOutputDirectory,
                        'HLA-' + currentGene + '_' + currentGroup + '.' + FileOutputFormat)), 0]
                groupEntry = groupLevelOutputFiles[key]
                groupEntry[1] += 1
                SeqIO.write([currentResultSet.readObject], groupEntry[0], FileOutputFormat)
            else:
                print('This read maps to a gene but not a group:' + str(currentResultSet.readID))
            # Every gene-assigned read also goes to the gene-level output.
            if currentGene not in geneLevelOutputFiles:
                geneLevelOutputFiles[currentGene] = [createOutputFile(join(
                    sortedOutputDirectory,
                    'HLA-' + currentGene + '.' + FileOutputFormat)), 0]
            geneEntry = geneLevelOutputFiles[currentGene]
            geneEntry[1] += 1
            SeqIO.write([currentResultSet.readObject], geneEntry[0], FileOutputFormat)
        else:
            # No gene assigned: keep the read so nothing is silently lost.
            unsortedReadCount += 1
            SeqIO.write([currentResultSet.readObject], unsortedReadOutputFile, FileOutputFormat)
    # Write sort results to the summary and close the per-gene/group files,
    # sorted by number of reads (descending).
    finalBlastSummaryOutput.write('\n\nSorting Read Results:\n')
    for gene, (handle, count) in sorted(
            geneLevelOutputFiles.items(), key=lambda kv: kv[1][1], reverse=True):
        finalBlastSummaryOutput.write('HLA-' + gene + ': ' + str(count) + ' Reads\n')
        handle.close()
    for (gene, group), (handle, count) in sorted(
            groupLevelOutputFiles.items(), key=lambda kv: kv[1][1], reverse=True):
        finalBlastSummaryOutput.write('HLA-' + gene + group + ': ' + str(count) + ' Reads\n')
        handle.close()
    finalBlastSummaryOutput.write('Unsorted Reads: ' + str(unsortedReadCount) + ' Reads\n')
    unsortedReadOutputFile.close()
"""
def BlastMinionReadsAgainstGroupwiseReference():
print ('Time to sort our MinION Reads. Compare against the HLA Groupwise Reference.')
HLAReferenceFilename = sys.argv[1]
inputFilename = sys.argv[2]
outputDirectory = sys.argv[3]
shortFilename = split(inputFilename)[1]
#print('Short Filename:' + shortFilename)
print ('HLA Groupwise Reference:' + HLAReferenceFilename)
print ('MinION Read Input file:' + inputFilename)
print ('Output directory:' + outputDirectory)
print ('Creating output files.')
sortedAReadsOutputFilename = join(outputDirectory ,
shortFilename.replace('.fasta','_HLA_A_Reads.fasta'))
sortedBReadsOutputFilename = join(outputDirectory ,
shortFilename.replace('.fasta','_HLA_B_Reads.fasta'))
sortedCReadsOutputFilename = join(outputDirectory ,
shortFilename.replace('.fasta','_HLA_C_Reads.fasta'))
unsortedReadsOutputFilename = join(outputDirectory ,
shortFilename.replace('.fasta','_Unsorted_Reads.fasta'))
sortResultsOutputFilename = join(outputDirectory ,
shortFilename.replace('.fasta','_Sort_Results.txt'))
sortedAReadsOutput = createOutputFile(sortedAReadsOutputFilename)
sortedBReadsOutput = createOutputFile(sortedBReadsOutputFilename)
sortedCReadsOutput = createOutputFile(sortedCReadsOutputFilename)
unsortedReadsOutput = createOutputFile(unsortedReadsOutputFilename)
sortResultsOutput = createOutputFile(sortResultsOutputFilename)
# Write a canu script to align these reads.
# canu -p 28884R9Final -d /minion/TorqueShare/TestCanu/July21_28884R9/final/output -s /minion/TorqueShare/TestCanu/specfiles/HLA.spec contigFilter="2 1000 1.0 1.0 2" -nanopore-raw /minion/TorqueShare/TestCanu/inputData/July21BarcodedReads/28884R9.fastq
CanuOutputFileName = join(outputDirectory ,
shortFilename.replace('.fasta','_CanuAlignmentScript.sh'))
CanuOutput = createOutputFile(CanuOutputFileName)
CanuOutput.write('canu' +
' -p ' + shortFilename.replace('.fasta','_HLA_A_Reads') +
' -d ' + join(outputDirectory , 'HLA_A_Alignment') +
' -s /minion/TorqueShare/TestCanu/specfiles/HLA.spec' +
' contigFilter=\"2 1000 1.0 1.0 2\"' +
' -nanopore-raw ' + sortedAReadsOutputFilename +
'\n\n' )
CanuOutput.write('canu' +
' -p ' + shortFilename.replace('.fasta','_HLA_B_Reads') +
' -d ' + join(outputDirectory , 'HLA_B_Alignment') +
' -s /minion/TorqueShare/TestCanu/specfiles/HLA.spec' +
' contigFilter=\"2 1000 1.0 1.0 2\"' +
' -nanopore-raw ' + sortedBReadsOutputFilename +
'\n\n' )
CanuOutput.write('canu' +
' -p ' + shortFilename.replace('.fasta','_HLA_C_Reads') +
' -d ' + join(outputDirectory , 'HLA_C_Alignment') +
' -s /minion/TorqueShare/TestCanu/specfiles/HLA.spec' +
' contigFilter=\"2 1000 1.0 1.0 2\"' +
' -nanopore-raw ' + sortedCReadsOutputFilename +
'\n\n' )
CanuOutput.close()
#This script must be executable.
#os.chmod(CanuOutputFileName, 0777)
print ('Parsing input fasta file.')
minionReadRecords = SeqIO.parse(inputFilename, "fasta")
readCount = len(list(SeqIO.parse(inputFilename, "fasta")))
print (str(readCount) + ' reads found in input.')
sortResultsOutput.write('HLA Groupwise Reference:' + HLAReferenceFilename + '\n')
sortResultsOutput.write('MinION Read Input file:' + inputFilename + '\n')
sortResultsOutput.write('Output directory:' + outputDirectory + '\n')
sortResultsOutput.write('Read Count: ' + str(readCount) + '\n')
aReadCount = 0
bReadCount = 0
cReadCount = 0
unsortedReadCount = 0
CreateBlastDatabase(HLAReferenceFilename)
# Each record represents an HLA element in the input fasta file.
for index, record in enumerate(minionReadRecords):
currentReadID = str(record.id)
#print ('Read ID:' + currentReadID)
currentSequence = str(record.seq)
#print ('Read Sequence:' + currentSequence)
print ('Sorting Read (' + str(index) + '/' + str(readCount) + ') : ' + currentReadID)
#Blast the read against the database
# I can pass the sequence directly into the blast from stdio. Pipe the sequence into blast.
# Make the commandline look like this:
# echo -e ">Name\nGGTTGAATG" | blastn -outfmt 0 -db /home/ben/MUMCScripts/BlastMinIONReads/inputData/SimpleReference.fasta -evalue 0.001
blastn_cline = NcbiblastnCommandline(db=HLAReferenceFilename, evalue=0.001, outfmt=0)
commandLineQuery = 'echo -e \'>' + currentReadID + '\n' + currentSequence + '\' | ' + str(blastn_cline)
# check_output will execute the commandline, wait for it to finish, and capture the output.
blastStdIOText = check_output(commandLineQuery, shell=True)
blastStdIOTextSplit = str(blastStdIOText).split('\n')
blastHits = []
# Store Blast hits
for index, line in enumerate(blastStdIOTextSplit):
# The lines in the blast results for match statistics begin with 'HLA_'
try:
if(line[0:4] == 'HLA_'):
# A trick to eliminate duplicate whitespace from the line string.
line = " ".join(line.split())
#print line
blastResultTokens = line.split(' ')
currentBlastResult = Blast_Result()
currentBlastResult.AlleleGroupName = str(blastResultTokens[0])
currentBlastResult.blastScore = float(blastResultTokens[1])
currentBlastResult.Gene = currentBlastResult.AlleleGroupName[4:5]
#print('Group:' + currentBlastResult.AlleleGroupName)
#print('Score:' + str(currentBlastResult.blastScore))
#print('Gene:' + currentBlastResult.Gene)
blastHits.append(currentBlastResult)
except Exception:
# Top Level exception handling like a pro.
# This is not really doing anything.
print ('Had a problem parsing a blast result. I will disregard this blast hit:' + line)
print sys.exc_info()[0]
print sys.exc_info()[1]
print sys.exc_info()[2]
#raise
#Choose the top three matches and print them.
print('Top three gene matches:')
if(len(blastHits) > 2):
for i in range(0,3):
print(blastHits[i].AlleleGroupName + ' : ' +
blastHits[i].Gene + ' : ' +
str(blastHits[i].blastScore) )
#Sort the Read by HLA Gene
#I say that if the top three blast matches have the same gene, I know what gene the read comes from.
sortGene = 'None'
if(len(blastHits) > 2):
if(blastHits[0].Gene == blastHits[1].Gene and blastHits[1].Gene == blastHits[2].Gene):
sortGene = blastHits[0].Gene
print ('I\'m confident this read comes from HLA-' + sortGene + ':' + currentReadID)
else:
print ('I\'m unable to sort this read:' + currentReadID)
pass
# Write the fasta to a sorted output file.
if(sortGene == 'A'):
aReadCount += 1
sortedAReadsOutput.write('>' + currentReadID + '\n')
sortedAReadsOutput.write( currentSequence + '\n')
elif(sortGene == 'B'):
bReadCount += 1
sortedBReadsOutput.write('>' + currentReadID + '\n')
sortedBReadsOutput.write( currentSequence + '\n')
elif(sortGene == 'C'):
cReadCount += 1
sortedCReadsOutput.write('>' + currentReadID + '\n')
sortedCReadsOutput.write( currentSequence + '\n')
else:
unsortedReadCount += 1
unsortedReadsOutput.write('>' + currentReadID + '\n')
unsortedReadsOutput.write( currentSequence + '\n')
sortResultsOutput.write('HLA-A : ' + str(aReadCount) + '\n')
sortResultsOutput.write('HLA-B : ' + str(bReadCount) + '\n')
sortResultsOutput.write('HLA-C : ' + str(cReadCount) + '\n')
sortResultsOutput.write('Unsorted : ' + str(unsortedReadCount) + '\n')
sortedAReadsOutput.close()
sortedBReadsOutput.close()
sortedCReadsOutput.close()
unsortedReadsOutput.close()
sortResultsOutput.close()
#Lets just try to execute the canu script for this one:
print('Executing canu alignment script.')
#subprocess.call(CanuOutputFileName, shell=True)
"""
# TODO: I should detect forward/reverse matches, and revcom the sequence.
# I have code to revcom the sequence in search_barcode
def BlastMinionReadsAgainstAPDRef():
    """Blast every MinION read against the APD allele reference and sort them.

    Command line (sys.argv): [1] reference fasta, [2] read file
    (fasta/fastq), [3] output directory.  Writes per-read blast summaries
    and delegates the per-gene/group fasta output to printSortedFastaFiles.
    """
    global FileOutputFormat
    print('Time to sort our MinION Reads. Compare against the APD Allele Reference.')
    # The full blast output can be several gigabytes worth of text. Probably not worth writing.
    printFullBlastOutput = False
    # TODO : Method to read parameters, this is kind of crappy. I know better.
    HLAReferenceFilename = sys.argv[1]
    inputFilename = sys.argv[2]
    outputDirectory = sys.argv[3]
    shortFilename = split(inputFilename)[1]
    print('HLA APD Reference:' + HLAReferenceFilename)
    print('MinION Read Input file:' + inputFilename)
    print('Output directory:' + outputDirectory)
    # Detect the read-file format from its extension.
    if inputFilename.endswith('.fasta') or inputFilename.endswith('.fa'):
        FileOutputFormat = "fasta"
    elif inputFilename.endswith('.fastq') or inputFilename.endswith('.fq'):
        FileOutputFormat = "fastq"
    else:
        # Fixed: an unrecognized extension previously left FileOutputFormat
        # unset, causing a confusing NameError further down; fail fast.
        raise ValueError('Unrecognized read file extension: ' + inputFilename)
    sortResultsOutput = createOutputFile(join(outputDirectory,
        (shortFilename + '.SortResults.txt')))
    if printFullBlastOutput:
        fullBlastOutput = createOutputFile(join(outputDirectory,
            (shortFilename + '.FullBlastOutput.txt')))
    shortBlastOutput = createOutputFile(join(outputDirectory,
        (shortFilename + '.ShortBlastOutput.txt')))
    finalBlastSummaryOutput = createOutputFile(join(outputDirectory,
        (shortFilename + '.BlastSummary.txt')))
    print('Parsing input file. It\'s format is ' + FileOutputFormat)
    parsedReads = SeqIO.parse(inputFilename, FileOutputFormat)
    minionReadRecords = enumerate(parsedReads)
    # Parse a second time just to count reads (SeqIO.parse is a generator).
    readCount = len(list(SeqIO.parse(inputFilename, FileOutputFormat)))
    print(str(readCount) + ' reads found in input.')
    finalBlastSummaryOutput.write('HLA APD Reference:' + HLAReferenceFilename + '\n')
    finalBlastSummaryOutput.write('MinION Read Input file:' + inputFilename + '\n')
    finalBlastSummaryOutput.write('Output directory:' + outputDirectory + '\n')
    finalBlastSummaryOutput.write('Read Count: ' + str(readCount) + '\n')
    CreateBlastDatabase(HLAReferenceFilename)
    blastResultSets = []
    print('Length of the enumerated read list:' + str(readCount))
    # TODO: Maybe reads can be sorted in batches, because this takes forever.
    # Each record represents one read in the input file.
    for index, record in minionReadRecords:
        currentReadID = str(record.id)
        currentSequence = str(record.seq)
        print('Sorting Read (' + str(index) + '/' + str(readCount) + ') : ' + currentReadID)
        currentBlastResultSet = Blast_Result_Set()
        currentBlastResultSet.readID = currentReadID
        currentBlastResultSet.readObject = record
        if printFullBlastOutput:
            fullBlastOutput.write('\n\nBlasting Read:' + currentReadID + '\n')
        shortBlastOutput.write('\n\nBlasting Read:' + currentReadID + '\n')
        # Pipe the sequence into blastn via stdin, e.g.:
        # echo -e ">Name\nGGTTGAATG" | blastn -outfmt 0 -db ref.fasta -evalue 0.001
        # NOTE(review): shell=True with an interpolated read ID/sequence is
        # only safe for trusted input files.
        blastn_cline = NcbiblastnCommandline(db=HLAReferenceFilename, evalue=0.001, outfmt=0)
        commandLineQuery = 'echo -e \'>' + currentReadID + '\n' + currentSequence + '\' | ' + str(blastn_cline)
        # check_output executes the commandline, waits, and captures stdout.
        blastStdIOText = check_output(commandLineQuery, shell=True)
        blastResultList = list(str(blastStdIOText).split('\n'))
        blastResultLineCount = len(blastResultList)
        blastResultLoopIndexer = 0
        while blastResultLoopIndexer < blastResultLineCount:
            line = blastResultList[blastResultLoopIndexer]
            if printFullBlastOutput:
                fullBlastOutput.write(line + '\n')
            if '>' in line:
                # '>' marks the fasta header of a blast hit; it carries the
                # allele nomenclature.
                shortBlastOutput.write(line)
                currentBlastResult = Blast_Result()
                currentBlastResult.parseNomenclatureLine(line)
                # Scan forward for the matching 'Score =' line.  Fixed: the
                # scan is now bounds-checked; a header with no score line
                # previously ran off the end of the list (IndexError).
                foundScore = False
                while not foundScore and blastResultLoopIndexer + 1 < blastResultLineCount:
                    blastResultLoopIndexer += 1
                    line = blastResultList[blastResultLoopIndexer]
                    if printFullBlastOutput:
                        fullBlastOutput.write(line + '\n')
                    if 'Score =' in line:
                        shortBlastOutput.write(line + '\n')
                        currentBlastResult.parseScoreLine(line)
                        foundScore = True
                        currentBlastResultSet.BlastResults.append(currentBlastResult)
            blastResultLoopIndexer += 1
        # Add this read's result set to the collection.
        blastResultSets.append(currentBlastResultSet)
    # Assign each read to a gene / allele group and report per-read stats.
    for blastResultSet in blastResultSets:
        blastResultSet.assignReadToGeneAndAlleleGroup()
        blastResultSet.printResultSummary(sortResultsOutput)
    printSortedFastaFiles(blastResultSets, outputDirectory, finalBlastSummaryOutput)
    sortResultsOutput.close()
    if printFullBlastOutput:
        fullBlastOutput.close()
    shortBlastOutput.close()
    finalBlastSummaryOutput.close()
    # Executing the canu alignment script was prototyped here and disabled.
    # subprocess.call(CanuOutputFileName, shell=True)
|
bmatern/punkin-chunker
|
src/deprecated/Blast_Minion_Reads.py
|
Python
|
gpl-3.0
| 24,133
|
[
"BLAST"
] |
1144b4e56a27e11358d33eee672d4a7354d384a85edfdd2c77ec6a96d9cce6fc
|
# -*- coding: utf-8 -*-
import numpy as np
from crystals import Crystal
from skued import patterson, powdersim
def test_patterson_output_shape():
    """The pair-distribution output must mirror the shape of the radii grid."""
    # Simulate a powder diffraction pattern first.
    vo2 = Crystal.from_database("vo2-m1")
    scattering_vector = np.linspace(0.2, 10, 1024)
    intensity = powdersim(crystal=vo2, q=scattering_vector)
    radii = np.arange(0.1, 5, 1 / 50)
    pairdist = patterson(q=scattering_vector, I=intensity, crystal=vo2, radii=radii)
    assert radii.shape == pairdist.shape
|
LaurentRDC/scikit-ued
|
skued/tests/test_patterson.py
|
Python
|
gpl-3.0
| 509
|
[
"CRYSTAL"
] |
6616d243db35ba11936244c738c616a3bc7252f067a3502fe44f3ba193841d2c
|
import ast
import _ast
import os
import random
import string
import api
import ast2code
__author__ = 'hiranya'
PREDICATE_SIMILARITY_THRESHOLD = 0.9
PREDICATE_SET_SIMILARITY_THRESHOLD = 0.85
def parse(string):
    """Parse *string* as a single Python expression and return its AST."""
    tree = ast.parse(string, mode='eval')
    return tree
class PredicateEvaluator(ast.NodeVisitor):
    """Evaluates a parsed predicate expression tree to a concrete value.

    Supports comparisons, arithmetic, boolean logic, container literals and a
    small set of quantifier-style pseudo-functions (len/forall/exists/implies).
    Unknown node types fall through to NodeVisitor.generic_visit (None).
    """

    def evaluate(self, string):
        """Parse *string* as an expression and return its evaluated result."""
        tree = parse(string)
        return self.visit(tree)

    def visit_Expression(self, node):
        return self.visit(node.body)

    def visit_Compare(self, node):
        # Only the first operator/comparator pair is evaluated; chained
        # comparisons like 'a < b < c' are not supported.
        left = self.visit(node.left)
        op = node.ops[0]
        right = self.visit(node.comparators[0])
        if isinstance(op, _ast.Gt):
            return left > right
        elif isinstance(op, _ast.Lt):
            return left < right
        elif isinstance(op, _ast.GtE):
            return left >= right
        elif isinstance(op, _ast.LtE):
            return left <= right
        elif isinstance(op, _ast.Eq):
            return left == right
        elif isinstance(op, _ast.NotEq):
            return left != right
        elif isinstance(op, _ast.In):
            return left in right
        elif isinstance(op, _ast.NotIn):
            return left not in right
        elif isinstance(op, _ast.Is):
            return left is right
        elif isinstance(op, _ast.IsNot):
            # Fixed: was 'left not in right', a copy/paste error.
            return left is not right

    def visit_Num(self, node):
        return node.n

    def visit_Str(self, node):
        return node.s

    def visit_Constant(self, node):
        # Python 3.8+ parses literals (numbers, strings, True/False/None) as
        # Constant nodes; visit_Num/visit_Str remain for older ASTs.
        return node.value

    def visit_List(self, node):
        return [self.visit(item) for item in node.elts]

    def visit_Tuple(self, node):
        return tuple(self.visit(item) for item in node.elts)

    def visit_Dict(self, node):
        # Fixed: keys and values are now evaluated; the original stored the
        # raw AST nodes themselves in the resulting dictionary.
        items = {}
        for key_node, value_node in zip(node.keys, node.values):
            items[self.visit(key_node)] = self.visit(value_node)
        return items

    def visit_Attribute(self, node):
        value = self.visit(node.value)
        return getattr(value, node.attr)

    def visit_Name(self, node):
        # In Python 2 ASTs, True/False arrive as plain Name nodes.
        if node.id == 'True':
            return True
        if node.id == 'False':
            # Fixed: 'False' previously fell through and returned None.
            return False
        return None

    def visit_BinOp(self, node):
        left = self.visit(node.left)
        op = node.op
        right = self.visit(node.right)
        if isinstance(op, _ast.Add):
            return left + right
        elif isinstance(op, _ast.Sub):
            return left - right
        elif isinstance(op, _ast.Mult):
            return left * right
        elif isinstance(op, _ast.Div):
            return left / right
        elif isinstance(op, _ast.Mod):
            return left % right
        elif isinstance(op, _ast.Pow):
            return left ** right

    def visit_BoolOp(self, node):
        # Fold the operand list left-to-right with and/or.
        result = None
        op = node.op
        for value in node.values:
            if result is None:
                result = self.visit(value)
            elif isinstance(op, _ast.And):
                result = result and self.visit(value)
            else:
                result = result or self.visit(value)
        return result

    def visit_UnaryOp(self, node):
        result = self.visit(node.operand)
        op = node.op
        if isinstance(op, _ast.Not):
            return not result

    def visit_Call(self, node):
        function = node.func.id
        args = node.args
        if function == 'len':
            return len(self.visit(args[0]))
        elif function == 'forall':
            # NOTE(review): args[2] is evaluated without binding the loop
            # item anywhere, so the predicate cannot reference it; kept
            # as-is to preserve the original semantics.
            items = self.visit(args[1])
            for item in items:
                if not self.visit(args[2]):
                    return False
            return True
        elif function == 'exists':
            items = self.visit(args[1])
            for item in items:
                if self.visit(args[2]):
                    return True
            return False
        elif function == 'implies':
            # also support follows/iff/niff
            left = bool(self.visit(args[0]))
            right = bool(self.visit(args[1]))
            # Fixed: logical implication is (not left) or right; the
            # original computed 'left and right'.
            return (not left) or right
class ASTComparator(ast.NodeVisitor):
    """Order-tolerant structural comparison of two expression ASTs.

    Walks the left tree while tracking the corresponding right-tree node in
    self.current_right.  Commutative constructs (BinOp operands, BoolOp
    values, ==/!= operands, mirrored </> comparisons) may appear in either
    order.  All debug output uses print() calls, which work on both Python 2
    and 3 (the original used Python-2-only print statements).
    """

    def compare(self, item1, item2):
        """Return True when the two items (source strings or ASTs) match."""
        left = parse(item1) if isinstance(item1, str) else item1
        right = parse(item2) if isinstance(item2, str) else item2
        self.current_right = right
        return self.visit(left)

    def visit_Expression(self, node):
        if not isinstance(self.current_right, _ast.Expression):
            print('expression mismatch')
            return False
        self.current_right = self.current_right.body
        return self.visit(node.body)

    def visit_BinOp(self, node):
        if not isinstance(self.current_right, _ast.BinOp):
            return False
        if not isinstance(node.op, type(self.current_right.op)):
            print('binop op mismatch')
            return False
        temp = self.current_right
        # Try same-side operands first, then the swapped order.
        self.current_right = temp.left
        if self.visit(node.left):
            self.current_right = temp.right
            return self.visit(node.right)
        self.current_right = temp.right
        if not self.visit(node.left):
            print('binop left arg mismatch')
            return False
        self.current_right = temp.left
        return self.visit(node.right)

    def visit_BoolOp(self, node):
        if not isinstance(self.current_right, _ast.BoolOp):
            print('boolop mismatch')
            return False
        if not isinstance(node.op, type(self.current_right.op)):
            print('boolop op mismatch')
            return False
        left_values = node.values
        right_values = self.current_right.values
        if len(left_values) != len(right_values):
            print('boolop value count mismatch')
            return False
        # Greedy order-insensitive matching of the operand lists.
        matches = []
        for left_value in left_values:
            for right_value in right_values:
                self.current_right = right_value
                if self.visit(left_value):
                    matches.append(True)
                    break
        if len(matches) != len(left_values):
            print('one or more boolop values did not match')
            return False
        return True

    def visit_Compare(self, node):
        if not isinstance(self.current_right, _ast.Compare):
            print('comparison mismatch')
            return False
        temp = self.current_right
        left_op = node.ops[0]
        right_op = temp.ops[0]

        def args_match(first, second):
            # Match (first, second) against (temp.left, temp.comparators[0]).
            self.current_right = temp.left
            if not self.visit(first):
                return False
            self.current_right = temp.comparators[0]
            return self.visit(second)

        if isinstance(left_op, type(right_op)):
            if isinstance(left_op, (_ast.Eq, _ast.NotEq)):
                # ==/!= are commutative; accept either operand order.
                # (Fixed: the original tried only the swapped order, so
                # identical comparisons like 'a == b' vs 'a == b' failed.)
                return (args_match(node.left, node.comparators[0]) or
                        args_match(node.comparators[0], node.left))
            return args_match(node.left, node.comparators[0])
        # Mirrored inequality pairs: 'a < b' matches 'b > a', etc.
        mirrored = (
            (isinstance(left_op, _ast.Lt) and isinstance(right_op, _ast.Gt)) or
            (isinstance(left_op, _ast.Gt) and isinstance(right_op, _ast.Lt)) or
            (isinstance(left_op, _ast.LtE) and isinstance(right_op, _ast.GtE)) or
            (isinstance(left_op, _ast.GtE) and isinstance(right_op, _ast.LtE)))
        if not mirrored:
            return False
        return args_match(node.comparators[0], node.left)

    def visit_Call(self, node):
        if not isinstance(self.current_right, _ast.Call):
            print('function call mismatch')
            return False
        temp = self.current_right
        self.current_right = temp.func
        if not self.visit(node.func):
            print('function name mismatch')
            return False
        if len(node.args) != len(temp.args):
            print('function arg count mismatch')
            return False
        for left_arg, right_arg in zip(node.args, temp.args):
            self.current_right = right_arg
            if not self.visit(left_arg):
                print('function arg mismatch')
                return False
        return True

    def visit_Name(self, node):
        if not isinstance(self.current_right, _ast.Name):
            print('name mismatch')
            return False
        if node.id != self.current_right.id:
            print('name id mismatch')
            return False
        return True

    def visit_Num(self, node):
        if not isinstance(self.current_right, _ast.Num):
            print('number mismatch')
            return False
        if node.n != self.current_right.n:
            print('number value mismatch')
            return False
        return True

    def visit_Str(self, node):
        if not isinstance(self.current_right, _ast.Str):
            print('string mismatch')
            return False
        if node.s != self.current_right.s:
            print('string value mismatch')
            return False
        return True

    def visit_Constant(self, node):
        # Python 3.8+ literal node (replaces Num/Str in parsed trees).
        if not isinstance(self.current_right, _ast.Constant):
            print('constant mismatch')
            return False
        if node.value != self.current_right.value:
            print('constant value mismatch')
            return False
        return True

    def visit_Attribute(self, node):
        if not isinstance(self.current_right, _ast.Attribute):
            print('attribute mismatch')
            return False
        if node.attr != self.current_right.attr:
            print('attr value mismatch')
            return False
        self.current_right = self.current_right.value
        return self.visit(node.value)

    def visit_List(self, node):
        if not isinstance(self.current_right, _ast.List):
            print('list mismatch')
            return False
        right_elements = self.current_right.elts
        if len(node.elts) != len(right_elements):
            print('list length mismatch')
            return False
        for left_element, right_element in zip(node.elts, right_elements):
            self.current_right = right_element
            if not self.visit(left_element):
                print('list member mismatch')
                return False
        return True

    def visit_Tuple(self, node):
        if not isinstance(self.current_right, _ast.Tuple):
            print('tuple mismatch')
            return False
        right_elements = self.current_right.elts
        if len(node.elts) != len(right_elements):
            print('tuple length mismatch')
            return False
        for left_element, right_element in zip(node.elts, right_elements):
            self.current_right = right_element
            if not self.visit(left_element):
                print('tuple member mismatch')
                return False
        return True

    def visit_Dict(self, node):
        if not isinstance(self.current_right, _ast.Dict):
            print('dict mismatch')
            return False
        temp = self.current_right
        if len(node.keys) != len(temp.keys):
            print('dict length mismatch')
            return False
        # Keys and values are compared positionally (order-sensitive),
        # matching the original behavior.
        for left_key, right_key in zip(node.keys, temp.keys):
            self.current_right = right_key
            if not self.visit(left_key):
                print('dict key mismatch')
                return False
        for left_value, right_value in zip(node.values, temp.values):
            self.current_right = right_value
            if not self.visit(left_value):
                print('dict value mismatch')
                return False
        return True
class ASTSimilarityChecker(ast.NodeVisitor):
    """Computes a Dice-style similarity coefficient between two ASTs:
    2S / (2S + L + R), where S counts shared nodes, L the unmatched nodes
    of the left tree and R the unmatched nodes of the right tree."""

    def get_similarity(self, item1, item2):
        """Return the similarity between the two items (strings or ASTs)."""
        left_tree = parse(item1) if isinstance(item1, str) else item1
        right_tree = parse(item2) if isinstance(item2, str) else item2
        self.left = []
        self.shared = []
        self.matcher = NodeMatcher(right_tree)
        self.visit(left_tree)
        shared_count = len(self.shared)
        unmatched_left = len(self.left)
        unmatched_right = len(self.matcher.nodes)
        return (2.0 * shared_count) / (2.0 * shared_count + unmatched_left + unmatched_right)

    def visit(self, node):
        # Classify the children first, then try to pair this node with an
        # unconsumed node from the right tree.
        self.generic_visit(node)
        if self.matcher.match(node):
            self.shared.append(node)
        else:
            self.left.append(node)
class NodeEnumerator(ast.NodeVisitor):
    """Flattens an AST into a list containing every one of its nodes."""

    def get_node_list(self, tree):
        """Return all nodes reachable from *tree*; children precede parents."""
        self._found = []
        self.visit(tree)
        return self._found

    def visit(self, node):
        # Descend first, then record this node (post-order traversal).
        self.generic_visit(node)
        self._found.append(node)
class NodeMatcher():
    """Holds the nodes of a 'right' tree and consumes them as they match.

    Each successful match removes the matched node so it cannot be paired
    with a second left-tree node.
    """

    def __init__(self, right):
        self.nodes = NodeEnumerator().get_node_list(right)
        self.matched_nodes = []

    def match(self, target):
        """Consume and record the first unconsumed node equivalent to *target*."""
        for candidate in self.nodes:
            if not isinstance(target, type(candidate)):
                continue
            # Value-sensitive node types have a visit_<Type> checker; any
            # other node of the same type matches unconditionally.
            checker = getattr(self, 'visit_' + type(candidate).__name__, None)
            if checker is not None and not checker(candidate, target):
                continue
            self.matched_nodes.append(candidate)
            self.nodes.remove(candidate)
            return True
        return False

    def visit_Call(self, node, target):
        return node.func.id == target.func.id

    def visit_Num(self, node, target):
        return node.n == target.n

    def visit_Str(self, node, target):
        return node.s == target.s
class PredicateRandomizer(ast.NodeTransformer):
def randomize(self):
return bool(random.randint(0,1))
def visit_Num(self, node):
if self.randomize():
number = random.random() * node.n
return ast.copy_location(_ast.Num(n=number), node)
else:
return node
def visit_Str(self, node):
if self.randomize():
n = len(node.s)
s = ''.join(random.choice(string.ascii_uppercase + string.lowercase + string.digits) for x in range(n))
return ast.copy_location(_ast.Str(s=s), node)
else:
return node
def visit_Eq(self, node):
if self.randomize():
return ast.copy_location(_ast.NotEq(), node)
else:
return node
def visit_NotEq(self, node):
if self.randomize():
return ast.copy_location(_ast.Eq(), node)
else:
return node
def visit_In(self, node):
if self.randomize():
return ast.copy_location(_ast.NotIn(), node)
else:
return node
def visit_NotIn(self, node):
if self.randomize():
return ast.copy_location(_ast.In(), node)
else:
return node
def visit_Gt(self, node):
return self.get_random_comparator(node)
def visit_GtE(self, node):
return self.get_random_comparator(node)
def visit_Lt(self, node):
return self.get_random_comparator(node)
def visit_LtE(self, node):
return self.get_random_comparator(node)
def get_random_comparator(self, node):
if self.randomize():
comp = random.randint(1,4)
if comp == 1:
op = _ast.Lt()
elif comp == 2:
op = _ast.LtE()
elif comp == 3:
op = _ast.Gt()
else:
op = _ast.GtE()
return ast.copy_location(op, node)
else:
return node
def visit_Tuple(self, node):
if self.randomize():
elements = [self.visit(node.elts[0])]
for element in node.elts[1:]:
if self.randomize():
elements.append(self.visit(element))
if self.randomize():
for element in node.elts:
if self.randomize():
elements.append(self.visit(element))
return ast.copy_location(_ast.Tuple(elts=elements), node)
else:
return node
def visit_BoolOp(self, node):
if self.randomize():
if self.randomize():
op = ast.And()
else:
op = ast.Or()
values = []
for value in node.values:
values.append(self.visit(value))
return ast.copy_location(_ast.BoolOp(op=op, values=values), node)
else:
return node
def visit_Name(self, node):
if self.randomize():
if node.id == 'forall':
return ast.copy_location(_ast.Name(id='exists'), node)
elif node.id == 'exists':
return ast.copy_location(_ast.Name(id='forall'), node)
return node
def parse_predicate_set(string_set):
    """Parse every predicate source string in *string_set* into an AST."""
    return [parse(text) for text in string_set]
def pre_process_ast_set(ast_set):
    """Flatten top-level and/or expressions into separate expression trees.

    An expression whose body is a BoolOp is split so that each operand
    becomes its own Expression; all other trees pass through unchanged.
    """
    flattened = []
    for tree in ast_set:
        body = tree.body
        if isinstance(body, _ast.BoolOp):
            for operand in body.values:
                wrapper = _ast.Expression()
                wrapper.body = operand
                flattened.append(wrapper)
        else:
            flattened.append(tree)
    return flattened
def compare_predicate_sets(set1, set2):
    """Dice-style similarity between two sets of predicate source strings.

    Returns -1 when both sets are empty (nothing to compare); otherwise
    2S/(2S+L+R), where S counts matched predicate pairs and L/R count the
    unmatched predicates on either side.
    """
    tree_set1 = pre_process_ast_set(parse_predicate_set(set1))
    tree_set2 = pre_process_ast_set(parse_predicate_set(set2))
    # Work on a copy so matched right-side trees can be consumed.
    remaining_set2 = list(tree_set2)
    checker = ASTSimilarityChecker()
    similarities = {}
    matches = {}
    for tree1 in tree_set1:
        for tree2 in remaining_set2:
            sim = checker.get_similarity(tree1, tree2)
            if sim >= PREDICATE_SIMILARITY_THRESHOLD:
                # Keep only the best-scoring candidate for this predicate.
                prev_sim = similarities.get(tree1)
                if prev_sim is None or sim > prev_sim:
                    similarities[tree1] = sim
                    matches[tree1] = tree2
        # Consume the best match once scanning is done.  Fixed:
        # dict.has_key() was removed in Python 3 ('in' works on both);
        # removing after the inner loop also avoids mutating the list
        # while iterating over it.
        if tree1 in matches:
            remaining_set2.remove(matches[tree1])
    matched = len(similarities)
    left_only = len(tree_set1) - matched
    right_only = len(tree_set2) - matched
    if matched + left_only + right_only == 0:
        return -1
    return (2.0 * matched) / (2.0 * matched + left_only + right_only)
def compare_operations(api1, op1, api2, op2):
    """Decide whether two API operations are semantically similar.

    Operations are compared only when their HTTP methods are identical or
    form one of the interchangeable pairs GET/POST or POST/PUT.  Pre- and
    post-condition sets are then scored with compare_predicate_sets; a
    score of -1 means "no predicates on either side" and is treated as
    compatible as long as the other score clears the threshold.

    Returns True when the operations are considered similar.

    Fix: a stray Python-2 debug print of the two scores was removed; it
    broke parsing under Python 3 and polluted stdout.
    """
    methods = sorted([op1.method, op2.method])
    if methods[0] != methods[1] and methods != ['GET', 'POST'] and methods != ['POST', 'PUT']:
        return False
    pre_sim = compare_predicate_sets(op1.get_pre_conditions(api1), op2.get_pre_conditions(api2))
    post_sim = compare_predicate_sets(op1.get_post_conditions(api1), op2.get_post_conditions(api2))
    pre_ok = pre_sim >= PREDICATE_SET_SIMILARITY_THRESHOLD
    post_ok = post_sim >= PREDICATE_SET_SIMILARITY_THRESHOLD
    if pre_ok and post_ok:
        return True
    if pre_ok and post_sim == -1:
        return True
    if post_ok and pre_sim == -1:
        return True
    return False
def randomize_predicate(predicate):
    """Parse *predicate*, run it through the AST randomizer, re-emit source."""
    randomized_tree = PredicateRandomizer().visit(parse(predicate))
    return ast2code.to_source(randomized_tree)
def _randomly_perturb(predicates):
    """Return a copy of *predicates* where each entry has a 1-in-3 chance
    of being rewritten by randomize_predicate."""
    perturbed = []
    for predicate in predicates:
        if random.randint(0, 2) == 1:
            perturbed.append(randomize_predicate(predicate))
        else:
            perturbed.append(predicate)
    return perturbed


def _randomize_custom_type_constraints(api_def, typed_value):
    """Perturb the constraints of *typed_value*'s type when it is a custom type."""
    data_type = typed_value.type.type
    if isinstance(data_type, api.CustomTypeRef):
        type_def = api_def.get_type_by_name(data_type.get_reference_name())
        type_def.constraints = _randomly_perturb(type_def.constraints)


def randomize_operation(api_def, op):
    """Randomly perturb the contracts of *op* in place and return it.

    The constraints of custom input/output types and the operation's
    requires/ensures predicate lists each get a 1-in-3 chance per entry
    of being rewritten by randomize_predicate.

    Fixes over the previous revision: the four near-identical perturbation
    loops were factored into helpers, and ``op.output`` is now guarded
    against being None, mirroring the existing guard on ``op.input`` (the
    old code raised AttributeError for operations without an output).
    """
    if op.input and op.input.type:
        _randomize_custom_type_constraints(api_def, op.input)
    if op.output and op.output.type:
        _randomize_custom_type_constraints(api_def, op.output)
    op.requires = _randomly_perturb(op.requires)
    op.ensures = _randomly_perturb(op.ensures)
    return op
def randomize_api(api_def, name, output_dir):
    """Rename *api_def*, randomize every operation, and dump it as JSON.

    The serialized definition is written to ``<output_dir>/<name>.json``;
    the directory is created if missing.

    Fix: the output file is now managed by a ``with`` block so the handle
    is closed even when serialization fails.
    """
    api_def.name = name
    for resource in api_def.resources:
        for op in resource.operations:
            randomize_operation(api_def, op)
    if not os.path.exists(output_dir):
        os.mkdir(output_dir)
    with open(os.path.join(output_dir, name + '.json'), 'w') as output:
        output.write(api_def.serialize_json())
def compare_predicates(p1, p2):
    """Return the AST similarity score between two parsed predicates."""
    return ASTSimilarityChecker().get_similarity(p1, p2)
if __name__ == '__main__':
    # Ad-hoc comparison driver (Python 2 syntax): pits every operation of the
    # reference Starbucks API against every operation of random variant #90,
    # printing both operations' pre/post-condition predicates and flagging
    # matches.  NOTE(review): paths are hard-coded to a developer machine.
    # for i in range(0, 100):
    # api_def = api.parse('/Users/hiranya/Projects/api-desc/sandbox/jaxrs-test/starbucks/starbucks3.json')
    # randomize_api(api_def, 'random' + str(i), '/Users/hiranya/Projects/api-desc/sandbox/jaxrs-test/random')
    # print 'DONE'
    k = 1  # running count of matched operation pairs
    api1 = api.parse('/Users/hiranya/Projects/api-desc/sandbox/jaxrs-test/starbucks/starbucks3.json')
    for i in [90]:
        api2 = api.parse('/Users/hiranya/Projects/api-desc/sandbox/jaxrs-test/random/random' + str(i) + '.json')
        for resource1 in api1.resources:
            for op1 in resource1.operations:
                for resource2 in api2.resources:
                    for op2 in resource2.operations:
                        # Dump both contracts for manual inspection.
                        print
                        for c in op1.get_pre_conditions(api1):
                            print c
                        print
                        for c in op2.get_pre_conditions(api2):
                            print c
                        print
                        for c in op1.get_post_conditions(api1):
                            print c
                        print
                        for c in op2.get_post_conditions(api2):
                            print c
                        if compare_operations(api1, op1, api2, op2):
                            print k, api1.name, api2.name, '- Match ****************'
                            k += 1
|
hiranya911/rest-coder
|
python-lib/predicate_parser.py
|
Python
|
apache-2.0
| 20,533
|
[
"VisIt"
] |
b0fcf70daf17a93fc7ab6161d04a1431e5b22de9e4f95cba5a430b216090c943
|
from __future__ import print_function, division
import unittest, numpy as np
from pyscf import gto, scf
from pyscf.nao import gw as gw_c

# Open-shell reference system: a single fluorine atom (spin = 1) in an
# aug-cc-pVDZ basis.  The UHF mean-field solution is computed once at import
# time and reused by the test case below.
mol = gto.M( verbose = 1, atom = '''F 0.0 0.0 0.0''', basis = 'aug-cc-pvdz', spin = 1, )
gto_mf = scf.UHF(mol)
e_tot = gto_mf.kernel()
class KnowValues(unittest.TestCase):

    def test_0068_F_atom(self):
        """Spin-resolved GW on the F atom, checked against stored reference eigenvalues."""
        gw = gw_c(mf=gto_mf, gto=mol, verbosity=0, niter_max_ev=16, rescf=True, kmat_algo='dp_vertex_loops_sm')
        self.assertEqual(gw.nspin, 2)
        gw.kernel_gw()
        #gw.report()
        np.savetxt('eigvals_gw_pyscf_f_0068.txt', gw.mo_energy_gw[0,:,:].T)
        ev_ref = np.loadtxt('eigvals_gw_pyscf_f_0068.txt-ref').T
        # Compare every computed GW eigenvalue with its reference counterpart.
        for computed_row, reference_row in zip(gw.mo_energy_gw[0], ev_ref):
            for computed, reference in zip(computed_row, reference_row):
                self.assertAlmostEqual(computed, reference)
if __name__ == "__main__": unittest.main()
|
gkc1000/pyscf
|
pyscf/nao/test/test_0068_gw_f_atom.py
|
Python
|
apache-2.0
| 864
|
[
"PySCF"
] |
c9c8214b50360a177d2646dbc9d801fa0eae1391959509774d51ad200aad9a86
|
#
# @BEGIN LICENSE
#
# Psi4: an open-source quantum chemistry software package
#
# Copyright (c) 2007-2018 The Psi4 Developers.
#
# The copyrights for code used from other parties are included in
# the corresponding files.
#
# This file is part of Psi4.
#
# Psi4 is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, version 3.
#
# Psi4 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with Psi4; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @END LICENSE
#
import collections
import numpy as np
class QCAspect(collections.namedtuple('QCAspect', 'lbl units data comment doi glossary')):
    """Facilitates the storage of quantum chemical results by labeling them with basic metadata.

    Fields: *lbl* (short label), *units*, *data* (scalar, complex, or
    ndarray), plus optional *comment*, *doi* and *glossary* strings.
    """

    def __new__(cls, lbl, units, data, comment='', doi=None, glossary=''):
        # Thin wrapper so the optional metadata fields get defaults.
        return super(QCAspect, cls).__new__(cls, lbl, units, data, comment, doi, glossary)

    def __str__(self, label=''):
        width = 40
        text = []
        text.append('-' * width)
        text.append('{:^{width}}'.format('QCAspect ' + self.lbl, width=width))
        if label:
            # BUG FIX: the format spec referenced `width` but the keyword was
            # never supplied, raising KeyError whenever a label was passed.
            text.append('{:^{width}}'.format(label, width=width))
        text.append('-' * width)
        text.append('Data: {}'.format(self.data))
        text.append('Units: [{}]'.format(self.units))
        text.append('doi: {}'.format(self.doi))
        text.append('Comment: {}'.format(self.comment))
        text.append('Glossary: {}'.format(self.glossary))
        text.append('-' * width)
        return ('\n'.join(text))

    def to_dict(self):
        """Return a plain dict of lbl/units/data with JSON-friendly data.

        ndarrays become (nested) lists; complex values become a
        [real, imag] pair of lists or floats.
        """
        dicary = dict(self._asdict())  # dict, not OrderedDict
        for d in ['doi', 'comment', 'glossary']:
            dicary.pop(d)
        if isinstance(self.data, (np.ndarray, np.number)):
            # BUG FIX: `np.complex` (deprecated alias of builtin complex) was
            # removed in NumPy 1.24; test the dtype kind instead.
            if np.issubdtype(self.data.dtype, np.complexfloating):
                dicary['data'] = [dicary['data'].real.tolist(), dicary['data'].imag.tolist()]
            else:
                dicary['data'] = dicary['data'].tolist()
        elif isinstance(self.data, complex):
            dicary['data'] = [self.data.real, self.data.imag]
        return dicary
|
amjames/psi4
|
psi4/driver/qcdb/datastructures.py
|
Python
|
lgpl-3.0
| 2,525
|
[
"Psi4"
] |
095a81742959a8ae440af2bd48f6789b6e936322bdb74c163b93e1944abaaf12
|
# Copyright (C) 2017 Allen Li
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
from setuptools import setup
def find_version(path):
    """Extract the ``__version__`` string from the module file at *path*."""
    with open(path) as f:
        contents = f.read()
    match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", contents, re.M)
    if not match:
        raise RuntimeError("Unable to find version string.")
    return match.group(1)
# Package metadata for mir.anidb (AniDB API client).
setup(
    name='mir.anidb',
    version=find_version('mir/anidb/__init__.py'),  # single-sourced from the package
    description='AniDB API',
    long_description='',
    keywords='',
    url='https://github.com/darkfeline/mir.anidb',
    author='Allen Li',
    author_email='darkfeline@felesatra.moe',
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python :: 3.6',
    ],
    packages=['mir.anidb'],
    install_requires=[
        'requests~=2.23.0',
    ],
)
|
darkfeline/mir.anidb
|
setup.py
|
Python
|
apache-2.0
| 1,514
|
[
"MOE"
] |
d7aace5e1ccb5a34e1c90a602171849fee2da11ecd13407386ce88ee761c3045
|
from csv import reader
import random
import time
def strTimeProp(start, end, format, prop):
    """Return a time string *prop* of the way between *start* and *end*.

    Both endpoints are parsed with *format*; *prop* is a float in [0, 1].
    """
    start_ts = time.mktime(time.strptime(start, format))
    end_ts = time.mktime(time.strptime(end, format))
    interpolated = start_ts + prop * (end_ts - start_ts)
    return time.strftime(format, time.localtime(interpolated))
def randomDate(start, end, prop):
    """Interpolate an ISO date (YYYY-MM-DD) *prop* of the way from *start* to *end*."""
    fmt = '%Y-%m-%d'
    start_ts = time.mktime(time.strptime(start, fmt))
    end_ts = time.mktime(time.strptime(end, fmt))
    return time.strftime(fmt, time.localtime(start_ts + prop * (end_ts - start_ts)))
def getTestType():
    """Pick a random lab test code from the supported set."""
    types = ['CBC', 'RBC', 'WBC', 'P', 'Hglob', 'Hcrit', 'MCV', 'BMP', 'Gluc', 'C', 'E+', 'BE', 'Trop', 'Creat', 'Lipo', 'BC']
    return random.choice(types)
def getRandomRegion():
    """Pick a random region code."""
    regions = ['SW', 'NE', 'MW', 'W', 'NW', 'SE']
    return random.choice(regions)
# Generate 100,000 random patient-visit INSERT statements and dump them to
# stdout for loading into the `visit` table.
text = "INSERT INTO visit (`network`, `visitDate`, `testType`, `patient$id`) VALUES \n"
startDate = "2000-01-01"
endDate = "2017-10-30"
row = "\t('{}',DATE '{}', '{}', {});\n"
# PERF FIX: the previous revision grew one giant string with repeated
# concatenation (quadratic in the number of rows); collect the pieces in a
# list and join once instead.  print() works under both Python 2 and 3 here.
statements = []
for i in range(100000):
    network = getRandomRegion()
    visitDate = randomDate(startDate, endDate, random.random())
    testType = getTestType()
    patient = random.randrange(2000) + 1
    statements.append(text + row.format(network, visitDate, testType, patient))
output = "".join(statements)
print(output)
|
tburgebeckley/phlebotomy
|
p4/pyscript/buildVisit.py
|
Python
|
gpl-3.0
| 1,223
|
[
"VisIt"
] |
a4e7d7eb5b873862e7bc85cec90d10a99d286994725db66782f74e0ac8c8152c
|
#Copyright (c) 2008 Erik Tollerud (etolleru@uci.edu)
from statsmodels.compat.python import zip
import numpy as np
from math import pi
class Pca(object):
    """
    A basic class for Principal Component Analysis (PCA).

    p is the number of dimensions, while N is the number of data points.

    Fixes over the previous revision:
      * ``project`` called the non-existent method ``self.energies()``
        (the method is named ``getEnergies``), so the ``enthresh`` and
        ``cumen`` selection paths always raised AttributeError.
      * ``e != None`` comparisons replaced with ``e is not None``.
    """
    _colors = ('r', 'g', 'b', 'c', 'y', 'm', 'k')  # defaults

    def __calc(self):
        # Rebuild the centered (M) and standardized (N) data matrices and
        # invalidate the cached eigendecomposition.
        A = self.A
        M = A - np.mean(A, axis=0)
        N = M / np.std(M, axis=0)
        self.M = M
        self.N = N
        self._eig = None

    def __init__(self, data, names=None):
        """
        p X N matrix input
        """
        A = np.array(data).T
        n, p = A.shape
        self.n, self.p = n, p
        if p > n:
            from warnings import warn
            warn('p > n - intentional?', RuntimeWarning)
        self.A = A
        self._origA = A.copy()
        self.__calc()
        self._colors = np.tile(self._colors, int((p - 1) / len(self._colors)) + 1)[:p]
        if names is not None and len(names) != p:
            raise ValueError('names must match data dimension')
        self.names = None if names is None else tuple([str(n) for n in names])

    def getCovarianceMatrix(self):
        """
        returns the covariance matrix for the dataset
        """
        return np.cov(self.N.T)

    def getEigensystem(self):
        """
        returns a tuple of (eigenvalues,eigenvectors) for the data set.
        """
        if self._eig is None:
            res = np.linalg.eig(self.getCovarianceMatrix())
            sorti = np.argsort(res[0])[::-1]  # descending eigenvalue order
            res = (res[0][sorti], res[1][:, sorti])
            self._eig = res
        return self._eig

    def getEigenvalues(self):
        return self.getEigensystem()[0]

    def getEigenvectors(self):
        return self.getEigensystem()[1]

    def getEnergies(self):
        """
        "energies" are just normalized eigenvalues (summing to 1)
        """
        v = self.getEigenvalues()
        return v / np.sum(v)

    def plot2d(self, ix=0, iy=1, clf=True):
        """
        Generates a 2-dimensional plot of the data set and principle components
        using matplotlib.

        ix specifies which p-dimension to put on the x-axis of the plot
        and iy specifies which to put on the y-axis (0-indexed)
        """
        import matplotlib.pyplot as plt
        x, y = self.N[:, ix], self.N[:, iy]
        if clf:
            plt.clf()
        plt.scatter(x, y)
        vals, evs = self.getEigensystem()
        #evx,evy=evs[:,ix],evs[:,iy]
        xl, xu = plt.xlim()
        yl, yu = plt.ylim()
        dx, dy = (xu - xl), (yu - yl)
        # Draw each eigenvector scaled by its eigenvalue.
        for val, vec, c in zip(vals, evs.T, self._colors):
            plt.arrow(0, 0, val * vec[ix], val * vec[iy], head_width=0.05 * (dx * dy / 4) ** 0.5, fc=c, ec=c)
        #plt.arrow(0,0,vals[ix]*evs[ix,ix],vals[ix]*evs[iy,ix],head_width=0.05*(dx*dy/4)**0.5,fc='g',ec='g')
        #plt.arrow(0,0,vals[iy]*evs[ix,iy],vals[iy]*evs[iy,iy],head_width=0.05*(dx*dy/4)**0.5,fc='r',ec='r')
        if self.names is not None:
            plt.xlabel('$' + self.names[ix] + '/\\sigma$')
            plt.ylabel('$' + self.names[iy] + '/\\sigma$')

    def plot3d(self, ix=0, iy=1, iz=2, clf=True):
        """
        Generates a 3-dimensional plot of the data set and principle components
        using mayavi.

        ix, iy, and iz specify which of the input p-dimensions to place on each of
        the x,y,z axes, respectively (0-indexed).
        """
        import enthought.mayavi.mlab as M
        if clf:
            M.clf()
        z3 = np.zeros(3)
        v = (self.getEigenvectors() * self.getEigenvalues())
        M.quiver3d(z3, z3, z3, v[ix], v[iy], v[iz], scale_factor=5)
        M.points3d(self.N[:, ix], self.N[:, iy], self.N[:, iz], scale_factor=0.3)
        if self.names:
            M.axes(xlabel=self.names[ix] + '/sigma', ylabel=self.names[iy] + '/sigma', zlabel=self.names[iz] + '/sigma')
        else:
            M.axes()

    def sigclip(self, sigs):
        """
        clips out all data points that are more than a certain number
        of standard deviations from the mean.

        sigs can be either a single value or a length-p sequence that
        specifies the number of standard deviations along each of the
        p dimensions.

        Returns the number of clipped points.
        """
        if np.isscalar(sigs):
            sigs = sigs * np.ones(self.N.shape[1])
        # NOTE(review): axis=1 computes per-sample rather than per-dimension
        # deviations, which looks inconsistent with the docstring -- confirm
        # before relying on this method.
        sigs = sigs * np.std(self.N, axis=1)
        n = self.N.shape[0]
        m = np.all(np.abs(self.N) < sigs, axis=1)
        self.A = self.A[m]
        self.__calc()
        return n - sum(m)

    def reset(self):
        # Restore the original data and recompute the derived matrices.
        self.A = self._origA.copy()
        self.__calc()

    def project(self, vals=None, enthresh=None, nPCs=None, cumen=None):
        """
        projects the normalized values onto the components

        enthresh, nPCs, and cumen determine how many PCs to use

        if vals is None, the normalized data vectors are the values to project.
        Otherwise, it should be convertable to a p x N array

        returns n,p(>threshold) dimension array
        """
        nonnones = sum([e is not None for e in (enthresh, nPCs, cumen)])
        if nonnones == 0:
            m = slice(None)
        elif nonnones > 1:
            raise ValueError("can't specify more than one threshold")
        else:
            if enthresh is not None:
                # BUG FIX: was self.energies(), which does not exist.
                m = self.getEnergies() > enthresh
            elif nPCs is not None:
                m = slice(None, nPCs)
            elif cumen is not None:
                # BUG FIX: was self.energies(), which does not exist.
                m = np.cumsum(self.getEnergies()) < cumen
            else:
                raise RuntimeError('Should be unreachable')
        if vals is None:
            vals = self.N.T
        else:
            vals = np.array(vals, copy=False)
            if self.N.T.shape[0] != vals.shape[0]:
                raise ValueError("shape for vals doesn't match")
        proj = np.matrix(self.getEigenvectors()).T * vals
        return proj[m].T

    def deproject(self, A, normed=True):
        """
        input is an n X q array, where q <= p

        output is p X n
        """
        A = np.atleast_2d(A)
        n, q = A.shape
        p = self.A.shape[1]
        if q > p:
            raise ValueError("q > p")
        evinv = np.linalg.inv(np.matrix(self.getEigenvectors()).T)
        zs = np.zeros((n, p))
        zs[:, :q] = A
        proj = evinv * zs.T
        if normed:
            return np.array(proj.T).T
        else:
            mns = np.mean(self.A, axis=0)
            sds = np.std(self.M, axis=0)
            return (np.array(proj.T) * sds + mns).T

    def subtractPC(self, pc, vals=None):
        """
        pc can be a scalar or any sequence of pc indecies

        if vals is None, the source data is self.A, else whatever is in vals
        (which must be p x m)
        """
        if vals is None:
            vals = self.A
        else:
            vals = vals.T
            if vals.shape[1] != self.A.shape[1]:
                raise ValueError("vals don't have the correct number of components")
        pcs = self.project()
        zpcs = np.zeros_like(pcs)
        zpcs[:, pc] = pcs[:, pc]
        upc = self.deproject(zpcs, False)
        A = vals.T - upc
        B = A.T * np.std(self.M, axis=0)
        return B + np.mean(self.A, axis=0)
|
hlin117/statsmodels
|
statsmodels/sandbox/pca.py
|
Python
|
bsd-3-clause
| 7,098
|
[
"Mayavi"
] |
64911fbd37183b3939111a05e2cd6c4cd41161d987e8c7a6faa1baf014b8bdd9
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import uuid
class Migration(migrations.Migration):
    # Switches Student.key to a non-editable, unique UUID field with a
    # uuid4 default (follow-up to 0027_student_key).

    dependencies = [
        ('visit', '0027_student_key'),
    ]

    operations = [
        migrations.AlterField(
            model_name='student',
            name='key',
            field=models.UUIDField(default=uuid.uuid4, unique=True, editable=False),
        ),
    ]
|
koebbe/homeworks
|
visit/migrations/0028_auto_20150614_1750.py
|
Python
|
mit
| 437
|
[
"VisIt"
] |
49b2675f2496d291f0c22b106404ab6851975a8136cca0c186148f05a9c92bb7
|
"""
Test the Studio help links.
"""
from unittest import skip
from nose.plugins.attrib import attr
from common.test.acceptance.fixtures.course import XBlockFixtureDesc
from common.test.acceptance.pages.common.auto_auth import AutoAuthPage
from common.test.acceptance.pages.studio.asset_index import AssetIndexPageStudioFrontend
from common.test.acceptance.pages.studio.course_info import CourseUpdatesPage
from common.test.acceptance.pages.studio.edit_tabs import PagesPage
from common.test.acceptance.pages.studio.import_export import (
ExportCoursePage,
ExportLibraryPage,
ImportCoursePage,
ImportLibraryPage
)
from common.test.acceptance.pages.studio.index import DashboardPage, HomePage, IndexPage
from common.test.acceptance.pages.studio.library import LibraryPage
from common.test.acceptance.pages.studio.overview import CourseOutlinePage
from common.test.acceptance.pages.studio.settings import SettingsPage
from common.test.acceptance.pages.studio.settings_advanced import AdvancedSettingsPage
from common.test.acceptance.pages.studio.settings_certificates import CertificatesPage
from common.test.acceptance.pages.studio.settings_graders import GradingPage
from common.test.acceptance.pages.studio.settings_group_configurations import GroupConfigurationsPage
from common.test.acceptance.pages.studio.textbook_upload import TextbookUploadPage
from common.test.acceptance.pages.studio.users import CourseTeamPage, LibraryUsersPage
from common.test.acceptance.pages.studio.utils import click_css, click_studio_help, studio_help_links
from common.test.acceptance.tests.helpers import (
AcceptanceTest,
assert_nav_help_link,
assert_side_bar_help_link,
url_for_help
)
from common.test.acceptance.tests.studio.base_studio_test import ContainerBase, StudioCourseTest, StudioLibraryTest
def _get_expected_documentation_url(path):
    """Return the expected URL into the course-author ("building and
    running a course") documentation for the given *path*."""
    help_token = 'course_author'
    return url_for_help(help_token, path)
@attr(shard=20)
class StudioHelpTest(StudioCourseTest):
    """Checks the links offered by the Studio help menu."""

    def test_studio_help_links(self):
        """The help menu exposes the documented links, texts and screen-reader texts."""
        dashboard = DashboardPage(self.browser)
        dashboard.visit()
        click_studio_help(dashboard)
        actual_links = studio_help_links(dashboard)
        expected_links = [{
            'href': u'http://docs.edx.org/',
            'text': u'edX Documentation',
            'sr_text': u'Access documentation on http://docs.edx.org'
        }, {
            'href': u'https://open.edx.org/',
            'text': u'Open edX Portal',
            'sr_text': u'Access the Open edX Portal'
        }, {
            'href': u'https://www.edx.org/course/overview-creating-edx-course-edx-edx101#.VO4eaLPF-n1',
            'text': u'Enroll in edX101',
            'sr_text': u'Enroll in edX101: Overview of Creating an edX Course'
        }, {
            'href': u'https://www.edx.org/course/creating-course-edx-studio-edx-studiox',
            'text': u'Enroll in StudioX',
            'sr_text': u'Enroll in StudioX: Creating a Course with edX Studio'
        }, {
            'href': u'mailto:partner-support@example.com',
            'text': u'Contact Us',
            'sr_text': 'Send an email to partner-support@example.com'
        }]
        for expected, actual in zip(expected_links, actual_links):
            self.assertEqual(expected['href'], actual.get_attribute('href'))
            self.assertEqual(expected['text'], actual.text)
            sibling_span = actual.find_element_by_xpath('following-sibling::span')
            self.assertEqual(expected['sr_text'], sibling_span.text)
@attr(shard=20)
class SignInHelpTest(AcceptanceTest):
    """Checks the help link shown on the 'Sign In' page."""

    def setUp(self):
        super(SignInHelpTest, self).setUp()
        self.index_page = IndexPage(self.browser)
        self.index_page.visit()

    def test_sign_in_nav_help(self):
        """The nav-bar 'Help' link on the sign-in page points at the getting-started docs."""
        sign_in_page = self.index_page.click_sign_in()
        help_url = _get_expected_documentation_url('/getting_started/index.html')
        assert_nav_help_link(test=self, page=sign_in_page, href=help_url, signed_in=False)
@attr(shard=20)
class SignUpHelpTest(AcceptanceTest):
    """Checks the help link shown on the 'Sign Up' page."""

    def setUp(self):
        super(SignUpHelpTest, self).setUp()
        self.index_page = IndexPage(self.browser)
        self.index_page.visit()

    def test_sign_up_nav_help(self):
        """The nav-bar 'Help' link on the sign-up page points at the getting-started docs."""
        sign_up_page = self.index_page.click_sign_up()
        help_url = _get_expected_documentation_url('/getting_started/index.html')
        assert_nav_help_link(test=self, page=sign_up_page, href=help_url, signed_in=False)
@attr(shard=20)
class HomeHelpTest(StudioCourseTest):
    """Checks help links on the 'Home' (Courses tab) page."""

    def setUp(self):  # pylint: disable=arguments-differ
        super(HomeHelpTest, self).setUp()
        self.home_page = HomePage(self.browser)
        self.home_page.visit()

    def test_course_home_nav_help(self):
        """The nav-bar 'Help' link points at the Studio getting-started docs."""
        help_url = _get_expected_documentation_url('/getting_started/CA_get_started_Studio.html')
        assert_nav_help_link(test=self, page=self.home_page, href=help_url)

    def test_course_home_side_bar_help(self):
        """The sidebar 'Getting Started with Your Platform Studio' link points at the same docs."""
        help_url = _get_expected_documentation_url('/getting_started/CA_get_started_Studio.html')
        assert_side_bar_help_link(
            test=self,
            page=self.home_page,
            href=help_url,
            help_text='Getting Started with Your Platform Studio',
            as_list_item=True
        )
@attr(shard=20)
class NewCourseHelpTest(AcceptanceTest):
    """Checks help links on the 'Create a New Course' form of the dashboard."""

    def setUp(self):
        super(NewCourseHelpTest, self).setUp()
        self.auth_page = AutoAuthPage(self.browser, staff=True)
        self.dashboard_page = DashboardPage(self.browser)
        self.auth_page.visit()
        self.dashboard_page.visit()
        self.assertTrue(self.dashboard_page.new_course_button.present)
        self.dashboard_page.click_new_course_button()

    def test_course_create_nav_help(self):
        """The nav-bar 'Help' link points at the Studio getting-started docs."""
        help_url = _get_expected_documentation_url('/getting_started/CA_get_started_Studio.html')
        assert_nav_help_link(test=self, page=self.dashboard_page, href=help_url)

    def test_course_create_side_bar_help(self):
        """The sidebar 'Getting Started with Your Platform Studio' link points at the same docs."""
        help_url = _get_expected_documentation_url('/getting_started/CA_get_started_Studio.html')
        assert_side_bar_help_link(
            test=self,
            page=self.dashboard_page,
            href=help_url,
            help_text='Getting Started with Your Platform Studio',
            as_list_item=True
        )
@attr(shard=20)
class NewLibraryHelpTest(AcceptanceTest):
    """Checks help links on the 'Create a New Library' form of the dashboard."""

    def setUp(self):
        super(NewLibraryHelpTest, self).setUp()
        self.auth_page = AutoAuthPage(self.browser, staff=True)
        self.dashboard_page = DashboardPage(self.browser)
        self.auth_page.visit()
        self.dashboard_page.visit()
        self.assertTrue(self.dashboard_page.has_new_library_button)
        self.dashboard_page.click_new_library()

    def test_library_create_nav_help(self):
        """The nav-bar 'Help' link points at the Studio getting-started docs."""
        help_url = _get_expected_documentation_url('/getting_started/CA_get_started_Studio.html')
        assert_nav_help_link(test=self, page=self.dashboard_page, href=help_url)

    def test_library_create_side_bar_help(self):
        """The sidebar 'Getting Started with Your Platform Studio' link points at the same docs."""
        help_url = _get_expected_documentation_url('/getting_started/CA_get_started_Studio.html')
        assert_side_bar_help_link(
            test=self,
            page=self.dashboard_page,
            href=help_url,
            help_text='Getting Started with Your Platform Studio',
            as_list_item=True
        )
@attr(shard=20)
class LibraryTabHelpTest(AcceptanceTest):
    """Checks the help link from the Libraries tab of the dashboard."""

    def setUp(self):
        super(LibraryTabHelpTest, self).setUp()
        self.auth_page = AutoAuthPage(self.browser, staff=True)
        self.dashboard_page = DashboardPage(self.browser)
        self.auth_page.visit()
        self.dashboard_page.visit()

    def test_library_tab_nav_help(self):
        """The nav-bar 'Help' link on the Libraries tab points at the Studio getting-started docs."""
        self.assertTrue(self.dashboard_page.has_new_library_button)
        click_css(self.dashboard_page, '#course-index-tabs .libraries-tab', 0, False)
        help_url = _get_expected_documentation_url('/getting_started/CA_get_started_Studio.html')
        assert_nav_help_link(test=self, page=self.dashboard_page, href=help_url)
@attr(shard=20)
class LibraryHelpTest(StudioLibraryTest):
    """Checks help links on a content-library page and its user-access settings."""

    def setUp(self):
        super(LibraryHelpTest, self).setUp()
        self.library_page = LibraryPage(self.browser, self.library_key)
        self.library_user_page = LibraryUsersPage(self.browser, self.library_key)

    def test_library_content_nav_help(self):
        """The nav-bar 'Help' link on the library content page points at the libraries docs."""
        self.library_page.visit()
        help_url = _get_expected_documentation_url('/course_components/libraries.html')
        assert_nav_help_link(test=self, page=self.library_page, href=help_url)

    def test_library_content_side_bar_help(self):
        """The sidebar 'Learn more about content libraries' link points at the libraries docs."""
        self.library_page.visit()
        help_url = _get_expected_documentation_url('/course_components/libraries.html')
        assert_side_bar_help_link(
            test=self,
            page=self.library_page,
            href=help_url,
            help_text='Learn more about content libraries'
        )

    def test_library_user_access_setting_nav_help(self):
        """The nav-bar 'Help' link on 'User Access' deep-links into the libraries docs."""
        self.library_user_page.visit()
        help_url = _get_expected_documentation_url(
            '/course_components/libraries.html#give-other-users-access-to-your-library'
        )
        assert_nav_help_link(test=self, page=self.library_user_page, href=help_url)
@attr(shard=20)
class LibraryImportHelpTest(StudioLibraryTest):
    """Checks help links on the library import page."""

    def setUp(self):
        super(LibraryImportHelpTest, self).setUp()
        self.library_import_page = ImportLibraryPage(self.browser, self.library_key)
        self.library_import_page.visit()

    def test_library_import_nav_help(self):
        """The nav-bar 'Help' link deep-links into the import-a-library docs."""
        help_url = _get_expected_documentation_url('/course_components/libraries.html#import-a-library')
        assert_nav_help_link(test=self, page=self.library_import_page, href=help_url)

    def test_library_import_side_bar_help(self):
        """The sidebar 'Learn more about importing a library' link deep-links into the same docs."""
        help_url = _get_expected_documentation_url('/course_components/libraries.html#import-a-library')
        assert_side_bar_help_link(
            test=self,
            page=self.library_import_page,
            href=help_url,
            help_text='Learn more about importing a library'
        )
@attr(shard=20)
class LibraryExportHelpTest(StudioLibraryTest):
    """Checks help links on the library export page."""

    def setUp(self):
        super(LibraryExportHelpTest, self).setUp()
        self.library_export_page = ExportLibraryPage(self.browser, self.library_key)
        self.library_export_page.visit()

    def test_library_export_nav_help(self):
        """The nav-bar 'Help' link deep-links into the export-a-library docs."""
        help_url = _get_expected_documentation_url('/course_components/libraries.html#export-a-library')
        assert_nav_help_link(test=self, page=self.library_export_page, href=help_url)

    def test_library_export_side_bar_help(self):
        """The sidebar 'Learn more about exporting a library' link deep-links into the same docs."""
        help_url = _get_expected_documentation_url('/course_components/libraries.html#export-a-library')
        assert_side_bar_help_link(
            test=self,
            page=self.library_export_page,
            href=help_url,
            help_text='Learn more about exporting a library'
        )
@attr(shard=20)
class CourseOutlineHelpTest(StudioCourseTest):
    """Checks help links on the course outline page."""

    def setUp(self):  # pylint: disable=arguments-differ
        super(CourseOutlineHelpTest, self).setUp()
        self.course_outline_page = CourseOutlinePage(
            self.browser,
            self.course_info['org'],
            self.course_info['number'],
            self.course_info['run']
        )
        self.course_outline_page.visit()

    @skip("This scenario depends upon TNL-5460")
    def test_course_outline_nav_help(self):
        """The nav-bar 'Help' link points at the course-outline docs."""
        help_url = _get_expected_documentation_url('/developing_course/course_outline.html')
        assert_nav_help_link(test=self, page=self.course_outline_page, href=help_url)

    def test_course_outline_side_bar_help(self):
        """The sidebar 'Learn more about the course outline' link points at the same docs."""
        help_url = _get_expected_documentation_url('/developing_course/course_outline.html')
        assert_side_bar_help_link(
            test=self,
            page=self.course_outline_page,
            href=help_url,
            help_text='Learn more about the course outline',
            index=0
        )
@attr(shard=20)
class CourseUpdateHelpTest(StudioCourseTest):
    """
    Checks the help links shown on the 'Course Updates' page.
    """
    def setUp(self):  # pylint: disable=arguments-differ
        super(CourseUpdateHelpTest, self).setUp()
        org, number, run = (
            self.course_info['org'],
            self.course_info['number'],
            self.course_info['run'],
        )
        self.course_update_page = CourseUpdatesPage(self.browser, org, number, run)
        self.course_update_page.visit()

    def test_course_update_nav_help(self):
        """
        The 'Help' link in the navigation bar of the 'Course Updates' page
        must open the handouts/updates documentation.
        """
        assert_nav_help_link(
            test=self,
            page=self.course_update_page,
            href=_get_expected_documentation_url('/course_assets/handouts_updates.html'),
        )
@attr(shard=20)
class AssetIndexHelpTest(StudioCourseTest):
    """
    Checks the help links shown on the course 'Files & Uploads' page.
    """
    def setUp(self):  # pylint: disable=arguments-differ
        super(AssetIndexHelpTest, self).setUp()
        org, number, run = (
            self.course_info['org'],
            self.course_info['number'],
            self.course_info['run'],
        )
        self.course_asset_index_page = AssetIndexPageStudioFrontend(self.browser, org, number, run)
        self.course_asset_index_page.visit()

    def test_asset_index_nav_help(self):
        """
        The 'Help' link in the navigation bar of the 'Files & Uploads' page
        must open the course files documentation.
        """
        assert_nav_help_link(
            test=self,
            page=self.course_asset_index_page,
            href=_get_expected_documentation_url('/course_assets/course_files.html'),
        )
@attr(shard=20)
class CoursePagesHelpTest(StudioCourseTest):
    """
    Checks the help links shown on the course 'Pages' page.
    """
    def setUp(self):  # pylint: disable=arguments-differ
        super(CoursePagesHelpTest, self).setUp()
        org, number, run = (
            self.course_info['org'],
            self.course_info['number'],
            self.course_info['run'],
        )
        self.course_pages_page = PagesPage(self.browser, org, number, run)
        self.course_pages_page.visit()

    def test_course_page_nav_help(self):
        """
        The 'Help' link in the navigation bar of the 'Pages' page must open
        the pages documentation.
        """
        assert_nav_help_link(
            test=self,
            page=self.course_pages_page,
            href=_get_expected_documentation_url('/course_assets/pages.html'),
        )
@attr(shard=20)
class UploadTextbookHelpTest(StudioCourseTest):
    """
    Checks the help links shown on the course 'Textbooks' page.
    """
    def setUp(self):  # pylint: disable=arguments-differ
        super(UploadTextbookHelpTest, self).setUp()
        org, number, run = (
            self.course_info['org'],
            self.course_info['number'],
            self.course_info['run'],
        )
        self.course_textbook_upload_page = TextbookUploadPage(self.browser, org, number, run)
        self.course_textbook_upload_page.visit()

    def test_course_textbook_upload_nav_help(self):
        """
        The 'Help' link in the navigation bar of the 'Textbooks' page must
        open the textbooks documentation.
        """
        assert_nav_help_link(
            test=self,
            page=self.course_textbook_upload_page,
            href=_get_expected_documentation_url('/course_assets/textbooks.html'),
        )

    def test_course_textbook_side_bar_help(self):
        """
        The 'Learn more about textbooks' link in the sidebar of the
        'Textbooks' page must open the textbooks documentation.
        """
        assert_side_bar_help_link(
            test=self,
            page=self.course_textbook_upload_page,
            href=_get_expected_documentation_url('/course_assets/textbooks.html'),
            help_text='Learn more about textbooks',
        )
@attr(shard=20)
class StudioUnitHelpTest(ContainerBase):
    """
    Checks the help links shown on the unit page.
    """
    def setUp(self, is_staff=True):
        super(StudioUnitHelpTest, self).setUp(is_staff=is_staff)

    def populate_course_fixture(self, course_fixture):
        """
        Set up the course fixture: enable the 'split_test' advanced module
        and create a section -> subsection -> unit hierarchy.
        """
        course_fixture.add_advanced_settings(
            {u"advanced_modules": {"value": ["split_test"]}}
        )
        subsection = XBlockFixtureDesc('sequential', 'Test Subsection').add_children(
            XBlockFixtureDesc('vertical', 'Test Unit')
        )
        course_fixture.add_children(
            XBlockFixtureDesc('chapter', 'Test Section').add_children(subsection)
        )

    def test_unit_page_nav_help(self):
        """
        The 'Help' link in the navigation bar of the unit page must open
        the course units documentation.
        """
        unit_page = self.go_to_unit_page()
        assert_nav_help_link(
            test=self,
            page=unit_page,
            href=_get_expected_documentation_url('/developing_course/course_units.html'),
        )
@attr(shard=20)
class SettingsHelpTest(StudioCourseTest):
    """
    Checks the help links shown on the Schedule and Details settings page.
    """
    def setUp(self, is_staff=False, test_xss=True):
        super(SettingsHelpTest, self).setUp()
        org, number, run = (
            self.course_info['org'],
            self.course_info['number'],
            self.course_info['run'],
        )
        self.settings_page = SettingsPage(self.browser, org, number, run)
        self.settings_page.visit()

    def test_settings_page_nav_help(self):
        """
        The 'Help' link in the navigation bar of the settings page must open
        the course information documentation.
        """
        assert_nav_help_link(
            test=self,
            page=self.settings_page,
            href=_get_expected_documentation_url(
                '/set_up_course/studio_add_course_information/index.html'
            ),
        )
@attr(shard=20)
class GradingPageHelpTest(StudioCourseTest):
    """
    Checks the help links shown on the Grading page.
    """
    def setUp(self, is_staff=False, test_xss=True):
        super(GradingPageHelpTest, self).setUp()
        org, number, run = (
            self.course_info['org'],
            self.course_info['number'],
            self.course_info['run'],
        )
        self.grading_page = GradingPage(self.browser, org, number, run)
        self.grading_page.visit()

    def test_grading_page_nav_help(self):
        """
        The 'Help' link in the navigation bar of the Grading page must open
        the grading documentation.
        """
        assert_nav_help_link(
            test=self,
            page=self.grading_page,
            href=_get_expected_documentation_url('/grading/index.html'),
        )
@attr(shard=20)
class CourseTeamSettingsHelpTest(StudioCourseTest):
    """
    Checks the help links shown on the Course Team settings page.
    """
    def setUp(self, is_staff=False, test_xss=True):
        super(CourseTeamSettingsHelpTest, self).setUp()
        org, number, run = (
            self.course_info['org'],
            self.course_info['number'],
            self.course_info['run'],
        )
        self.course_team_settings_page = CourseTeamPage(self.browser, org, number, run)
        self.course_team_settings_page.visit()

    def test_course_course_team_nav_help(self):
        """
        The 'Help' link in the navigation bar of the Course Team settings
        page must open the course staffing documentation.
        """
        assert_nav_help_link(
            test=self,
            page=self.course_team_settings_page,
            href=_get_expected_documentation_url(
                '/set_up_course/studio_add_course_information/studio_course_staffing.html'
            ),
        )
@attr(shard=20)
class CourseGroupConfigurationHelpTest(StudioCourseTest):
    """
    Checks the help links shown on the Group Configurations settings page.
    """
    def setUp(self, is_staff=False, test_xss=True):
        super(CourseGroupConfigurationHelpTest, self).setUp()
        org, number, run = (
            self.course_info['org'],
            self.course_info['number'],
            self.course_info['run'],
        )
        self.course_group_configuration_page = GroupConfigurationsPage(self.browser, org, number, run)
        self.course_group_configuration_page.visit()

    def test_course_group_conf_nav_help(self):
        """
        The 'Help' link in the navigation bar of the Group Configurations
        settings page must open the documentation index.
        """
        assert_nav_help_link(
            test=self,
            page=self.course_group_configuration_page,
            href=_get_expected_documentation_url('/index.html'),
        )

    def test_course_group_conf_content_group_side_bar_help(self):
        """
        The 'Learn More' link in the sidebar, under the content group
        section of the Group Configurations settings page, must open the
        cohorted courseware documentation.
        """
        assert_side_bar_help_link(
            test=self,
            page=self.course_group_configuration_page,
            href=_get_expected_documentation_url(
                '/course_features/cohorts/cohorted_courseware.html'
            ),
            help_text='Learn More',
        )
@attr(shard=20)
class AdvancedSettingHelpTest(StudioCourseTest):
    """
    Checks the help links shown on the course Advanced Settings page.
    """
    def setUp(self, is_staff=False, test_xss=True):
        super(AdvancedSettingHelpTest, self).setUp()
        org, number, run = (
            self.course_info['org'],
            self.course_info['number'],
            self.course_info['run'],
        )
        self.advanced_settings = AdvancedSettingsPage(self.browser, org, number, run)
        self.advanced_settings.visit()

    def test_advanced_settings_nav_help(self):
        """
        The 'Help' link in the navigation bar of the Advanced Settings page
        must open the documentation index.
        """
        assert_nav_help_link(
            test=self,
            page=self.advanced_settings,
            href=_get_expected_documentation_url('/index.html'),
        )
@attr(shard=20)
class CertificatePageHelpTest(StudioCourseTest):
    """
    Checks the help links shown on the course Certificate settings page.
    """
    # Documentation section covering certificates; shared by both tests below.
    _DOC_PATH = '/set_up_course/studio_add_course_information/studio_creating_certificates.html'

    def setUp(self, is_staff=False, test_xss=True):
        super(CertificatePageHelpTest, self).setUp()
        org, number, run = (
            self.course_info['org'],
            self.course_info['number'],
            self.course_info['run'],
        )
        self.certificates_page = CertificatesPage(self.browser, org, number, run)
        self.certificates_page.visit()

    def test_certificate_page_nav_help(self):
        """
        The 'Help' link in the navigation bar of the Certificate settings
        page must open the certificates documentation.
        """
        assert_nav_help_link(
            test=self,
            page=self.certificates_page,
            href=_get_expected_documentation_url(self._DOC_PATH),
        )

    def test_certificate_page_side_bar_help(self):
        """
        The 'Learn more about certificates' link in the sidebar of the
        Certificate settings page must open the certificates documentation.
        """
        assert_side_bar_help_link(
            test=self,
            page=self.certificates_page,
            href=_get_expected_documentation_url(self._DOC_PATH),
            help_text='Learn more about certificates',
        )
@attr(shard=20)
class GroupExperimentConfigurationHelpTest(ContainerBase):
    """
    Checks the sidebar help link for Experiment Group Configurations on the
    Group Configurations settings page.
    """
    def setUp(self):  # pylint: disable=arguments-differ
        super(GroupExperimentConfigurationHelpTest, self).setUp()
        org, number, run = (
            self.course_info['org'],
            self.course_info['number'],
            self.course_info['run'],
        )
        self.group_configuration_page = GroupConfigurationsPage(self.browser, org, number, run)
        self.group_configuration_page.visit()

    def populate_course_fixture(self, course_fixture):
        """
        Set up the course fixture: enable the 'split_test' advanced module
        so experiment group configurations are available.
        """
        course_fixture.add_advanced_settings(
            {u"advanced_modules": {"value": ["split_test"]}}
        )

    def test_course_group_configuration_experiment_side_bar_help(self):
        """
        The 'Learn More' link in the sidebar, under 'Experiment Group
        Configurations', must open the content experiments configuration
        documentation.
        """
        doc_path = (
            '/course_features/content_experiments/content_experiments_configure.html'
            '#set-up-group-configurations-in-edx-studio'
        )
        assert_side_bar_help_link(
            test=self,
            page=self.group_configuration_page,
            href=_get_expected_documentation_url(doc_path),
            help_text='Learn More',
        )
@attr(shard=20)
class ToolsImportHelpTest(StudioCourseTest):
    """
    Checks the help links shown on the course import tools page.
    """
    # Documentation section covering course import; shared by both tests below.
    _DOC_PATH = '/releasing_course/export_import_course.html#import-a-course'

    def setUp(self, is_staff=False, test_xss=True):
        super(ToolsImportHelpTest, self).setUp()
        org, number, run = (
            self.course_info['org'],
            self.course_info['number'],
            self.course_info['run'],
        )
        self.import_page = ImportCoursePage(self.browser, org, number, run)
        self.import_page.visit()

    def test_tools_import_nav_help(self):
        """
        The 'Help' link in the navigation bar of the import tools page must
        open the 'import a course' documentation section.
        """
        assert_nav_help_link(
            test=self,
            page=self.import_page,
            href=_get_expected_documentation_url(self._DOC_PATH),
        )

    def test_tools_import_side_bar_help(self):
        """
        The 'Learn more about importing a course' link in the sidebar of the
        import tools page must open the 'import a course' documentation
        section.
        """
        assert_side_bar_help_link(
            test=self,
            page=self.import_page,
            href=_get_expected_documentation_url(self._DOC_PATH),
            help_text='Learn more about importing a course',
        )
@attr(shard=20)
class ToolsExportHelpTest(StudioCourseTest):
    """
    Tests help links on tools export pages.
    """
    def setUp(self, is_staff=False, test_xss=True):
        super(ToolsExportHelpTest, self).setUp()
        self.export_page = ExportCoursePage(
            self.browser,
            self.course_info['org'],
            self.course_info['number'],
            self.course_info['run']
        )
        self.export_page.visit()

    # NOTE(review): the two test methods below were named
    # test_tools_import_* (a copy-paste from ToolsImportHelpTest) even
    # though they exercise the export page; renamed to test_tools_export_*
    # so test names and reports match what is actually tested.
    def test_tools_export_nav_help(self):
        """
        Scenario: Help link in navigation bar is working on the course export page
        Given that I am on the course export tools page
        And I want help about the process
        And I click the 'Help' in the navigation bar
        Then Help link should open.
        And help url should be correct
        """
        expected_url = _get_expected_documentation_url('/releasing_course/export_import_course.html#export-a-course')
        # Assert that help link is correct.
        assert_nav_help_link(
            test=self,
            page=self.export_page,
            href=expected_url,
        )

    def test_tools_export_side_bar_help(self):
        """
        Scenario: Help link in side bar is working on the course export page
        Given that I am on the course export tools page
        And I want help about the process
        And I click the 'Learn more about exporting a course' in the sidebar links
        Then Help link should open.
        And help url should be correct
        """
        expected_url = _get_expected_documentation_url('/releasing_course/export_import_course.html#export-a-course')
        # Assert that help link is correct.
        assert_side_bar_help_link(
            test=self,
            page=self.export_page,
            href=expected_url,
            help_text='Learn more about exporting a course',
        )
@attr(shard=20)
class StudioWelcomeHelpTest(AcceptanceTest):
    """
    Checks the help link on the 'Welcome' page, shown to signed-out users.
    """
    def setUp(self):
        super(StudioWelcomeHelpTest, self).setUp()
        self.index_page = IndexPage(self.browser)
        self.index_page.visit()

    def test_welcome_nav_help(self):
        """
        The 'Help' link in the navigation bar of the 'Welcome' page must
        open the 'getting started' documentation for a signed-out visitor.
        """
        assert_nav_help_link(
            test=self,
            page=self.index_page,
            href=_get_expected_documentation_url('/getting_started/index.html'),
            signed_in=False,
        )
|
BehavioralInsightsTeam/edx-platform
|
common/test/acceptance/tests/studio/test_studio_help.py
|
Python
|
agpl-3.0
| 42,755
|
[
"VisIt"
] |
8e874a8d8b89b09677ae8705e0867f1ff556cc7641eb3dafb0bb940bc7707334
|
from __future__ import absolute_import, division, unicode_literals
import string
import gettext
# Conventional alias so the message strings below are marked for
# localization with the standard _() wrapper.
_ = gettext.gettext

# Sentinel representing "end of input"; compared against by identity.
EOF = None
# Mapping from parse-error code to a human-readable, localizable message.
# Messages use %-style named placeholders (e.g. %(name)s, %(charAsInt)08x)
# that are filled in with token data when an error is reported.
E = {
    "null-character":
        _("Null character in input stream, replaced with U+FFFD."),
    "invalid-codepoint":
        _("Invalid codepoint in stream."),
    "incorrectly-placed-solidus":
        _("Solidus (/) incorrectly placed in tag."),
    "incorrect-cr-newline-entity":
        _("Incorrect CR newline entity, replaced with LF."),
    "illegal-windows-1252-entity":
        _("Entity used with illegal number (windows-1252 reference)."),
    "cant-convert-numeric-entity":
        _("Numeric entity couldn't be converted to character "
          "(codepoint U+%(charAsInt)08x)."),
    "illegal-codepoint-for-numeric-entity":
        _("Numeric entity represents an illegal codepoint: "
          "U+%(charAsInt)08x."),
    "numeric-entity-without-semicolon":
        _("Numeric entity didn't end with ';'."),
    "expected-numeric-entity-but-got-eof":
        _("Numeric entity expected. Got end of file instead."),
    "expected-numeric-entity":
        _("Numeric entity expected but none found."),
    "named-entity-without-semicolon":
        _("Named entity didn't end with ';'."),
    "expected-named-entity":
        _("Named entity expected. Got none."),
    "attributes-in-end-tag":
        _("End tag contains unexpected attributes."),
    'self-closing-flag-on-end-tag':
        _("End tag contains unexpected self-closing flag."),
    "expected-tag-name-but-got-right-bracket":
        _("Expected tag name. Got '>' instead."),
    "expected-tag-name-but-got-question-mark":
        _("Expected tag name. Got '?' instead. (HTML doesn't "
          "support processing instructions.)"),
    "expected-tag-name":
        _("Expected tag name. Got something else instead"),
    "expected-closing-tag-but-got-right-bracket":
        _("Expected closing tag. Got '>' instead. Ignoring '</>'."),
    "expected-closing-tag-but-got-eof":
        _("Expected closing tag. Unexpected end of file."),
    "expected-closing-tag-but-got-char":
        _("Expected closing tag. Unexpected character '%(data)s' found."),
    "eof-in-tag-name":
        _("Unexpected end of file in the tag name."),
    "expected-attribute-name-but-got-eof":
        _("Unexpected end of file. Expected attribute name instead."),
    "eof-in-attribute-name":
        _("Unexpected end of file in attribute name."),
    "invalid-character-in-attribute-name":
        _("Invalid character in attribute name"),
    "duplicate-attribute":
        _("Dropped duplicate attribute on tag."),
    "expected-end-of-tag-name-but-got-eof":
        _("Unexpected end of file. Expected = or end of tag."),
    "expected-attribute-value-but-got-eof":
        _("Unexpected end of file. Expected attribute value."),
    "expected-attribute-value-but-got-right-bracket":
        _("Expected attribute value. Got '>' instead."),
    'equals-in-unquoted-attribute-value':
        _("Unexpected = in unquoted attribute"),
    'unexpected-character-in-unquoted-attribute-value':
        _("Unexpected character in unquoted attribute"),
    "invalid-character-after-attribute-name":
        _("Unexpected character after attribute name."),
    "unexpected-character-after-attribute-value":
        _("Unexpected character after attribute value."),
    "eof-in-attribute-value-double-quote":
        _("Unexpected end of file in attribute value (\")."),
    "eof-in-attribute-value-single-quote":
        _("Unexpected end of file in attribute value (')."),
    "eof-in-attribute-value-no-quotes":
        _("Unexpected end of file in attribute value."),
    "unexpected-EOF-after-solidus-in-tag":
        _("Unexpected end of file in tag. Expected >"),
    "unexpected-character-after-solidus-in-tag":
        _("Unexpected character after / in tag. Expected >"),
    "expected-dashes-or-doctype":
        _("Expected '--' or 'DOCTYPE'. Not found."),
    "unexpected-bang-after-double-dash-in-comment":
        _("Unexpected ! after -- in comment"),
    "unexpected-space-after-double-dash-in-comment":
        _("Unexpected space after -- in comment"),
    "incorrect-comment":
        _("Incorrect comment."),
    "eof-in-comment":
        _("Unexpected end of file in comment."),
    "eof-in-comment-end-dash":
        _("Unexpected end of file in comment (-)"),
    "unexpected-dash-after-double-dash-in-comment":
        _("Unexpected '-' after '--' found in comment."),
    "eof-in-comment-double-dash":
        _("Unexpected end of file in comment (--)."),
    "eof-in-comment-end-space-state":
        _("Unexpected end of file in comment."),
    "eof-in-comment-end-bang-state":
        _("Unexpected end of file in comment."),
    "unexpected-char-in-comment":
        _("Unexpected character in comment found."),
    "need-space-after-doctype":
        _("No space after literal string 'DOCTYPE'."),
    "expected-doctype-name-but-got-right-bracket":
        _("Unexpected > character. Expected DOCTYPE name."),
    "expected-doctype-name-but-got-eof":
        _("Unexpected end of file. Expected DOCTYPE name."),
    "eof-in-doctype-name":
        _("Unexpected end of file in DOCTYPE name."),
    "eof-in-doctype":
        _("Unexpected end of file in DOCTYPE."),
    "expected-space-or-right-bracket-in-doctype":
        _("Expected space or '>'. Got '%(data)s'"),
    "unexpected-end-of-doctype":
        _("Unexpected end of DOCTYPE."),
    "unexpected-char-in-doctype":
        _("Unexpected character in DOCTYPE."),
    "eof-in-innerhtml":
        _("XXX innerHTML EOF"),
    "unexpected-doctype":
        _("Unexpected DOCTYPE. Ignored."),
    "non-html-root":
        _("html needs to be the first start tag."),
    "expected-doctype-but-got-eof":
        _("Unexpected End of file. Expected DOCTYPE."),
    "unknown-doctype":
        _("Erroneous DOCTYPE."),
    "expected-doctype-but-got-chars":
        _("Unexpected non-space characters. Expected DOCTYPE."),
    "expected-doctype-but-got-start-tag":
        _("Unexpected start tag (%(name)s). Expected DOCTYPE."),
    "expected-doctype-but-got-end-tag":
        _("Unexpected end tag (%(name)s). Expected DOCTYPE."),
    "end-tag-after-implied-root":
        _("Unexpected end tag (%(name)s) after the (implied) root element."),
    "expected-named-closing-tag-but-got-eof":
        _("Unexpected end of file. Expected end tag (%(name)s)."),
    "two-heads-are-not-better-than-one":
        _("Unexpected start tag head in existing head. Ignored."),
    "unexpected-end-tag":
        _("Unexpected end tag (%(name)s). Ignored."),
    "unexpected-start-tag-out-of-my-head":
        _("Unexpected start tag (%(name)s) that can be in head. Moved."),
    "unexpected-start-tag":
        _("Unexpected start tag (%(name)s)."),
    "missing-end-tag":
        _("Missing end tag (%(name)s)."),
    "missing-end-tags":
        _("Missing end tags (%(name)s)."),
    "unexpected-start-tag-implies-end-tag":
        _("Unexpected start tag (%(startName)s) "
          "implies end tag (%(endName)s)."),
    "unexpected-start-tag-treated-as":
        _("Unexpected start tag (%(originalName)s). Treated as %(newName)s."),
    "deprecated-tag":
        _("Unexpected start tag %(name)s. Don't use it!"),
    "unexpected-start-tag-ignored":
        _("Unexpected start tag %(name)s. Ignored."),
    "expected-one-end-tag-but-got-another":
        _("Unexpected end tag (%(gotName)s). "
          "Missing end tag (%(expectedName)s)."),
    "end-tag-too-early":
        _("End tag (%(name)s) seen too early. Expected other end tag."),
    "end-tag-too-early-named":
        _("Unexpected end tag (%(gotName)s). Expected end tag (%(expectedName)s)."),
    "end-tag-too-early-ignored":
        _("End tag (%(name)s) seen too early. Ignored."),
    "adoption-agency-1.1":
        _("End tag (%(name)s) violates step 1, "
          "paragraph 1 of the adoption agency algorithm."),
    "adoption-agency-1.2":
        _("End tag (%(name)s) violates step 1, "
          "paragraph 2 of the adoption agency algorithm."),
    "adoption-agency-1.3":
        _("End tag (%(name)s) violates step 1, "
          "paragraph 3 of the adoption agency algorithm."),
    "adoption-agency-4.4":
        _("End tag (%(name)s) violates step 4, "
          "paragraph 4 of the adoption agency algorithm."),
    "unexpected-end-tag-treated-as":
        _("Unexpected end tag (%(originalName)s). Treated as %(newName)s."),
    "no-end-tag":
        _("This element (%(name)s) has no end tag."),
    "unexpected-implied-end-tag-in-table":
        _("Unexpected implied end tag (%(name)s) in the table phase."),
    "unexpected-implied-end-tag-in-table-body":
        _("Unexpected implied end tag (%(name)s) in the table body phase."),
    "unexpected-char-implies-table-voodoo":
        _("Unexpected non-space characters in "
          "table context caused voodoo mode."),
    "unexpected-hidden-input-in-table":
        _("Unexpected input with type hidden in table context."),
    "unexpected-form-in-table":
        _("Unexpected form in table context."),
    "unexpected-start-tag-implies-table-voodoo":
        _("Unexpected start tag (%(name)s) in "
          "table context caused voodoo mode."),
    "unexpected-end-tag-implies-table-voodoo":
        _("Unexpected end tag (%(name)s) in "
          "table context caused voodoo mode."),
    "unexpected-cell-in-table-body":
        _("Unexpected table cell start tag (%(name)s) "
          "in the table body phase."),
    "unexpected-cell-end-tag":
        _("Got table cell end tag (%(name)s) "
          "while required end tags are missing."),
    "unexpected-end-tag-in-table-body":
        _("Unexpected end tag (%(name)s) in the table body phase. Ignored."),
    "unexpected-implied-end-tag-in-table-row":
        _("Unexpected implied end tag (%(name)s) in the table row phase."),
    "unexpected-end-tag-in-table-row":
        _("Unexpected end tag (%(name)s) in the table row phase. Ignored."),
    "unexpected-select-in-select":
        _("Unexpected select start tag in the select phase "
          "treated as select end tag."),
    "unexpected-input-in-select":
        _("Unexpected input start tag in the select phase."),
    "unexpected-start-tag-in-select":
        _("Unexpected start tag token (%(name)s in the select phase. "
          "Ignored."),
    "unexpected-end-tag-in-select":
        _("Unexpected end tag (%(name)s) in the select phase. Ignored."),
    "unexpected-table-element-start-tag-in-select-in-table":
        _("Unexpected table element start tag (%(name)s) in the select in table phase."),
    "unexpected-table-element-end-tag-in-select-in-table":
        _("Unexpected table element end tag (%(name)s) in the select in table phase."),
    "unexpected-char-after-body":
        _("Unexpected non-space characters in the after body phase."),
    "unexpected-start-tag-after-body":
        _("Unexpected start tag token (%(name)s)"
          " in the after body phase."),
    "unexpected-end-tag-after-body":
        _("Unexpected end tag token (%(name)s)"
          " in the after body phase."),
    "unexpected-char-in-frameset":
        _("Unexpected characters in the frameset phase. Characters ignored."),
    "unexpected-start-tag-in-frameset":
        _("Unexpected start tag token (%(name)s)"
          " in the frameset phase. Ignored."),
    "unexpected-frameset-in-frameset-innerhtml":
        _("Unexpected end tag token (frameset) "
          "in the frameset phase (innerHTML)."),
    "unexpected-end-tag-in-frameset":
        _("Unexpected end tag token (%(name)s)"
          " in the frameset phase. Ignored."),
    "unexpected-char-after-frameset":
        _("Unexpected non-space characters in the "
          "after frameset phase. Ignored."),
    "unexpected-start-tag-after-frameset":
        _("Unexpected start tag (%(name)s)"
          " in the after frameset phase. Ignored."),
    "unexpected-end-tag-after-frameset":
        _("Unexpected end tag (%(name)s)"
          " in the after frameset phase. Ignored."),
    "unexpected-end-tag-after-body-innerhtml":
        _("Unexpected end tag after body(innerHtml)"),
    "expected-eof-but-got-char":
        _("Unexpected non-space characters. Expected end of file."),
    "expected-eof-but-got-start-tag":
        _("Unexpected start tag (%(name)s)"
          ". Expected end of file."),
    "expected-eof-but-got-end-tag":
        _("Unexpected end tag (%(name)s)"
          ". Expected end of file."),
    "eof-in-table":
        _("Unexpected end of file. Expected table content."),
    "eof-in-select":
        _("Unexpected end of file. Expected select content."),
    "eof-in-frameset":
        _("Unexpected end of file. Expected frameset content."),
    "eof-in-script-in-script":
        _("Unexpected end of file. Expected script content."),
    "eof-in-foreign-lands":
        _("Unexpected end of file. Expected foreign content"),
    "non-void-element-with-trailing-solidus":
        _("Trailing solidus not allowed on element %(name)s"),
    "unexpected-html-element-in-foreign-content":
        _("Element %(name)s not allowed in a non-html context"),
    "unexpected-end-tag-before-html":
        _("Unexpected end tag (%(name)s) before html."),
    "XXX-undefined-error":
        _("Undefined error (this sucks and should be fixed)"),
}
# Namespace URIs used throughout the parser, keyed by short prefix.
namespaces = dict(
    html="http://www.w3.org/1999/xhtml",
    mathml="http://www.w3.org/1998/Math/MathML",
    svg="http://www.w3.org/2000/svg",
    xlink="http://www.w3.org/1999/xlink",
    xml="http://www.w3.org/XML/1998/namespace",
    xmlns="http://www.w3.org/2000/xmlns/",
)
# (namespace, tag) pairs treated as scoping elements; grouped per
# namespace and combined into a single frozenset.
scopingElements = frozenset(
    [(namespaces["html"], tag) for tag in (
        "applet", "caption", "html", "marquee",
        "object", "table", "td", "th",
    )] +
    [(namespaces["mathml"], tag) for tag in (
        "mi", "mo", "mn", "ms", "mtext", "annotation-xml",
    )] +
    [(namespaces["svg"], tag) for tag in (
        "foreignObject", "desc", "title",
    )]
)
# (namespace, tag) pairs for the HTML formatting elements; all live in
# the html namespace, so pair each tag name with it.
formattingElements = frozenset(
    (namespaces["html"], tag) for tag in (
        "a", "b", "big", "code", "em", "font", "i",
        "nobr", "s", "small", "strike", "strong", "tt", "u",
    )
)
# (namespace, localName) pairs for the elements classified as "special"
# by the HTML parsing algorithm.
# Note that "image" is commented out in the spec as "this isn't an
# element that can end up on the stack, so it doesn't matter," but it is
# kept here for parity with the original listing.
specialElements = frozenset(
    [(namespaces["html"], tag) for tag in """
        address applet area article aside base basefont bgsound blockquote
        body br button caption center col colgroup command dd details dir
        div dl dt embed fieldset figure footer form frame frameset
        h1 h2 h3 h4 h5 h6 head header hr html iframe image img input
        isindex li link listing marquee menu meta nav noembed noframes
        noscript object ol p param plaintext pre script section select
        style table tbody td textarea tfoot th thead title tr ul wbr xmp
    """.split()]
    + [(namespaces["svg"], "foreignObject")]
)
# (namespace, localName) pairs for HTML integration points: foreign
# (MathML/SVG) elements whose children are parsed as ordinary HTML.
# Bug fix: the MathML element is spelled "annotation-xml"; the previous
# "annotaion-xml" misspelling could never match a real element, so MathML
# annotation-xml integration points were silently ignored.
htmlIntegrationPointElements = frozenset((
    (namespaces["mathml"], "annotation-xml"),
    (namespaces["svg"], "foreignObject"),
    (namespaces["svg"], "desc"),
    (namespaces["svg"], "title")
))
# (namespace, localName) pairs for the MathML text integration points:
# token elements whose character content is handled as HTML text.
mathmlTextIntegrationPointElements = frozenset(
    (namespaces["mathml"], tag)
    for tag in ("mi", "mo", "mn", "ms", "mtext")
)
# The five characters the HTML specification treats as "space
# characters": tab, line feed, form feed, space and carriage return.
spaceCharacters = frozenset("\t\n\u000C \r")
# Table-structure element names used by the table insertion-mode logic.
tableInsertModeElements = frozenset("table tbody tfoot thead tr".split())
# ASCII character classes built from the stdlib ``string`` constants.
asciiLowercase = frozenset(string.ascii_lowercase)
asciiUppercase = frozenset(string.ascii_uppercase)
asciiLetters = asciiLowercase | asciiUppercase
digits = frozenset(string.digits)
hexDigits = frozenset(string.hexdigits)

# Maps uppercase ASCII code points to their lowercase counterparts;
# usable with str.translate() for ASCII-only case folding.
asciiUpper2Lower = {ord(upper): ord(upper.lower())
                    for upper in string.ascii_uppercase}
# Heading elements need to be ordered (h1 outranks h2, and so on), so
# this is a tuple rather than a frozenset.
headingElements = tuple("h" + str(rank) for rank in range(1, 7))
# Void elements: they have no content and never take an end tag.
voidElements = frozenset(
    "base command event-source link meta hr br img embed param "
    "area col input source track".split()
)
cdataElements = frozenset(["title", "textarea"])
rcdataElements = frozenset(
    "style script xmp iframe noembed noframes noscript".split()
)
# Per-element-name sets of attributes that are boolean (their presence
# alone carries the meaning); the "" key applies to every element.
booleanAttributes = {
    "": frozenset(("irrelevant",)),
    "style": frozenset(("scoped",)),
    "img": frozenset(("ismap",)),
    "audio": frozenset(("autoplay", "controls")),
    "video": frozenset(("autoplay", "controls")),
    "script": frozenset(("defer", "async")),
    "details": frozenset(("open",)),
    "datagrid": frozenset(("multiple", "disabled")),
    "command": frozenset(("hidden", "disabled", "checked", "default")),
    # Bug fix: ("noshade") is just a parenthesised string, so the original
    # built a frozenset of its individual *characters*; the trailing comma
    # makes it the intended one-element tuple.
    "hr": frozenset(("noshade",)),
    "menu": frozenset(("autosubmit",)),
    "fieldset": frozenset(("disabled", "readonly")),
    "option": frozenset(("disabled", "readonly", "selected")),
    "optgroup": frozenset(("disabled", "readonly")),
    "button": frozenset(("disabled", "autofocus")),
    "input": frozenset(("disabled", "readonly", "required", "autofocus",
                        "checked", "ismap")),
    "select": frozenset(("disabled", "readonly", "autofocus", "multiple")),
    "output": frozenset(("disabled", "readonly")),
}
# entitiesWindows1252 has to be _ordered_ and needs to have an index. It
# therefore can't be a frozenset.
# Per the inline comments, position i corresponds to byte 0x80 + i in the
# Windows-1252 encoding and holds the Unicode code point it maps to;
# 65533 (U+FFFD REPLACEMENT CHARACTER) marks bytes undefined in that
# encoding.
entitiesWindows1252 = (
    8364,   # 0x80  0x20AC  EURO SIGN
    65533,  # 0x81          UNDEFINED
    8218,   # 0x82  0x201A  SINGLE LOW-9 QUOTATION MARK
    402,    # 0x83  0x0192  LATIN SMALL LETTER F WITH HOOK
    8222,   # 0x84  0x201E  DOUBLE LOW-9 QUOTATION MARK
    8230,   # 0x85  0x2026  HORIZONTAL ELLIPSIS
    8224,   # 0x86  0x2020  DAGGER
    8225,   # 0x87  0x2021  DOUBLE DAGGER
    710,    # 0x88  0x02C6  MODIFIER LETTER CIRCUMFLEX ACCENT
    8240,   # 0x89  0x2030  PER MILLE SIGN
    352,    # 0x8A  0x0160  LATIN CAPITAL LETTER S WITH CARON
    8249,   # 0x8B  0x2039  SINGLE LEFT-POINTING ANGLE QUOTATION MARK
    338,    # 0x8C  0x0152  LATIN CAPITAL LIGATURE OE
    65533,  # 0x8D          UNDEFINED
    381,    # 0x8E  0x017D  LATIN CAPITAL LETTER Z WITH CARON
    65533,  # 0x8F          UNDEFINED
    65533,  # 0x90          UNDEFINED
    8216,   # 0x91  0x2018  LEFT SINGLE QUOTATION MARK
    8217,   # 0x92  0x2019  RIGHT SINGLE QUOTATION MARK
    8220,   # 0x93  0x201C  LEFT DOUBLE QUOTATION MARK
    8221,   # 0x94  0x201D  RIGHT DOUBLE QUOTATION MARK
    8226,   # 0x95  0x2022  BULLET
    8211,   # 0x96  0x2013  EN DASH
    8212,   # 0x97  0x2014  EM DASH
    732,    # 0x98  0x02DC  SMALL TILDE
    8482,   # 0x99  0x2122  TRADE MARK SIGN
    353,    # 0x9A  0x0161  LATIN SMALL LETTER S WITH CARON
    8250,   # 0x9B  0x203A  SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
    339,    # 0x9C  0x0153  LATIN SMALL LIGATURE OE
    65533,  # 0x9D          UNDEFINED
    382,    # 0x9E  0x017E  LATIN SMALL LETTER Z WITH CARON
    376     # 0x9F  0x0178  LATIN CAPITAL LETTER Y WITH DIAERESIS
)
# The five predefined XML entity names (with their trailing semicolons).
xmlEntities = frozenset("lt; gt; amp; apos; quot;".split())
entities = {
"AElig": "\xc6",
"AElig;": "\xc6",
"AMP": "&",
"AMP;": "&",
"Aacute": "\xc1",
"Aacute;": "\xc1",
"Abreve;": "\u0102",
"Acirc": "\xc2",
"Acirc;": "\xc2",
"Acy;": "\u0410",
"Afr;": "\U0001d504",
"Agrave": "\xc0",
"Agrave;": "\xc0",
"Alpha;": "\u0391",
"Amacr;": "\u0100",
"And;": "\u2a53",
"Aogon;": "\u0104",
"Aopf;": "\U0001d538",
"ApplyFunction;": "\u2061",
"Aring": "\xc5",
"Aring;": "\xc5",
"Ascr;": "\U0001d49c",
"Assign;": "\u2254",
"Atilde": "\xc3",
"Atilde;": "\xc3",
"Auml": "\xc4",
"Auml;": "\xc4",
"Backslash;": "\u2216",
"Barv;": "\u2ae7",
"Barwed;": "\u2306",
"Bcy;": "\u0411",
"Because;": "\u2235",
"Bernoullis;": "\u212c",
"Beta;": "\u0392",
"Bfr;": "\U0001d505",
"Bopf;": "\U0001d539",
"Breve;": "\u02d8",
"Bscr;": "\u212c",
"Bumpeq;": "\u224e",
"CHcy;": "\u0427",
"COPY": "\xa9",
"COPY;": "\xa9",
"Cacute;": "\u0106",
"Cap;": "\u22d2",
"CapitalDifferentialD;": "\u2145",
"Cayleys;": "\u212d",
"Ccaron;": "\u010c",
"Ccedil": "\xc7",
"Ccedil;": "\xc7",
"Ccirc;": "\u0108",
"Cconint;": "\u2230",
"Cdot;": "\u010a",
"Cedilla;": "\xb8",
"CenterDot;": "\xb7",
"Cfr;": "\u212d",
"Chi;": "\u03a7",
"CircleDot;": "\u2299",
"CircleMinus;": "\u2296",
"CirclePlus;": "\u2295",
"CircleTimes;": "\u2297",
"ClockwiseContourIntegral;": "\u2232",
"CloseCurlyDoubleQuote;": "\u201d",
"CloseCurlyQuote;": "\u2019",
"Colon;": "\u2237",
"Colone;": "\u2a74",
"Congruent;": "\u2261",
"Conint;": "\u222f",
"ContourIntegral;": "\u222e",
"Copf;": "\u2102",
"Coproduct;": "\u2210",
"CounterClockwiseContourIntegral;": "\u2233",
"Cross;": "\u2a2f",
"Cscr;": "\U0001d49e",
"Cup;": "\u22d3",
"CupCap;": "\u224d",
"DD;": "\u2145",
"DDotrahd;": "\u2911",
"DJcy;": "\u0402",
"DScy;": "\u0405",
"DZcy;": "\u040f",
"Dagger;": "\u2021",
"Darr;": "\u21a1",
"Dashv;": "\u2ae4",
"Dcaron;": "\u010e",
"Dcy;": "\u0414",
"Del;": "\u2207",
"Delta;": "\u0394",
"Dfr;": "\U0001d507",
"DiacriticalAcute;": "\xb4",
"DiacriticalDot;": "\u02d9",
"DiacriticalDoubleAcute;": "\u02dd",
"DiacriticalGrave;": "`",
"DiacriticalTilde;": "\u02dc",
"Diamond;": "\u22c4",
"DifferentialD;": "\u2146",
"Dopf;": "\U0001d53b",
"Dot;": "\xa8",
"DotDot;": "\u20dc",
"DotEqual;": "\u2250",
"DoubleContourIntegral;": "\u222f",
"DoubleDot;": "\xa8",
"DoubleDownArrow;": "\u21d3",
"DoubleLeftArrow;": "\u21d0",
"DoubleLeftRightArrow;": "\u21d4",
"DoubleLeftTee;": "\u2ae4",
"DoubleLongLeftArrow;": "\u27f8",
"DoubleLongLeftRightArrow;": "\u27fa",
"DoubleLongRightArrow;": "\u27f9",
"DoubleRightArrow;": "\u21d2",
"DoubleRightTee;": "\u22a8",
"DoubleUpArrow;": "\u21d1",
"DoubleUpDownArrow;": "\u21d5",
"DoubleVerticalBar;": "\u2225",
"DownArrow;": "\u2193",
"DownArrowBar;": "\u2913",
"DownArrowUpArrow;": "\u21f5",
"DownBreve;": "\u0311",
"DownLeftRightVector;": "\u2950",
"DownLeftTeeVector;": "\u295e",
"DownLeftVector;": "\u21bd",
"DownLeftVectorBar;": "\u2956",
"DownRightTeeVector;": "\u295f",
"DownRightVector;": "\u21c1",
"DownRightVectorBar;": "\u2957",
"DownTee;": "\u22a4",
"DownTeeArrow;": "\u21a7",
"Downarrow;": "\u21d3",
"Dscr;": "\U0001d49f",
"Dstrok;": "\u0110",
"ENG;": "\u014a",
"ETH": "\xd0",
"ETH;": "\xd0",
"Eacute": "\xc9",
"Eacute;": "\xc9",
"Ecaron;": "\u011a",
"Ecirc": "\xca",
"Ecirc;": "\xca",
"Ecy;": "\u042d",
"Edot;": "\u0116",
"Efr;": "\U0001d508",
"Egrave": "\xc8",
"Egrave;": "\xc8",
"Element;": "\u2208",
"Emacr;": "\u0112",
"EmptySmallSquare;": "\u25fb",
"EmptyVerySmallSquare;": "\u25ab",
"Eogon;": "\u0118",
"Eopf;": "\U0001d53c",
"Epsilon;": "\u0395",
"Equal;": "\u2a75",
"EqualTilde;": "\u2242",
"Equilibrium;": "\u21cc",
"Escr;": "\u2130",
"Esim;": "\u2a73",
"Eta;": "\u0397",
"Euml": "\xcb",
"Euml;": "\xcb",
"Exists;": "\u2203",
"ExponentialE;": "\u2147",
"Fcy;": "\u0424",
"Ffr;": "\U0001d509",
"FilledSmallSquare;": "\u25fc",
"FilledVerySmallSquare;": "\u25aa",
"Fopf;": "\U0001d53d",
"ForAll;": "\u2200",
"Fouriertrf;": "\u2131",
"Fscr;": "\u2131",
"GJcy;": "\u0403",
"GT": ">",
"GT;": ">",
"Gamma;": "\u0393",
"Gammad;": "\u03dc",
"Gbreve;": "\u011e",
"Gcedil;": "\u0122",
"Gcirc;": "\u011c",
"Gcy;": "\u0413",
"Gdot;": "\u0120",
"Gfr;": "\U0001d50a",
"Gg;": "\u22d9",
"Gopf;": "\U0001d53e",
"GreaterEqual;": "\u2265",
"GreaterEqualLess;": "\u22db",
"GreaterFullEqual;": "\u2267",
"GreaterGreater;": "\u2aa2",
"GreaterLess;": "\u2277",
"GreaterSlantEqual;": "\u2a7e",
"GreaterTilde;": "\u2273",
"Gscr;": "\U0001d4a2",
"Gt;": "\u226b",
"HARDcy;": "\u042a",
"Hacek;": "\u02c7",
"Hat;": "^",
"Hcirc;": "\u0124",
"Hfr;": "\u210c",
"HilbertSpace;": "\u210b",
"Hopf;": "\u210d",
"HorizontalLine;": "\u2500",
"Hscr;": "\u210b",
"Hstrok;": "\u0126",
"HumpDownHump;": "\u224e",
"HumpEqual;": "\u224f",
"IEcy;": "\u0415",
"IJlig;": "\u0132",
"IOcy;": "\u0401",
"Iacute": "\xcd",
"Iacute;": "\xcd",
"Icirc": "\xce",
"Icirc;": "\xce",
"Icy;": "\u0418",
"Idot;": "\u0130",
"Ifr;": "\u2111",
"Igrave": "\xcc",
"Igrave;": "\xcc",
"Im;": "\u2111",
"Imacr;": "\u012a",
"ImaginaryI;": "\u2148",
"Implies;": "\u21d2",
"Int;": "\u222c",
"Integral;": "\u222b",
"Intersection;": "\u22c2",
"InvisibleComma;": "\u2063",
"InvisibleTimes;": "\u2062",
"Iogon;": "\u012e",
"Iopf;": "\U0001d540",
"Iota;": "\u0399",
"Iscr;": "\u2110",
"Itilde;": "\u0128",
"Iukcy;": "\u0406",
"Iuml": "\xcf",
"Iuml;": "\xcf",
"Jcirc;": "\u0134",
"Jcy;": "\u0419",
"Jfr;": "\U0001d50d",
"Jopf;": "\U0001d541",
"Jscr;": "\U0001d4a5",
"Jsercy;": "\u0408",
"Jukcy;": "\u0404",
"KHcy;": "\u0425",
"KJcy;": "\u040c",
"Kappa;": "\u039a",
"Kcedil;": "\u0136",
"Kcy;": "\u041a",
"Kfr;": "\U0001d50e",
"Kopf;": "\U0001d542",
"Kscr;": "\U0001d4a6",
"LJcy;": "\u0409",
"LT": "<",
"LT;": "<",
"Lacute;": "\u0139",
"Lambda;": "\u039b",
"Lang;": "\u27ea",
"Laplacetrf;": "\u2112",
"Larr;": "\u219e",
"Lcaron;": "\u013d",
"Lcedil;": "\u013b",
"Lcy;": "\u041b",
"LeftAngleBracket;": "\u27e8",
"LeftArrow;": "\u2190",
"LeftArrowBar;": "\u21e4",
"LeftArrowRightArrow;": "\u21c6",
"LeftCeiling;": "\u2308",
"LeftDoubleBracket;": "\u27e6",
"LeftDownTeeVector;": "\u2961",
"LeftDownVector;": "\u21c3",
"LeftDownVectorBar;": "\u2959",
"LeftFloor;": "\u230a",
"LeftRightArrow;": "\u2194",
"LeftRightVector;": "\u294e",
"LeftTee;": "\u22a3",
"LeftTeeArrow;": "\u21a4",
"LeftTeeVector;": "\u295a",
"LeftTriangle;": "\u22b2",
"LeftTriangleBar;": "\u29cf",
"LeftTriangleEqual;": "\u22b4",
"LeftUpDownVector;": "\u2951",
"LeftUpTeeVector;": "\u2960",
"LeftUpVector;": "\u21bf",
"LeftUpVectorBar;": "\u2958",
"LeftVector;": "\u21bc",
"LeftVectorBar;": "\u2952",
"Leftarrow;": "\u21d0",
"Leftrightarrow;": "\u21d4",
"LessEqualGreater;": "\u22da",
"LessFullEqual;": "\u2266",
"LessGreater;": "\u2276",
"LessLess;": "\u2aa1",
"LessSlantEqual;": "\u2a7d",
"LessTilde;": "\u2272",
"Lfr;": "\U0001d50f",
"Ll;": "\u22d8",
"Lleftarrow;": "\u21da",
"Lmidot;": "\u013f",
"LongLeftArrow;": "\u27f5",
"LongLeftRightArrow;": "\u27f7",
"LongRightArrow;": "\u27f6",
"Longleftarrow;": "\u27f8",
"Longleftrightarrow;": "\u27fa",
"Longrightarrow;": "\u27f9",
"Lopf;": "\U0001d543",
"LowerLeftArrow;": "\u2199",
"LowerRightArrow;": "\u2198",
"Lscr;": "\u2112",
"Lsh;": "\u21b0",
"Lstrok;": "\u0141",
"Lt;": "\u226a",
"Map;": "\u2905",
"Mcy;": "\u041c",
"MediumSpace;": "\u205f",
"Mellintrf;": "\u2133",
"Mfr;": "\U0001d510",
"MinusPlus;": "\u2213",
"Mopf;": "\U0001d544",
"Mscr;": "\u2133",
"Mu;": "\u039c",
"NJcy;": "\u040a",
"Nacute;": "\u0143",
"Ncaron;": "\u0147",
"Ncedil;": "\u0145",
"Ncy;": "\u041d",
"NegativeMediumSpace;": "\u200b",
"NegativeThickSpace;": "\u200b",
"NegativeThinSpace;": "\u200b",
"NegativeVeryThinSpace;": "\u200b",
"NestedGreaterGreater;": "\u226b",
"NestedLessLess;": "\u226a",
"NewLine;": "\n",
"Nfr;": "\U0001d511",
"NoBreak;": "\u2060",
"NonBreakingSpace;": "\xa0",
"Nopf;": "\u2115",
"Not;": "\u2aec",
"NotCongruent;": "\u2262",
"NotCupCap;": "\u226d",
"NotDoubleVerticalBar;": "\u2226",
"NotElement;": "\u2209",
"NotEqual;": "\u2260",
"NotEqualTilde;": "\u2242\u0338",
"NotExists;": "\u2204",
"NotGreater;": "\u226f",
"NotGreaterEqual;": "\u2271",
"NotGreaterFullEqual;": "\u2267\u0338",
"NotGreaterGreater;": "\u226b\u0338",
"NotGreaterLess;": "\u2279",
"NotGreaterSlantEqual;": "\u2a7e\u0338",
"NotGreaterTilde;": "\u2275",
"NotHumpDownHump;": "\u224e\u0338",
"NotHumpEqual;": "\u224f\u0338",
"NotLeftTriangle;": "\u22ea",
"NotLeftTriangleBar;": "\u29cf\u0338",
"NotLeftTriangleEqual;": "\u22ec",
"NotLess;": "\u226e",
"NotLessEqual;": "\u2270",
"NotLessGreater;": "\u2278",
"NotLessLess;": "\u226a\u0338",
"NotLessSlantEqual;": "\u2a7d\u0338",
"NotLessTilde;": "\u2274",
"NotNestedGreaterGreater;": "\u2aa2\u0338",
"NotNestedLessLess;": "\u2aa1\u0338",
"NotPrecedes;": "\u2280",
"NotPrecedesEqual;": "\u2aaf\u0338",
"NotPrecedesSlantEqual;": "\u22e0",
"NotReverseElement;": "\u220c",
"NotRightTriangle;": "\u22eb",
"NotRightTriangleBar;": "\u29d0\u0338",
"NotRightTriangleEqual;": "\u22ed",
"NotSquareSubset;": "\u228f\u0338",
"NotSquareSubsetEqual;": "\u22e2",
"NotSquareSuperset;": "\u2290\u0338",
"NotSquareSupersetEqual;": "\u22e3",
"NotSubset;": "\u2282\u20d2",
"NotSubsetEqual;": "\u2288",
"NotSucceeds;": "\u2281",
"NotSucceedsEqual;": "\u2ab0\u0338",
"NotSucceedsSlantEqual;": "\u22e1",
"NotSucceedsTilde;": "\u227f\u0338",
"NotSuperset;": "\u2283\u20d2",
"NotSupersetEqual;": "\u2289",
"NotTilde;": "\u2241",
"NotTildeEqual;": "\u2244",
"NotTildeFullEqual;": "\u2247",
"NotTildeTilde;": "\u2249",
"NotVerticalBar;": "\u2224",
"Nscr;": "\U0001d4a9",
"Ntilde": "\xd1",
"Ntilde;": "\xd1",
"Nu;": "\u039d",
"OElig;": "\u0152",
"Oacute": "\xd3",
"Oacute;": "\xd3",
"Ocirc": "\xd4",
"Ocirc;": "\xd4",
"Ocy;": "\u041e",
"Odblac;": "\u0150",
"Ofr;": "\U0001d512",
"Ograve": "\xd2",
"Ograve;": "\xd2",
"Omacr;": "\u014c",
"Omega;": "\u03a9",
"Omicron;": "\u039f",
"Oopf;": "\U0001d546",
"OpenCurlyDoubleQuote;": "\u201c",
"OpenCurlyQuote;": "\u2018",
"Or;": "\u2a54",
"Oscr;": "\U0001d4aa",
"Oslash": "\xd8",
"Oslash;": "\xd8",
"Otilde": "\xd5",
"Otilde;": "\xd5",
"Otimes;": "\u2a37",
"Ouml": "\xd6",
"Ouml;": "\xd6",
"OverBar;": "\u203e",
"OverBrace;": "\u23de",
"OverBracket;": "\u23b4",
"OverParenthesis;": "\u23dc",
"PartialD;": "\u2202",
"Pcy;": "\u041f",
"Pfr;": "\U0001d513",
"Phi;": "\u03a6",
"Pi;": "\u03a0",
"PlusMinus;": "\xb1",
"Poincareplane;": "\u210c",
"Popf;": "\u2119",
"Pr;": "\u2abb",
"Precedes;": "\u227a",
"PrecedesEqual;": "\u2aaf",
"PrecedesSlantEqual;": "\u227c",
"PrecedesTilde;": "\u227e",
"Prime;": "\u2033",
"Product;": "\u220f",
"Proportion;": "\u2237",
"Proportional;": "\u221d",
"Pscr;": "\U0001d4ab",
"Psi;": "\u03a8",
"QUOT": "\"",
"QUOT;": "\"",
"Qfr;": "\U0001d514",
"Qopf;": "\u211a",
"Qscr;": "\U0001d4ac",
"RBarr;": "\u2910",
"REG": "\xae",
"REG;": "\xae",
"Racute;": "\u0154",
"Rang;": "\u27eb",
"Rarr;": "\u21a0",
"Rarrtl;": "\u2916",
"Rcaron;": "\u0158",
"Rcedil;": "\u0156",
"Rcy;": "\u0420",
"Re;": "\u211c",
"ReverseElement;": "\u220b",
"ReverseEquilibrium;": "\u21cb",
"ReverseUpEquilibrium;": "\u296f",
"Rfr;": "\u211c",
"Rho;": "\u03a1",
"RightAngleBracket;": "\u27e9",
"RightArrow;": "\u2192",
"RightArrowBar;": "\u21e5",
"RightArrowLeftArrow;": "\u21c4",
"RightCeiling;": "\u2309",
"RightDoubleBracket;": "\u27e7",
"RightDownTeeVector;": "\u295d",
"RightDownVector;": "\u21c2",
"RightDownVectorBar;": "\u2955",
"RightFloor;": "\u230b",
"RightTee;": "\u22a2",
"RightTeeArrow;": "\u21a6",
"RightTeeVector;": "\u295b",
"RightTriangle;": "\u22b3",
"RightTriangleBar;": "\u29d0",
"RightTriangleEqual;": "\u22b5",
"RightUpDownVector;": "\u294f",
"RightUpTeeVector;": "\u295c",
"RightUpVector;": "\u21be",
"RightUpVectorBar;": "\u2954",
"RightVector;": "\u21c0",
"RightVectorBar;": "\u2953",
"Rightarrow;": "\u21d2",
"Ropf;": "\u211d",
"RoundImplies;": "\u2970",
"Rrightarrow;": "\u21db",
"Rscr;": "\u211b",
"Rsh;": "\u21b1",
"RuleDelayed;": "\u29f4",
"SHCHcy;": "\u0429",
"SHcy;": "\u0428",
"SOFTcy;": "\u042c",
"Sacute;": "\u015a",
"Sc;": "\u2abc",
"Scaron;": "\u0160",
"Scedil;": "\u015e",
"Scirc;": "\u015c",
"Scy;": "\u0421",
"Sfr;": "\U0001d516",
"ShortDownArrow;": "\u2193",
"ShortLeftArrow;": "\u2190",
"ShortRightArrow;": "\u2192",
"ShortUpArrow;": "\u2191",
"Sigma;": "\u03a3",
"SmallCircle;": "\u2218",
"Sopf;": "\U0001d54a",
"Sqrt;": "\u221a",
"Square;": "\u25a1",
"SquareIntersection;": "\u2293",
"SquareSubset;": "\u228f",
"SquareSubsetEqual;": "\u2291",
"SquareSuperset;": "\u2290",
"SquareSupersetEqual;": "\u2292",
"SquareUnion;": "\u2294",
"Sscr;": "\U0001d4ae",
"Star;": "\u22c6",
"Sub;": "\u22d0",
"Subset;": "\u22d0",
"SubsetEqual;": "\u2286",
"Succeeds;": "\u227b",
"SucceedsEqual;": "\u2ab0",
"SucceedsSlantEqual;": "\u227d",
"SucceedsTilde;": "\u227f",
"SuchThat;": "\u220b",
"Sum;": "\u2211",
"Sup;": "\u22d1",
"Superset;": "\u2283",
"SupersetEqual;": "\u2287",
"Supset;": "\u22d1",
"THORN": "\xde",
"THORN;": "\xde",
"TRADE;": "\u2122",
"TSHcy;": "\u040b",
"TScy;": "\u0426",
"Tab;": "\t",
"Tau;": "\u03a4",
"Tcaron;": "\u0164",
"Tcedil;": "\u0162",
"Tcy;": "\u0422",
"Tfr;": "\U0001d517",
"Therefore;": "\u2234",
"Theta;": "\u0398",
"ThickSpace;": "\u205f\u200a",
"ThinSpace;": "\u2009",
"Tilde;": "\u223c",
"TildeEqual;": "\u2243",
"TildeFullEqual;": "\u2245",
"TildeTilde;": "\u2248",
"Topf;": "\U0001d54b",
"TripleDot;": "\u20db",
"Tscr;": "\U0001d4af",
"Tstrok;": "\u0166",
"Uacute": "\xda",
"Uacute;": "\xda",
"Uarr;": "\u219f",
"Uarrocir;": "\u2949",
"Ubrcy;": "\u040e",
"Ubreve;": "\u016c",
"Ucirc": "\xdb",
"Ucirc;": "\xdb",
"Ucy;": "\u0423",
"Udblac;": "\u0170",
"Ufr;": "\U0001d518",
"Ugrave": "\xd9",
"Ugrave;": "\xd9",
"Umacr;": "\u016a",
"UnderBar;": "_",
"UnderBrace;": "\u23df",
"UnderBracket;": "\u23b5",
"UnderParenthesis;": "\u23dd",
"Union;": "\u22c3",
"UnionPlus;": "\u228e",
"Uogon;": "\u0172",
"Uopf;": "\U0001d54c",
"UpArrow;": "\u2191",
"UpArrowBar;": "\u2912",
"UpArrowDownArrow;": "\u21c5",
"UpDownArrow;": "\u2195",
"UpEquilibrium;": "\u296e",
"UpTee;": "\u22a5",
"UpTeeArrow;": "\u21a5",
"Uparrow;": "\u21d1",
"Updownarrow;": "\u21d5",
"UpperLeftArrow;": "\u2196",
"UpperRightArrow;": "\u2197",
"Upsi;": "\u03d2",
"Upsilon;": "\u03a5",
"Uring;": "\u016e",
"Uscr;": "\U0001d4b0",
"Utilde;": "\u0168",
"Uuml": "\xdc",
"Uuml;": "\xdc",
"VDash;": "\u22ab",
"Vbar;": "\u2aeb",
"Vcy;": "\u0412",
"Vdash;": "\u22a9",
"Vdashl;": "\u2ae6",
"Vee;": "\u22c1",
"Verbar;": "\u2016",
"Vert;": "\u2016",
"VerticalBar;": "\u2223",
"VerticalLine;": "|",
"VerticalSeparator;": "\u2758",
"VerticalTilde;": "\u2240",
"VeryThinSpace;": "\u200a",
"Vfr;": "\U0001d519",
"Vopf;": "\U0001d54d",
"Vscr;": "\U0001d4b1",
"Vvdash;": "\u22aa",
"Wcirc;": "\u0174",
"Wedge;": "\u22c0",
"Wfr;": "\U0001d51a",
"Wopf;": "\U0001d54e",
"Wscr;": "\U0001d4b2",
"Xfr;": "\U0001d51b",
"Xi;": "\u039e",
"Xopf;": "\U0001d54f",
"Xscr;": "\U0001d4b3",
"YAcy;": "\u042f",
"YIcy;": "\u0407",
"YUcy;": "\u042e",
"Yacute": "\xdd",
"Yacute;": "\xdd",
"Ycirc;": "\u0176",
"Ycy;": "\u042b",
"Yfr;": "\U0001d51c",
"Yopf;": "\U0001d550",
"Yscr;": "\U0001d4b4",
"Yuml;": "\u0178",
"ZHcy;": "\u0416",
"Zacute;": "\u0179",
"Zcaron;": "\u017d",
"Zcy;": "\u0417",
"Zdot;": "\u017b",
"ZeroWidthSpace;": "\u200b",
"Zeta;": "\u0396",
"Zfr;": "\u2128",
"Zopf;": "\u2124",
"Zscr;": "\U0001d4b5",
"aacute": "\xe1",
"aacute;": "\xe1",
"abreve;": "\u0103",
"ac;": "\u223e",
"acE;": "\u223e\u0333",
"acd;": "\u223f",
"acirc": "\xe2",
"acirc;": "\xe2",
"acute": "\xb4",
"acute;": "\xb4",
"acy;": "\u0430",
"aelig": "\xe6",
"aelig;": "\xe6",
"af;": "\u2061",
"afr;": "\U0001d51e",
"agrave": "\xe0",
"agrave;": "\xe0",
"alefsym;": "\u2135",
"aleph;": "\u2135",
"alpha;": "\u03b1",
"amacr;": "\u0101",
"amalg;": "\u2a3f",
"amp": "&",
"amp;": "&",
"and;": "\u2227",
"andand;": "\u2a55",
"andd;": "\u2a5c",
"andslope;": "\u2a58",
"andv;": "\u2a5a",
"ang;": "\u2220",
"ange;": "\u29a4",
"angle;": "\u2220",
"angmsd;": "\u2221",
"angmsdaa;": "\u29a8",
"angmsdab;": "\u29a9",
"angmsdac;": "\u29aa",
"angmsdad;": "\u29ab",
"angmsdae;": "\u29ac",
"angmsdaf;": "\u29ad",
"angmsdag;": "\u29ae",
"angmsdah;": "\u29af",
"angrt;": "\u221f",
"angrtvb;": "\u22be",
"angrtvbd;": "\u299d",
"angsph;": "\u2222",
"angst;": "\xc5",
"angzarr;": "\u237c",
"aogon;": "\u0105",
"aopf;": "\U0001d552",
"ap;": "\u2248",
"apE;": "\u2a70",
"apacir;": "\u2a6f",
"ape;": "\u224a",
"apid;": "\u224b",
"apos;": "'",
"approx;": "\u2248",
"approxeq;": "\u224a",
"aring": "\xe5",
"aring;": "\xe5",
"ascr;": "\U0001d4b6",
"ast;": "*",
"asymp;": "\u2248",
"asympeq;": "\u224d",
"atilde": "\xe3",
"atilde;": "\xe3",
"auml": "\xe4",
"auml;": "\xe4",
"awconint;": "\u2233",
"awint;": "\u2a11",
"bNot;": "\u2aed",
"backcong;": "\u224c",
"backepsilon;": "\u03f6",
"backprime;": "\u2035",
"backsim;": "\u223d",
"backsimeq;": "\u22cd",
"barvee;": "\u22bd",
"barwed;": "\u2305",
"barwedge;": "\u2305",
"bbrk;": "\u23b5",
"bbrktbrk;": "\u23b6",
"bcong;": "\u224c",
"bcy;": "\u0431",
"bdquo;": "\u201e",
"becaus;": "\u2235",
"because;": "\u2235",
"bemptyv;": "\u29b0",
"bepsi;": "\u03f6",
"bernou;": "\u212c",
"beta;": "\u03b2",
"beth;": "\u2136",
"between;": "\u226c",
"bfr;": "\U0001d51f",
"bigcap;": "\u22c2",
"bigcirc;": "\u25ef",
"bigcup;": "\u22c3",
"bigodot;": "\u2a00",
"bigoplus;": "\u2a01",
"bigotimes;": "\u2a02",
"bigsqcup;": "\u2a06",
"bigstar;": "\u2605",
"bigtriangledown;": "\u25bd",
"bigtriangleup;": "\u25b3",
"biguplus;": "\u2a04",
"bigvee;": "\u22c1",
"bigwedge;": "\u22c0",
"bkarow;": "\u290d",
"blacklozenge;": "\u29eb",
"blacksquare;": "\u25aa",
"blacktriangle;": "\u25b4",
"blacktriangledown;": "\u25be",
"blacktriangleleft;": "\u25c2",
"blacktriangleright;": "\u25b8",
"blank;": "\u2423",
"blk12;": "\u2592",
"blk14;": "\u2591",
"blk34;": "\u2593",
"block;": "\u2588",
"bne;": "=\u20e5",
"bnequiv;": "\u2261\u20e5",
"bnot;": "\u2310",
"bopf;": "\U0001d553",
"bot;": "\u22a5",
"bottom;": "\u22a5",
"bowtie;": "\u22c8",
"boxDL;": "\u2557",
"boxDR;": "\u2554",
"boxDl;": "\u2556",
"boxDr;": "\u2553",
"boxH;": "\u2550",
"boxHD;": "\u2566",
"boxHU;": "\u2569",
"boxHd;": "\u2564",
"boxHu;": "\u2567",
"boxUL;": "\u255d",
"boxUR;": "\u255a",
"boxUl;": "\u255c",
"boxUr;": "\u2559",
"boxV;": "\u2551",
"boxVH;": "\u256c",
"boxVL;": "\u2563",
"boxVR;": "\u2560",
"boxVh;": "\u256b",
"boxVl;": "\u2562",
"boxVr;": "\u255f",
"boxbox;": "\u29c9",
"boxdL;": "\u2555",
"boxdR;": "\u2552",
"boxdl;": "\u2510",
"boxdr;": "\u250c",
"boxh;": "\u2500",
"boxhD;": "\u2565",
"boxhU;": "\u2568",
"boxhd;": "\u252c",
"boxhu;": "\u2534",
"boxminus;": "\u229f",
"boxplus;": "\u229e",
"boxtimes;": "\u22a0",
"boxuL;": "\u255b",
"boxuR;": "\u2558",
"boxul;": "\u2518",
"boxur;": "\u2514",
"boxv;": "\u2502",
"boxvH;": "\u256a",
"boxvL;": "\u2561",
"boxvR;": "\u255e",
"boxvh;": "\u253c",
"boxvl;": "\u2524",
"boxvr;": "\u251c",
"bprime;": "\u2035",
"breve;": "\u02d8",
"brvbar": "\xa6",
"brvbar;": "\xa6",
"bscr;": "\U0001d4b7",
"bsemi;": "\u204f",
"bsim;": "\u223d",
"bsime;": "\u22cd",
"bsol;": "\\",
"bsolb;": "\u29c5",
"bsolhsub;": "\u27c8",
"bull;": "\u2022",
"bullet;": "\u2022",
"bump;": "\u224e",
"bumpE;": "\u2aae",
"bumpe;": "\u224f",
"bumpeq;": "\u224f",
"cacute;": "\u0107",
"cap;": "\u2229",
"capand;": "\u2a44",
"capbrcup;": "\u2a49",
"capcap;": "\u2a4b",
"capcup;": "\u2a47",
"capdot;": "\u2a40",
"caps;": "\u2229\ufe00",
"caret;": "\u2041",
"caron;": "\u02c7",
"ccaps;": "\u2a4d",
"ccaron;": "\u010d",
"ccedil": "\xe7",
"ccedil;": "\xe7",
"ccirc;": "\u0109",
"ccups;": "\u2a4c",
"ccupssm;": "\u2a50",
"cdot;": "\u010b",
"cedil": "\xb8",
"cedil;": "\xb8",
"cemptyv;": "\u29b2",
"cent": "\xa2",
"cent;": "\xa2",
"centerdot;": "\xb7",
"cfr;": "\U0001d520",
"chcy;": "\u0447",
"check;": "\u2713",
"checkmark;": "\u2713",
"chi;": "\u03c7",
"cir;": "\u25cb",
"cirE;": "\u29c3",
"circ;": "\u02c6",
"circeq;": "\u2257",
"circlearrowleft;": "\u21ba",
"circlearrowright;": "\u21bb",
"circledR;": "\xae",
"circledS;": "\u24c8",
"circledast;": "\u229b",
"circledcirc;": "\u229a",
"circleddash;": "\u229d",
"cire;": "\u2257",
"cirfnint;": "\u2a10",
"cirmid;": "\u2aef",
"cirscir;": "\u29c2",
"clubs;": "\u2663",
"clubsuit;": "\u2663",
"colon;": ":",
"colone;": "\u2254",
"coloneq;": "\u2254",
"comma;": ",",
"commat;": "@",
"comp;": "\u2201",
"compfn;": "\u2218",
"complement;": "\u2201",
"complexes;": "\u2102",
"cong;": "\u2245",
"congdot;": "\u2a6d",
"conint;": "\u222e",
"copf;": "\U0001d554",
"coprod;": "\u2210",
"copy": "\xa9",
"copy;": "\xa9",
"copysr;": "\u2117",
"crarr;": "\u21b5",
"cross;": "\u2717",
"cscr;": "\U0001d4b8",
"csub;": "\u2acf",
"csube;": "\u2ad1",
"csup;": "\u2ad0",
"csupe;": "\u2ad2",
"ctdot;": "\u22ef",
"cudarrl;": "\u2938",
"cudarrr;": "\u2935",
"cuepr;": "\u22de",
"cuesc;": "\u22df",
"cularr;": "\u21b6",
"cularrp;": "\u293d",
"cup;": "\u222a",
"cupbrcap;": "\u2a48",
"cupcap;": "\u2a46",
"cupcup;": "\u2a4a",
"cupdot;": "\u228d",
"cupor;": "\u2a45",
"cups;": "\u222a\ufe00",
"curarr;": "\u21b7",
"curarrm;": "\u293c",
"curlyeqprec;": "\u22de",
"curlyeqsucc;": "\u22df",
"curlyvee;": "\u22ce",
"curlywedge;": "\u22cf",
"curren": "\xa4",
"curren;": "\xa4",
"curvearrowleft;": "\u21b6",
"curvearrowright;": "\u21b7",
"cuvee;": "\u22ce",
"cuwed;": "\u22cf",
"cwconint;": "\u2232",
"cwint;": "\u2231",
"cylcty;": "\u232d",
"dArr;": "\u21d3",
"dHar;": "\u2965",
"dagger;": "\u2020",
"daleth;": "\u2138",
"darr;": "\u2193",
"dash;": "\u2010",
"dashv;": "\u22a3",
"dbkarow;": "\u290f",
"dblac;": "\u02dd",
"dcaron;": "\u010f",
"dcy;": "\u0434",
"dd;": "\u2146",
"ddagger;": "\u2021",
"ddarr;": "\u21ca",
"ddotseq;": "\u2a77",
"deg": "\xb0",
"deg;": "\xb0",
"delta;": "\u03b4",
"demptyv;": "\u29b1",
"dfisht;": "\u297f",
"dfr;": "\U0001d521",
"dharl;": "\u21c3",
"dharr;": "\u21c2",
"diam;": "\u22c4",
"diamond;": "\u22c4",
"diamondsuit;": "\u2666",
"diams;": "\u2666",
"die;": "\xa8",
"digamma;": "\u03dd",
"disin;": "\u22f2",
"div;": "\xf7",
"divide": "\xf7",
"divide;": "\xf7",
"divideontimes;": "\u22c7",
"divonx;": "\u22c7",
"djcy;": "\u0452",
"dlcorn;": "\u231e",
"dlcrop;": "\u230d",
"dollar;": "$",
"dopf;": "\U0001d555",
"dot;": "\u02d9",
"doteq;": "\u2250",
"doteqdot;": "\u2251",
"dotminus;": "\u2238",
"dotplus;": "\u2214",
"dotsquare;": "\u22a1",
"doublebarwedge;": "\u2306",
"downarrow;": "\u2193",
"downdownarrows;": "\u21ca",
"downharpoonleft;": "\u21c3",
"downharpoonright;": "\u21c2",
"drbkarow;": "\u2910",
"drcorn;": "\u231f",
"drcrop;": "\u230c",
"dscr;": "\U0001d4b9",
"dscy;": "\u0455",
"dsol;": "\u29f6",
"dstrok;": "\u0111",
"dtdot;": "\u22f1",
"dtri;": "\u25bf",
"dtrif;": "\u25be",
"duarr;": "\u21f5",
"duhar;": "\u296f",
"dwangle;": "\u29a6",
"dzcy;": "\u045f",
"dzigrarr;": "\u27ff",
"eDDot;": "\u2a77",
"eDot;": "\u2251",
"eacute": "\xe9",
"eacute;": "\xe9",
"easter;": "\u2a6e",
"ecaron;": "\u011b",
"ecir;": "\u2256",
"ecirc": "\xea",
"ecirc;": "\xea",
"ecolon;": "\u2255",
"ecy;": "\u044d",
"edot;": "\u0117",
"ee;": "\u2147",
"efDot;": "\u2252",
"efr;": "\U0001d522",
"eg;": "\u2a9a",
"egrave": "\xe8",
"egrave;": "\xe8",
"egs;": "\u2a96",
"egsdot;": "\u2a98",
"el;": "\u2a99",
"elinters;": "\u23e7",
"ell;": "\u2113",
"els;": "\u2a95",
"elsdot;": "\u2a97",
"emacr;": "\u0113",
"empty;": "\u2205",
"emptyset;": "\u2205",
"emptyv;": "\u2205",
"emsp13;": "\u2004",
"emsp14;": "\u2005",
"emsp;": "\u2003",
"eng;": "\u014b",
"ensp;": "\u2002",
"eogon;": "\u0119",
"eopf;": "\U0001d556",
"epar;": "\u22d5",
"eparsl;": "\u29e3",
"eplus;": "\u2a71",
"epsi;": "\u03b5",
"epsilon;": "\u03b5",
"epsiv;": "\u03f5",
"eqcirc;": "\u2256",
"eqcolon;": "\u2255",
"eqsim;": "\u2242",
"eqslantgtr;": "\u2a96",
"eqslantless;": "\u2a95",
"equals;": "=",
"equest;": "\u225f",
"equiv;": "\u2261",
"equivDD;": "\u2a78",
"eqvparsl;": "\u29e5",
"erDot;": "\u2253",
"erarr;": "\u2971",
"escr;": "\u212f",
"esdot;": "\u2250",
"esim;": "\u2242",
"eta;": "\u03b7",
"eth": "\xf0",
"eth;": "\xf0",
"euml": "\xeb",
"euml;": "\xeb",
"euro;": "\u20ac",
"excl;": "!",
"exist;": "\u2203",
"expectation;": "\u2130",
"exponentiale;": "\u2147",
"fallingdotseq;": "\u2252",
"fcy;": "\u0444",
"female;": "\u2640",
"ffilig;": "\ufb03",
"fflig;": "\ufb00",
"ffllig;": "\ufb04",
"ffr;": "\U0001d523",
"filig;": "\ufb01",
"fjlig;": "fj",
"flat;": "\u266d",
"fllig;": "\ufb02",
"fltns;": "\u25b1",
"fnof;": "\u0192",
"fopf;": "\U0001d557",
"forall;": "\u2200",
"fork;": "\u22d4",
"forkv;": "\u2ad9",
"fpartint;": "\u2a0d",
"frac12": "\xbd",
"frac12;": "\xbd",
"frac13;": "\u2153",
"frac14": "\xbc",
"frac14;": "\xbc",
"frac15;": "\u2155",
"frac16;": "\u2159",
"frac18;": "\u215b",
"frac23;": "\u2154",
"frac25;": "\u2156",
"frac34": "\xbe",
"frac34;": "\xbe",
"frac35;": "\u2157",
"frac38;": "\u215c",
"frac45;": "\u2158",
"frac56;": "\u215a",
"frac58;": "\u215d",
"frac78;": "\u215e",
"frasl;": "\u2044",
"frown;": "\u2322",
"fscr;": "\U0001d4bb",
"gE;": "\u2267",
"gEl;": "\u2a8c",
"gacute;": "\u01f5",
"gamma;": "\u03b3",
"gammad;": "\u03dd",
"gap;": "\u2a86",
"gbreve;": "\u011f",
"gcirc;": "\u011d",
"gcy;": "\u0433",
"gdot;": "\u0121",
"ge;": "\u2265",
"gel;": "\u22db",
"geq;": "\u2265",
"geqq;": "\u2267",
"geqslant;": "\u2a7e",
"ges;": "\u2a7e",
"gescc;": "\u2aa9",
"gesdot;": "\u2a80",
"gesdoto;": "\u2a82",
"gesdotol;": "\u2a84",
"gesl;": "\u22db\ufe00",
"gesles;": "\u2a94",
"gfr;": "\U0001d524",
"gg;": "\u226b",
"ggg;": "\u22d9",
"gimel;": "\u2137",
"gjcy;": "\u0453",
"gl;": "\u2277",
"glE;": "\u2a92",
"gla;": "\u2aa5",
"glj;": "\u2aa4",
"gnE;": "\u2269",
"gnap;": "\u2a8a",
"gnapprox;": "\u2a8a",
"gne;": "\u2a88",
"gneq;": "\u2a88",
"gneqq;": "\u2269",
"gnsim;": "\u22e7",
"gopf;": "\U0001d558",
"grave;": "`",
"gscr;": "\u210a",
"gsim;": "\u2273",
"gsime;": "\u2a8e",
"gsiml;": "\u2a90",
"gt": ">",
"gt;": ">",
"gtcc;": "\u2aa7",
"gtcir;": "\u2a7a",
"gtdot;": "\u22d7",
"gtlPar;": "\u2995",
"gtquest;": "\u2a7c",
"gtrapprox;": "\u2a86",
"gtrarr;": "\u2978",
"gtrdot;": "\u22d7",
"gtreqless;": "\u22db",
"gtreqqless;": "\u2a8c",
"gtrless;": "\u2277",
"gtrsim;": "\u2273",
"gvertneqq;": "\u2269\ufe00",
"gvnE;": "\u2269\ufe00",
"hArr;": "\u21d4",
"hairsp;": "\u200a",
"half;": "\xbd",
"hamilt;": "\u210b",
"hardcy;": "\u044a",
"harr;": "\u2194",
"harrcir;": "\u2948",
"harrw;": "\u21ad",
"hbar;": "\u210f",
"hcirc;": "\u0125",
"hearts;": "\u2665",
"heartsuit;": "\u2665",
"hellip;": "\u2026",
"hercon;": "\u22b9",
"hfr;": "\U0001d525",
"hksearow;": "\u2925",
"hkswarow;": "\u2926",
"hoarr;": "\u21ff",
"homtht;": "\u223b",
"hookleftarrow;": "\u21a9",
"hookrightarrow;": "\u21aa",
"hopf;": "\U0001d559",
"horbar;": "\u2015",
"hscr;": "\U0001d4bd",
"hslash;": "\u210f",
"hstrok;": "\u0127",
"hybull;": "\u2043",
"hyphen;": "\u2010",
"iacute": "\xed",
"iacute;": "\xed",
"ic;": "\u2063",
"icirc": "\xee",
"icirc;": "\xee",
"icy;": "\u0438",
"iecy;": "\u0435",
"iexcl": "\xa1",
"iexcl;": "\xa1",
"iff;": "\u21d4",
"ifr;": "\U0001d526",
"igrave": "\xec",
"igrave;": "\xec",
"ii;": "\u2148",
"iiiint;": "\u2a0c",
"iiint;": "\u222d",
"iinfin;": "\u29dc",
"iiota;": "\u2129",
"ijlig;": "\u0133",
"imacr;": "\u012b",
"image;": "\u2111",
"imagline;": "\u2110",
"imagpart;": "\u2111",
"imath;": "\u0131",
"imof;": "\u22b7",
"imped;": "\u01b5",
"in;": "\u2208",
"incare;": "\u2105",
"infin;": "\u221e",
"infintie;": "\u29dd",
"inodot;": "\u0131",
"int;": "\u222b",
"intcal;": "\u22ba",
"integers;": "\u2124",
"intercal;": "\u22ba",
"intlarhk;": "\u2a17",
"intprod;": "\u2a3c",
"iocy;": "\u0451",
"iogon;": "\u012f",
"iopf;": "\U0001d55a",
"iota;": "\u03b9",
"iprod;": "\u2a3c",
"iquest": "\xbf",
"iquest;": "\xbf",
"iscr;": "\U0001d4be",
"isin;": "\u2208",
"isinE;": "\u22f9",
"isindot;": "\u22f5",
"isins;": "\u22f4",
"isinsv;": "\u22f3",
"isinv;": "\u2208",
"it;": "\u2062",
"itilde;": "\u0129",
"iukcy;": "\u0456",
"iuml": "\xef",
"iuml;": "\xef",
"jcirc;": "\u0135",
"jcy;": "\u0439",
"jfr;": "\U0001d527",
"jmath;": "\u0237",
"jopf;": "\U0001d55b",
"jscr;": "\U0001d4bf",
"jsercy;": "\u0458",
"jukcy;": "\u0454",
"kappa;": "\u03ba",
"kappav;": "\u03f0",
"kcedil;": "\u0137",
"kcy;": "\u043a",
"kfr;": "\U0001d528",
"kgreen;": "\u0138",
"khcy;": "\u0445",
"kjcy;": "\u045c",
"kopf;": "\U0001d55c",
"kscr;": "\U0001d4c0",
"lAarr;": "\u21da",
"lArr;": "\u21d0",
"lAtail;": "\u291b",
"lBarr;": "\u290e",
"lE;": "\u2266",
"lEg;": "\u2a8b",
"lHar;": "\u2962",
"lacute;": "\u013a",
"laemptyv;": "\u29b4",
"lagran;": "\u2112",
"lambda;": "\u03bb",
"lang;": "\u27e8",
"langd;": "\u2991",
"langle;": "\u27e8",
"lap;": "\u2a85",
"laquo": "\xab",
"laquo;": "\xab",
"larr;": "\u2190",
"larrb;": "\u21e4",
"larrbfs;": "\u291f",
"larrfs;": "\u291d",
"larrhk;": "\u21a9",
"larrlp;": "\u21ab",
"larrpl;": "\u2939",
"larrsim;": "\u2973",
"larrtl;": "\u21a2",
"lat;": "\u2aab",
"latail;": "\u2919",
"late;": "\u2aad",
"lates;": "\u2aad\ufe00",
"lbarr;": "\u290c",
"lbbrk;": "\u2772",
"lbrace;": "{",
"lbrack;": "[",
"lbrke;": "\u298b",
"lbrksld;": "\u298f",
"lbrkslu;": "\u298d",
"lcaron;": "\u013e",
"lcedil;": "\u013c",
"lceil;": "\u2308",
"lcub;": "{",
"lcy;": "\u043b",
"ldca;": "\u2936",
"ldquo;": "\u201c",
"ldquor;": "\u201e",
"ldrdhar;": "\u2967",
"ldrushar;": "\u294b",
"ldsh;": "\u21b2",
"le;": "\u2264",
"leftarrow;": "\u2190",
"leftarrowtail;": "\u21a2",
"leftharpoondown;": "\u21bd",
"leftharpoonup;": "\u21bc",
"leftleftarrows;": "\u21c7",
"leftrightarrow;": "\u2194",
"leftrightarrows;": "\u21c6",
"leftrightharpoons;": "\u21cb",
"leftrightsquigarrow;": "\u21ad",
"leftthreetimes;": "\u22cb",
"leg;": "\u22da",
"leq;": "\u2264",
"leqq;": "\u2266",
"leqslant;": "\u2a7d",
"les;": "\u2a7d",
"lescc;": "\u2aa8",
"lesdot;": "\u2a7f",
"lesdoto;": "\u2a81",
"lesdotor;": "\u2a83",
"lesg;": "\u22da\ufe00",
"lesges;": "\u2a93",
"lessapprox;": "\u2a85",
"lessdot;": "\u22d6",
"lesseqgtr;": "\u22da",
"lesseqqgtr;": "\u2a8b",
"lessgtr;": "\u2276",
"lesssim;": "\u2272",
"lfisht;": "\u297c",
"lfloor;": "\u230a",
"lfr;": "\U0001d529",
"lg;": "\u2276",
"lgE;": "\u2a91",
"lhard;": "\u21bd",
"lharu;": "\u21bc",
"lharul;": "\u296a",
"lhblk;": "\u2584",
"ljcy;": "\u0459",
"ll;": "\u226a",
"llarr;": "\u21c7",
"llcorner;": "\u231e",
"llhard;": "\u296b",
"lltri;": "\u25fa",
"lmidot;": "\u0140",
"lmoust;": "\u23b0",
"lmoustache;": "\u23b0",
"lnE;": "\u2268",
"lnap;": "\u2a89",
"lnapprox;": "\u2a89",
"lne;": "\u2a87",
"lneq;": "\u2a87",
"lneqq;": "\u2268",
"lnsim;": "\u22e6",
"loang;": "\u27ec",
"loarr;": "\u21fd",
"lobrk;": "\u27e6",
"longleftarrow;": "\u27f5",
"longleftrightarrow;": "\u27f7",
"longmapsto;": "\u27fc",
"longrightarrow;": "\u27f6",
"looparrowleft;": "\u21ab",
"looparrowright;": "\u21ac",
"lopar;": "\u2985",
"lopf;": "\U0001d55d",
"loplus;": "\u2a2d",
"lotimes;": "\u2a34",
"lowast;": "\u2217",
"lowbar;": "_",
"loz;": "\u25ca",
"lozenge;": "\u25ca",
"lozf;": "\u29eb",
"lpar;": "(",
"lparlt;": "\u2993",
"lrarr;": "\u21c6",
"lrcorner;": "\u231f",
"lrhar;": "\u21cb",
"lrhard;": "\u296d",
"lrm;": "\u200e",
"lrtri;": "\u22bf",
"lsaquo;": "\u2039",
"lscr;": "\U0001d4c1",
"lsh;": "\u21b0",
"lsim;": "\u2272",
"lsime;": "\u2a8d",
"lsimg;": "\u2a8f",
"lsqb;": "[",
"lsquo;": "\u2018",
"lsquor;": "\u201a",
"lstrok;": "\u0142",
"lt": "<",
"lt;": "<",
"ltcc;": "\u2aa6",
"ltcir;": "\u2a79",
"ltdot;": "\u22d6",
"lthree;": "\u22cb",
"ltimes;": "\u22c9",
"ltlarr;": "\u2976",
"ltquest;": "\u2a7b",
"ltrPar;": "\u2996",
"ltri;": "\u25c3",
"ltrie;": "\u22b4",
"ltrif;": "\u25c2",
"lurdshar;": "\u294a",
"luruhar;": "\u2966",
"lvertneqq;": "\u2268\ufe00",
"lvnE;": "\u2268\ufe00",
"mDDot;": "\u223a",
"macr": "\xaf",
"macr;": "\xaf",
"male;": "\u2642",
"malt;": "\u2720",
"maltese;": "\u2720",
"map;": "\u21a6",
"mapsto;": "\u21a6",
"mapstodown;": "\u21a7",
"mapstoleft;": "\u21a4",
"mapstoup;": "\u21a5",
"marker;": "\u25ae",
"mcomma;": "\u2a29",
"mcy;": "\u043c",
"mdash;": "\u2014",
"measuredangle;": "\u2221",
"mfr;": "\U0001d52a",
"mho;": "\u2127",
"micro": "\xb5",
"micro;": "\xb5",
"mid;": "\u2223",
"midast;": "*",
"midcir;": "\u2af0",
"middot": "\xb7",
"middot;": "\xb7",
"minus;": "\u2212",
"minusb;": "\u229f",
"minusd;": "\u2238",
"minusdu;": "\u2a2a",
"mlcp;": "\u2adb",
"mldr;": "\u2026",
"mnplus;": "\u2213",
"models;": "\u22a7",
"mopf;": "\U0001d55e",
"mp;": "\u2213",
"mscr;": "\U0001d4c2",
"mstpos;": "\u223e",
"mu;": "\u03bc",
"multimap;": "\u22b8",
"mumap;": "\u22b8",
"nGg;": "\u22d9\u0338",
"nGt;": "\u226b\u20d2",
"nGtv;": "\u226b\u0338",
"nLeftarrow;": "\u21cd",
"nLeftrightarrow;": "\u21ce",
"nLl;": "\u22d8\u0338",
"nLt;": "\u226a\u20d2",
"nLtv;": "\u226a\u0338",
"nRightarrow;": "\u21cf",
"nVDash;": "\u22af",
"nVdash;": "\u22ae",
"nabla;": "\u2207",
"nacute;": "\u0144",
"nang;": "\u2220\u20d2",
"nap;": "\u2249",
"napE;": "\u2a70\u0338",
"napid;": "\u224b\u0338",
"napos;": "\u0149",
"napprox;": "\u2249",
"natur;": "\u266e",
"natural;": "\u266e",
"naturals;": "\u2115",
"nbsp": "\xa0",
"nbsp;": "\xa0",
"nbump;": "\u224e\u0338",
"nbumpe;": "\u224f\u0338",
"ncap;": "\u2a43",
"ncaron;": "\u0148",
"ncedil;": "\u0146",
"ncong;": "\u2247",
"ncongdot;": "\u2a6d\u0338",
"ncup;": "\u2a42",
"ncy;": "\u043d",
"ndash;": "\u2013",
"ne;": "\u2260",
"neArr;": "\u21d7",
"nearhk;": "\u2924",
"nearr;": "\u2197",
"nearrow;": "\u2197",
"nedot;": "\u2250\u0338",
"nequiv;": "\u2262",
"nesear;": "\u2928",
"nesim;": "\u2242\u0338",
"nexist;": "\u2204",
"nexists;": "\u2204",
"nfr;": "\U0001d52b",
"ngE;": "\u2267\u0338",
"nge;": "\u2271",
"ngeq;": "\u2271",
"ngeqq;": "\u2267\u0338",
"ngeqslant;": "\u2a7e\u0338",
"nges;": "\u2a7e\u0338",
"ngsim;": "\u2275",
"ngt;": "\u226f",
"ngtr;": "\u226f",
"nhArr;": "\u21ce",
"nharr;": "\u21ae",
"nhpar;": "\u2af2",
"ni;": "\u220b",
"nis;": "\u22fc",
"nisd;": "\u22fa",
"niv;": "\u220b",
"njcy;": "\u045a",
"nlArr;": "\u21cd",
"nlE;": "\u2266\u0338",
"nlarr;": "\u219a",
"nldr;": "\u2025",
"nle;": "\u2270",
"nleftarrow;": "\u219a",
"nleftrightarrow;": "\u21ae",
"nleq;": "\u2270",
"nleqq;": "\u2266\u0338",
"nleqslant;": "\u2a7d\u0338",
"nles;": "\u2a7d\u0338",
"nless;": "\u226e",
"nlsim;": "\u2274",
"nlt;": "\u226e",
"nltri;": "\u22ea",
"nltrie;": "\u22ec",
"nmid;": "\u2224",
"nopf;": "\U0001d55f",
"not": "\xac",
"not;": "\xac",
"notin;": "\u2209",
"notinE;": "\u22f9\u0338",
"notindot;": "\u22f5\u0338",
"notinva;": "\u2209",
"notinvb;": "\u22f7",
"notinvc;": "\u22f6",
"notni;": "\u220c",
"notniva;": "\u220c",
"notnivb;": "\u22fe",
"notnivc;": "\u22fd",
"npar;": "\u2226",
"nparallel;": "\u2226",
"nparsl;": "\u2afd\u20e5",
"npart;": "\u2202\u0338",
"npolint;": "\u2a14",
"npr;": "\u2280",
"nprcue;": "\u22e0",
"npre;": "\u2aaf\u0338",
"nprec;": "\u2280",
"npreceq;": "\u2aaf\u0338",
"nrArr;": "\u21cf",
"nrarr;": "\u219b",
"nrarrc;": "\u2933\u0338",
"nrarrw;": "\u219d\u0338",
"nrightarrow;": "\u219b",
"nrtri;": "\u22eb",
"nrtrie;": "\u22ed",
"nsc;": "\u2281",
"nsccue;": "\u22e1",
"nsce;": "\u2ab0\u0338",
"nscr;": "\U0001d4c3",
"nshortmid;": "\u2224",
"nshortparallel;": "\u2226",
"nsim;": "\u2241",
"nsime;": "\u2244",
"nsimeq;": "\u2244",
"nsmid;": "\u2224",
"nspar;": "\u2226",
"nsqsube;": "\u22e2",
"nsqsupe;": "\u22e3",
"nsub;": "\u2284",
"nsubE;": "\u2ac5\u0338",
"nsube;": "\u2288",
"nsubset;": "\u2282\u20d2",
"nsubseteq;": "\u2288",
"nsubseteqq;": "\u2ac5\u0338",
"nsucc;": "\u2281",
"nsucceq;": "\u2ab0\u0338",
"nsup;": "\u2285",
"nsupE;": "\u2ac6\u0338",
"nsupe;": "\u2289",
"nsupset;": "\u2283\u20d2",
"nsupseteq;": "\u2289",
"nsupseteqq;": "\u2ac6\u0338",
"ntgl;": "\u2279",
"ntilde": "\xf1",
"ntilde;": "\xf1",
"ntlg;": "\u2278",
"ntriangleleft;": "\u22ea",
"ntrianglelefteq;": "\u22ec",
"ntriangleright;": "\u22eb",
"ntrianglerighteq;": "\u22ed",
"nu;": "\u03bd",
"num;": "#",
"numero;": "\u2116",
"numsp;": "\u2007",
"nvDash;": "\u22ad",
"nvHarr;": "\u2904",
"nvap;": "\u224d\u20d2",
"nvdash;": "\u22ac",
"nvge;": "\u2265\u20d2",
"nvgt;": ">\u20d2",
"nvinfin;": "\u29de",
"nvlArr;": "\u2902",
"nvle;": "\u2264\u20d2",
"nvlt;": "<\u20d2",
"nvltrie;": "\u22b4\u20d2",
"nvrArr;": "\u2903",
"nvrtrie;": "\u22b5\u20d2",
"nvsim;": "\u223c\u20d2",
"nwArr;": "\u21d6",
"nwarhk;": "\u2923",
"nwarr;": "\u2196",
"nwarrow;": "\u2196",
"nwnear;": "\u2927",
"oS;": "\u24c8",
"oacute": "\xf3",
"oacute;": "\xf3",
"oast;": "\u229b",
"ocir;": "\u229a",
"ocirc": "\xf4",
"ocirc;": "\xf4",
"ocy;": "\u043e",
"odash;": "\u229d",
"odblac;": "\u0151",
"odiv;": "\u2a38",
"odot;": "\u2299",
"odsold;": "\u29bc",
"oelig;": "\u0153",
"ofcir;": "\u29bf",
"ofr;": "\U0001d52c",
"ogon;": "\u02db",
"ograve": "\xf2",
"ograve;": "\xf2",
"ogt;": "\u29c1",
"ohbar;": "\u29b5",
"ohm;": "\u03a9",
"oint;": "\u222e",
"olarr;": "\u21ba",
"olcir;": "\u29be",
"olcross;": "\u29bb",
"oline;": "\u203e",
"olt;": "\u29c0",
"omacr;": "\u014d",
"omega;": "\u03c9",
"omicron;": "\u03bf",
"omid;": "\u29b6",
"ominus;": "\u2296",
"oopf;": "\U0001d560",
"opar;": "\u29b7",
"operp;": "\u29b9",
"oplus;": "\u2295",
"or;": "\u2228",
"orarr;": "\u21bb",
"ord;": "\u2a5d",
"order;": "\u2134",
"orderof;": "\u2134",
"ordf": "\xaa",
"ordf;": "\xaa",
"ordm": "\xba",
"ordm;": "\xba",
"origof;": "\u22b6",
"oror;": "\u2a56",
"orslope;": "\u2a57",
"orv;": "\u2a5b",
"oscr;": "\u2134",
"oslash": "\xf8",
"oslash;": "\xf8",
"osol;": "\u2298",
"otilde": "\xf5",
"otilde;": "\xf5",
"otimes;": "\u2297",
"otimesas;": "\u2a36",
"ouml": "\xf6",
"ouml;": "\xf6",
"ovbar;": "\u233d",
"par;": "\u2225",
"para": "\xb6",
"para;": "\xb6",
"parallel;": "\u2225",
"parsim;": "\u2af3",
"parsl;": "\u2afd",
"part;": "\u2202",
"pcy;": "\u043f",
"percnt;": "%",
"period;": ".",
"permil;": "\u2030",
"perp;": "\u22a5",
"pertenk;": "\u2031",
"pfr;": "\U0001d52d",
"phi;": "\u03c6",
"phiv;": "\u03d5",
"phmmat;": "\u2133",
"phone;": "\u260e",
"pi;": "\u03c0",
"pitchfork;": "\u22d4",
"piv;": "\u03d6",
"planck;": "\u210f",
"planckh;": "\u210e",
"plankv;": "\u210f",
"plus;": "+",
"plusacir;": "\u2a23",
"plusb;": "\u229e",
"pluscir;": "\u2a22",
"plusdo;": "\u2214",
"plusdu;": "\u2a25",
"pluse;": "\u2a72",
"plusmn": "\xb1",
"plusmn;": "\xb1",
"plussim;": "\u2a26",
"plustwo;": "\u2a27",
"pm;": "\xb1",
"pointint;": "\u2a15",
"popf;": "\U0001d561",
"pound": "\xa3",
"pound;": "\xa3",
"pr;": "\u227a",
"prE;": "\u2ab3",
"prap;": "\u2ab7",
"prcue;": "\u227c",
"pre;": "\u2aaf",
"prec;": "\u227a",
"precapprox;": "\u2ab7",
"preccurlyeq;": "\u227c",
"preceq;": "\u2aaf",
"precnapprox;": "\u2ab9",
"precneqq;": "\u2ab5",
"precnsim;": "\u22e8",
"precsim;": "\u227e",
"prime;": "\u2032",
"primes;": "\u2119",
"prnE;": "\u2ab5",
"prnap;": "\u2ab9",
"prnsim;": "\u22e8",
"prod;": "\u220f",
"profalar;": "\u232e",
"profline;": "\u2312",
"profsurf;": "\u2313",
"prop;": "\u221d",
"propto;": "\u221d",
"prsim;": "\u227e",
"prurel;": "\u22b0",
"pscr;": "\U0001d4c5",
"psi;": "\u03c8",
"puncsp;": "\u2008",
"qfr;": "\U0001d52e",
"qint;": "\u2a0c",
"qopf;": "\U0001d562",
"qprime;": "\u2057",
"qscr;": "\U0001d4c6",
"quaternions;": "\u210d",
"quatint;": "\u2a16",
"quest;": "?",
"questeq;": "\u225f",
"quot": "\"",
"quot;": "\"",
"rAarr;": "\u21db",
"rArr;": "\u21d2",
"rAtail;": "\u291c",
"rBarr;": "\u290f",
"rHar;": "\u2964",
"race;": "\u223d\u0331",
"racute;": "\u0155",
"radic;": "\u221a",
"raemptyv;": "\u29b3",
"rang;": "\u27e9",
"rangd;": "\u2992",
"range;": "\u29a5",
"rangle;": "\u27e9",
"raquo": "\xbb",
"raquo;": "\xbb",
"rarr;": "\u2192",
"rarrap;": "\u2975",
"rarrb;": "\u21e5",
"rarrbfs;": "\u2920",
"rarrc;": "\u2933",
"rarrfs;": "\u291e",
"rarrhk;": "\u21aa",
"rarrlp;": "\u21ac",
"rarrpl;": "\u2945",
"rarrsim;": "\u2974",
"rarrtl;": "\u21a3",
"rarrw;": "\u219d",
"ratail;": "\u291a",
"ratio;": "\u2236",
"rationals;": "\u211a",
"rbarr;": "\u290d",
"rbbrk;": "\u2773",
"rbrace;": "}",
"rbrack;": "]",
"rbrke;": "\u298c",
"rbrksld;": "\u298e",
"rbrkslu;": "\u2990",
"rcaron;": "\u0159",
"rcedil;": "\u0157",
"rceil;": "\u2309",
"rcub;": "}",
"rcy;": "\u0440",
"rdca;": "\u2937",
"rdldhar;": "\u2969",
"rdquo;": "\u201d",
"rdquor;": "\u201d",
"rdsh;": "\u21b3",
"real;": "\u211c",
"realine;": "\u211b",
"realpart;": "\u211c",
"reals;": "\u211d",
"rect;": "\u25ad",
"reg": "\xae",
"reg;": "\xae",
"rfisht;": "\u297d",
"rfloor;": "\u230b",
"rfr;": "\U0001d52f",
"rhard;": "\u21c1",
"rharu;": "\u21c0",
"rharul;": "\u296c",
"rho;": "\u03c1",
"rhov;": "\u03f1",
"rightarrow;": "\u2192",
"rightarrowtail;": "\u21a3",
"rightharpoondown;": "\u21c1",
"rightharpoonup;": "\u21c0",
"rightleftarrows;": "\u21c4",
"rightleftharpoons;": "\u21cc",
"rightrightarrows;": "\u21c9",
"rightsquigarrow;": "\u219d",
"rightthreetimes;": "\u22cc",
"ring;": "\u02da",
"risingdotseq;": "\u2253",
"rlarr;": "\u21c4",
"rlhar;": "\u21cc",
"rlm;": "\u200f",
"rmoust;": "\u23b1",
"rmoustache;": "\u23b1",
"rnmid;": "\u2aee",
"roang;": "\u27ed",
"roarr;": "\u21fe",
"robrk;": "\u27e7",
"ropar;": "\u2986",
"ropf;": "\U0001d563",
"roplus;": "\u2a2e",
"rotimes;": "\u2a35",
"rpar;": ")",
"rpargt;": "\u2994",
"rppolint;": "\u2a12",
"rrarr;": "\u21c9",
"rsaquo;": "\u203a",
"rscr;": "\U0001d4c7",
"rsh;": "\u21b1",
"rsqb;": "]",
"rsquo;": "\u2019",
"rsquor;": "\u2019",
"rthree;": "\u22cc",
"rtimes;": "\u22ca",
"rtri;": "\u25b9",
"rtrie;": "\u22b5",
"rtrif;": "\u25b8",
"rtriltri;": "\u29ce",
"ruluhar;": "\u2968",
"rx;": "\u211e",
"sacute;": "\u015b",
"sbquo;": "\u201a",
"sc;": "\u227b",
"scE;": "\u2ab4",
"scap;": "\u2ab8",
"scaron;": "\u0161",
"sccue;": "\u227d",
"sce;": "\u2ab0",
"scedil;": "\u015f",
"scirc;": "\u015d",
"scnE;": "\u2ab6",
"scnap;": "\u2aba",
"scnsim;": "\u22e9",
"scpolint;": "\u2a13",
"scsim;": "\u227f",
"scy;": "\u0441",
"sdot;": "\u22c5",
"sdotb;": "\u22a1",
"sdote;": "\u2a66",
"seArr;": "\u21d8",
"searhk;": "\u2925",
"searr;": "\u2198",
"searrow;": "\u2198",
"sect": "\xa7",
"sect;": "\xa7",
"semi;": ";",
"seswar;": "\u2929",
"setminus;": "\u2216",
"setmn;": "\u2216",
"sext;": "\u2736",
"sfr;": "\U0001d530",
"sfrown;": "\u2322",
"sharp;": "\u266f",
"shchcy;": "\u0449",
"shcy;": "\u0448",
"shortmid;": "\u2223",
"shortparallel;": "\u2225",
"shy": "\xad",
"shy;": "\xad",
"sigma;": "\u03c3",
"sigmaf;": "\u03c2",
"sigmav;": "\u03c2",
"sim;": "\u223c",
"simdot;": "\u2a6a",
"sime;": "\u2243",
"simeq;": "\u2243",
"simg;": "\u2a9e",
"simgE;": "\u2aa0",
"siml;": "\u2a9d",
"simlE;": "\u2a9f",
"simne;": "\u2246",
"simplus;": "\u2a24",
"simrarr;": "\u2972",
"slarr;": "\u2190",
"smallsetminus;": "\u2216",
"smashp;": "\u2a33",
"smeparsl;": "\u29e4",
"smid;": "\u2223",
"smile;": "\u2323",
"smt;": "\u2aaa",
"smte;": "\u2aac",
"smtes;": "\u2aac\ufe00",
"softcy;": "\u044c",
"sol;": "/",
"solb;": "\u29c4",
"solbar;": "\u233f",
"sopf;": "\U0001d564",
"spades;": "\u2660",
"spadesuit;": "\u2660",
"spar;": "\u2225",
"sqcap;": "\u2293",
"sqcaps;": "\u2293\ufe00",
"sqcup;": "\u2294",
"sqcups;": "\u2294\ufe00",
"sqsub;": "\u228f",
"sqsube;": "\u2291",
"sqsubset;": "\u228f",
"sqsubseteq;": "\u2291",
"sqsup;": "\u2290",
"sqsupe;": "\u2292",
"sqsupset;": "\u2290",
"sqsupseteq;": "\u2292",
"squ;": "\u25a1",
"square;": "\u25a1",
"squarf;": "\u25aa",
"squf;": "\u25aa",
"srarr;": "\u2192",
"sscr;": "\U0001d4c8",
"ssetmn;": "\u2216",
"ssmile;": "\u2323",
"sstarf;": "\u22c6",
"star;": "\u2606",
"starf;": "\u2605",
"straightepsilon;": "\u03f5",
"straightphi;": "\u03d5",
"strns;": "\xaf",
"sub;": "\u2282",
"subE;": "\u2ac5",
"subdot;": "\u2abd",
"sube;": "\u2286",
"subedot;": "\u2ac3",
"submult;": "\u2ac1",
"subnE;": "\u2acb",
"subne;": "\u228a",
"subplus;": "\u2abf",
"subrarr;": "\u2979",
"subset;": "\u2282",
"subseteq;": "\u2286",
"subseteqq;": "\u2ac5",
"subsetneq;": "\u228a",
"subsetneqq;": "\u2acb",
"subsim;": "\u2ac7",
"subsub;": "\u2ad5",
"subsup;": "\u2ad3",
"succ;": "\u227b",
"succapprox;": "\u2ab8",
"succcurlyeq;": "\u227d",
"succeq;": "\u2ab0",
"succnapprox;": "\u2aba",
"succneqq;": "\u2ab6",
"succnsim;": "\u22e9",
"succsim;": "\u227f",
"sum;": "\u2211",
"sung;": "\u266a",
"sup1": "\xb9",
"sup1;": "\xb9",
"sup2": "\xb2",
"sup2;": "\xb2",
"sup3": "\xb3",
"sup3;": "\xb3",
"sup;": "\u2283",
"supE;": "\u2ac6",
"supdot;": "\u2abe",
"supdsub;": "\u2ad8",
"supe;": "\u2287",
"supedot;": "\u2ac4",
"suphsol;": "\u27c9",
"suphsub;": "\u2ad7",
"suplarr;": "\u297b",
"supmult;": "\u2ac2",
"supnE;": "\u2acc",
"supne;": "\u228b",
"supplus;": "\u2ac0",
"supset;": "\u2283",
"supseteq;": "\u2287",
"supseteqq;": "\u2ac6",
"supsetneq;": "\u228b",
"supsetneqq;": "\u2acc",
"supsim;": "\u2ac8",
"supsub;": "\u2ad4",
"supsup;": "\u2ad6",
"swArr;": "\u21d9",
"swarhk;": "\u2926",
"swarr;": "\u2199",
"swarrow;": "\u2199",
"swnwar;": "\u292a",
"szlig": "\xdf",
"szlig;": "\xdf",
"target;": "\u2316",
"tau;": "\u03c4",
"tbrk;": "\u23b4",
"tcaron;": "\u0165",
"tcedil;": "\u0163",
"tcy;": "\u0442",
"tdot;": "\u20db",
"telrec;": "\u2315",
"tfr;": "\U0001d531",
"there4;": "\u2234",
"therefore;": "\u2234",
"theta;": "\u03b8",
"thetasym;": "\u03d1",
"thetav;": "\u03d1",
"thickapprox;": "\u2248",
"thicksim;": "\u223c",
"thinsp;": "\u2009",
"thkap;": "\u2248",
"thksim;": "\u223c",
"thorn": "\xfe",
"thorn;": "\xfe",
"tilde;": "\u02dc",
"times": "\xd7",
"times;": "\xd7",
"timesb;": "\u22a0",
"timesbar;": "\u2a31",
"timesd;": "\u2a30",
"tint;": "\u222d",
"toea;": "\u2928",
"top;": "\u22a4",
"topbot;": "\u2336",
"topcir;": "\u2af1",
"topf;": "\U0001d565",
"topfork;": "\u2ada",
"tosa;": "\u2929",
"tprime;": "\u2034",
"trade;": "\u2122",
"triangle;": "\u25b5",
"triangledown;": "\u25bf",
"triangleleft;": "\u25c3",
"trianglelefteq;": "\u22b4",
"triangleq;": "\u225c",
"triangleright;": "\u25b9",
"trianglerighteq;": "\u22b5",
"tridot;": "\u25ec",
"trie;": "\u225c",
"triminus;": "\u2a3a",
"triplus;": "\u2a39",
"trisb;": "\u29cd",
"tritime;": "\u2a3b",
"trpezium;": "\u23e2",
"tscr;": "\U0001d4c9",
"tscy;": "\u0446",
"tshcy;": "\u045b",
"tstrok;": "\u0167",
"twixt;": "\u226c",
"twoheadleftarrow;": "\u219e",
"twoheadrightarrow;": "\u21a0",
"uArr;": "\u21d1",
"uHar;": "\u2963",
"uacute": "\xfa",
"uacute;": "\xfa",
"uarr;": "\u2191",
"ubrcy;": "\u045e",
"ubreve;": "\u016d",
"ucirc": "\xfb",
"ucirc;": "\xfb",
"ucy;": "\u0443",
"udarr;": "\u21c5",
"udblac;": "\u0171",
"udhar;": "\u296e",
"ufisht;": "\u297e",
"ufr;": "\U0001d532",
"ugrave": "\xf9",
"ugrave;": "\xf9",
"uharl;": "\u21bf",
"uharr;": "\u21be",
"uhblk;": "\u2580",
"ulcorn;": "\u231c",
"ulcorner;": "\u231c",
"ulcrop;": "\u230f",
"ultri;": "\u25f8",
"umacr;": "\u016b",
"uml": "\xa8",
"uml;": "\xa8",
"uogon;": "\u0173",
"uopf;": "\U0001d566",
"uparrow;": "\u2191",
"updownarrow;": "\u2195",
"upharpoonleft;": "\u21bf",
"upharpoonright;": "\u21be",
"uplus;": "\u228e",
"upsi;": "\u03c5",
"upsih;": "\u03d2",
"upsilon;": "\u03c5",
"upuparrows;": "\u21c8",
"urcorn;": "\u231d",
"urcorner;": "\u231d",
"urcrop;": "\u230e",
"uring;": "\u016f",
"urtri;": "\u25f9",
"uscr;": "\U0001d4ca",
"utdot;": "\u22f0",
"utilde;": "\u0169",
"utri;": "\u25b5",
"utrif;": "\u25b4",
"uuarr;": "\u21c8",
"uuml": "\xfc",
"uuml;": "\xfc",
"uwangle;": "\u29a7",
"vArr;": "\u21d5",
"vBar;": "\u2ae8",
"vBarv;": "\u2ae9",
"vDash;": "\u22a8",
"vangrt;": "\u299c",
"varepsilon;": "\u03f5",
"varkappa;": "\u03f0",
"varnothing;": "\u2205",
"varphi;": "\u03d5",
"varpi;": "\u03d6",
"varpropto;": "\u221d",
"varr;": "\u2195",
"varrho;": "\u03f1",
"varsigma;": "\u03c2",
"varsubsetneq;": "\u228a\ufe00",
"varsubsetneqq;": "\u2acb\ufe00",
"varsupsetneq;": "\u228b\ufe00",
"varsupsetneqq;": "\u2acc\ufe00",
"vartheta;": "\u03d1",
"vartriangleleft;": "\u22b2",
"vartriangleright;": "\u22b3",
"vcy;": "\u0432",
"vdash;": "\u22a2",
"vee;": "\u2228",
"veebar;": "\u22bb",
"veeeq;": "\u225a",
"vellip;": "\u22ee",
"verbar;": "|",
"vert;": "|",
"vfr;": "\U0001d533",
"vltri;": "\u22b2",
"vnsub;": "\u2282\u20d2",
"vnsup;": "\u2283\u20d2",
"vopf;": "\U0001d567",
"vprop;": "\u221d",
"vrtri;": "\u22b3",
"vscr;": "\U0001d4cb",
"vsubnE;": "\u2acb\ufe00",
"vsubne;": "\u228a\ufe00",
"vsupnE;": "\u2acc\ufe00",
"vsupne;": "\u228b\ufe00",
"vzigzag;": "\u299a",
"wcirc;": "\u0175",
"wedbar;": "\u2a5f",
"wedge;": "\u2227",
"wedgeq;": "\u2259",
"weierp;": "\u2118",
"wfr;": "\U0001d534",
"wopf;": "\U0001d568",
"wp;": "\u2118",
"wr;": "\u2240",
"wreath;": "\u2240",
"wscr;": "\U0001d4cc",
"xcap;": "\u22c2",
"xcirc;": "\u25ef",
"xcup;": "\u22c3",
"xdtri;": "\u25bd",
"xfr;": "\U0001d535",
"xhArr;": "\u27fa",
"xharr;": "\u27f7",
"xi;": "\u03be",
"xlArr;": "\u27f8",
"xlarr;": "\u27f5",
"xmap;": "\u27fc",
"xnis;": "\u22fb",
"xodot;": "\u2a00",
"xopf;": "\U0001d569",
"xoplus;": "\u2a01",
"xotime;": "\u2a02",
"xrArr;": "\u27f9",
"xrarr;": "\u27f6",
"xscr;": "\U0001d4cd",
"xsqcup;": "\u2a06",
"xuplus;": "\u2a04",
"xutri;": "\u25b3",
"xvee;": "\u22c1",
"xwedge;": "\u22c0",
"yacute": "\xfd",
"yacute;": "\xfd",
"yacy;": "\u044f",
"ycirc;": "\u0177",
"ycy;": "\u044b",
"yen": "\xa5",
"yen;": "\xa5",
"yfr;": "\U0001d536",
"yicy;": "\u0457",
"yopf;": "\U0001d56a",
"yscr;": "\U0001d4ce",
"yucy;": "\u044e",
"yuml": "\xff",
"yuml;": "\xff",
"zacute;": "\u017a",
"zcaron;": "\u017e",
"zcy;": "\u0437",
"zdot;": "\u017c",
"zeetrf;": "\u2128",
"zeta;": "\u03b6",
"zfr;": "\U0001d537",
"zhcy;": "\u0436",
"zigrarr;": "\u21dd",
"zopf;": "\U0001d56b",
"zscr;": "\U0001d4cf",
"zwj;": "\u200d",
"zwnj;": "\u200c",
}
# Replacements applied when a numeric character reference resolves to one of
# these code points: NUL becomes U+FFFD, and the 0x80-0x9F range is mapped
# through the windows-1252 repertoire (per the HTML5 numeric character
# reference error-handling table).  0x0D maps to itself (kept for table
# completeness).
# Fix: the original literal listed the key 0x81 twice; the duplicate entry
# (identical value, silently overwritten) has been removed.
replacementCharacters = {
    0x0: "\uFFFD",
    0x0d: "\u000D",
    0x80: "\u20AC",
    0x81: "\u0081",
    0x82: "\u201A",
    0x83: "\u0192",
    0x84: "\u201E",
    0x85: "\u2026",
    0x86: "\u2020",
    0x87: "\u2021",
    0x88: "\u02C6",
    0x89: "\u2030",
    0x8A: "\u0160",
    0x8B: "\u2039",
    0x8C: "\u0152",
    0x8D: "\u008D",
    0x8E: "\u017D",
    0x8F: "\u008F",
    0x90: "\u0090",
    0x91: "\u2018",
    0x92: "\u2019",
    0x93: "\u201C",
    0x94: "\u201D",
    0x95: "\u2022",
    0x96: "\u2013",
    0x97: "\u2014",
    0x98: "\u02DC",
    0x99: "\u2122",
    0x9A: "\u0161",
    0x9B: "\u203A",
    0x9C: "\u0153",
    0x9D: "\u009D",
    0x9E: "\u017E",
    0x9F: "\u0178",
}
# Map from normalized encoding labels (lowercased, punctuation stripped) to
# the canonical Python codec name used to decode a document that declares
# that label.  NOTE(review): several labels deliberately map to a superset
# codec (e.g. 'latin1'/'iso88591' -> 'windows-1252', 'iso88599' ->
# 'windows-1254'), which matches HTML encoding-sniffing practice — confirm
# against the html5lib upstream table before altering any entry.
encodings = {
    '437': 'cp437',
    '850': 'cp850',
    '852': 'cp852',
    '855': 'cp855',
    '857': 'cp857',
    '860': 'cp860',
    '861': 'cp861',
    '862': 'cp862',
    '863': 'cp863',
    '865': 'cp865',
    '866': 'cp866',
    '869': 'cp869',
    'ansix341968': 'ascii',
    'ansix341986': 'ascii',
    'arabic': 'iso8859-6',
    'ascii': 'ascii',
    'asmo708': 'iso8859-6',
    'big5': 'big5',
    'big5hkscs': 'big5hkscs',
    'chinese': 'gbk',
    'cp037': 'cp037',
    'cp1026': 'cp1026',
    'cp154': 'ptcp154',
    'cp367': 'ascii',
    'cp424': 'cp424',
    'cp437': 'cp437',
    'cp500': 'cp500',
    'cp775': 'cp775',
    'cp819': 'windows-1252',
    'cp850': 'cp850',
    'cp852': 'cp852',
    'cp855': 'cp855',
    'cp857': 'cp857',
    'cp860': 'cp860',
    'cp861': 'cp861',
    'cp862': 'cp862',
    'cp863': 'cp863',
    'cp864': 'cp864',
    'cp865': 'cp865',
    'cp866': 'cp866',
    'cp869': 'cp869',
    'cp936': 'gbk',
    'cpgr': 'cp869',
    'cpis': 'cp861',
    'csascii': 'ascii',
    'csbig5': 'big5',
    'cseuckr': 'cp949',
    'cseucpkdfmtjapanese': 'euc_jp',
    'csgb2312': 'gbk',
    'cshproman8': 'hp-roman8',
    'csibm037': 'cp037',
    'csibm1026': 'cp1026',
    'csibm424': 'cp424',
    'csibm500': 'cp500',
    'csibm855': 'cp855',
    'csibm857': 'cp857',
    'csibm860': 'cp860',
    'csibm861': 'cp861',
    'csibm863': 'cp863',
    'csibm864': 'cp864',
    'csibm865': 'cp865',
    'csibm866': 'cp866',
    'csibm869': 'cp869',
    'csiso2022jp': 'iso2022_jp',
    'csiso2022jp2': 'iso2022_jp_2',
    'csiso2022kr': 'iso2022_kr',
    'csiso58gb231280': 'gbk',
    'csisolatin1': 'windows-1252',
    'csisolatin2': 'iso8859-2',
    'csisolatin3': 'iso8859-3',
    'csisolatin4': 'iso8859-4',
    'csisolatin5': 'windows-1254',
    'csisolatin6': 'iso8859-10',
    'csisolatinarabic': 'iso8859-6',
    'csisolatincyrillic': 'iso8859-5',
    'csisolatingreek': 'iso8859-7',
    'csisolatinhebrew': 'iso8859-8',
    'cskoi8r': 'koi8-r',
    'csksc56011987': 'cp949',
    'cspc775baltic': 'cp775',
    'cspc850multilingual': 'cp850',
    'cspc862latinhebrew': 'cp862',
    'cspc8codepage437': 'cp437',
    'cspcp852': 'cp852',
    'csptcp154': 'ptcp154',
    'csshiftjis': 'shift_jis',
    'csunicode11utf7': 'utf-7',
    'cyrillic': 'iso8859-5',
    'cyrillicasian': 'ptcp154',
    'ebcdiccpbe': 'cp500',
    'ebcdiccpca': 'cp037',
    'ebcdiccpch': 'cp500',
    'ebcdiccphe': 'cp424',
    'ebcdiccpnl': 'cp037',
    'ebcdiccpus': 'cp037',
    'ebcdiccpwt': 'cp037',
    'ecma114': 'iso8859-6',
    'ecma118': 'iso8859-7',
    'elot928': 'iso8859-7',
    'eucjp': 'euc_jp',
    'euckr': 'cp949',
    'extendedunixcodepackedformatforjapanese': 'euc_jp',
    'gb18030': 'gb18030',
    'gb2312': 'gbk',
    'gb231280': 'gbk',
    'gbk': 'gbk',
    'greek': 'iso8859-7',
    'greek8': 'iso8859-7',
    'hebrew': 'iso8859-8',
    'hproman8': 'hp-roman8',
    'hzgb2312': 'hz',
    'ibm037': 'cp037',
    'ibm1026': 'cp1026',
    'ibm367': 'ascii',
    'ibm424': 'cp424',
    'ibm437': 'cp437',
    'ibm500': 'cp500',
    'ibm775': 'cp775',
    'ibm819': 'windows-1252',
    'ibm850': 'cp850',
    'ibm852': 'cp852',
    'ibm855': 'cp855',
    'ibm857': 'cp857',
    'ibm860': 'cp860',
    'ibm861': 'cp861',
    'ibm862': 'cp862',
    'ibm863': 'cp863',
    'ibm864': 'cp864',
    'ibm865': 'cp865',
    'ibm866': 'cp866',
    'ibm869': 'cp869',
    'iso2022jp': 'iso2022_jp',
    'iso2022jp2': 'iso2022_jp_2',
    'iso2022kr': 'iso2022_kr',
    'iso646irv1991': 'ascii',
    'iso646us': 'ascii',
    'iso88591': 'windows-1252',
    'iso885910': 'iso8859-10',
    'iso8859101992': 'iso8859-10',
    'iso885911987': 'windows-1252',
    'iso885913': 'iso8859-13',
    'iso885914': 'iso8859-14',
    'iso8859141998': 'iso8859-14',
    'iso885915': 'iso8859-15',
    'iso885916': 'iso8859-16',
    'iso8859162001': 'iso8859-16',
    'iso88592': 'iso8859-2',
    'iso885921987': 'iso8859-2',
    'iso88593': 'iso8859-3',
    'iso885931988': 'iso8859-3',
    'iso88594': 'iso8859-4',
    'iso885941988': 'iso8859-4',
    'iso88595': 'iso8859-5',
    'iso885951988': 'iso8859-5',
    'iso88596': 'iso8859-6',
    'iso885961987': 'iso8859-6',
    'iso88597': 'iso8859-7',
    'iso885971987': 'iso8859-7',
    'iso88598': 'iso8859-8',
    'iso885981988': 'iso8859-8',
    'iso88599': 'windows-1254',
    'iso885991989': 'windows-1254',
    'isoceltic': 'iso8859-14',
    'isoir100': 'windows-1252',
    'isoir101': 'iso8859-2',
    'isoir109': 'iso8859-3',
    'isoir110': 'iso8859-4',
    'isoir126': 'iso8859-7',
    'isoir127': 'iso8859-6',
    'isoir138': 'iso8859-8',
    'isoir144': 'iso8859-5',
    'isoir148': 'windows-1254',
    'isoir149': 'cp949',
    'isoir157': 'iso8859-10',
    'isoir199': 'iso8859-14',
    'isoir226': 'iso8859-16',
    'isoir58': 'gbk',
    'isoir6': 'ascii',
    'koi8r': 'koi8-r',
    'koi8u': 'koi8-u',
    'korean': 'cp949',
    'ksc5601': 'cp949',
    'ksc56011987': 'cp949',
    'ksc56011989': 'cp949',
    'l1': 'windows-1252',
    'l10': 'iso8859-16',
    'l2': 'iso8859-2',
    'l3': 'iso8859-3',
    'l4': 'iso8859-4',
    'l5': 'windows-1254',
    'l6': 'iso8859-10',
    'l8': 'iso8859-14',
    'latin1': 'windows-1252',
    'latin10': 'iso8859-16',
    'latin2': 'iso8859-2',
    'latin3': 'iso8859-3',
    'latin4': 'iso8859-4',
    'latin5': 'windows-1254',
    'latin6': 'iso8859-10',
    'latin8': 'iso8859-14',
    'latin9': 'iso8859-15',
    'ms936': 'gbk',
    'mskanji': 'shift_jis',
    'pt154': 'ptcp154',
    'ptcp154': 'ptcp154',
    'r8': 'hp-roman8',
    'roman8': 'hp-roman8',
    'shiftjis': 'shift_jis',
    'tis620': 'cp874',
    'unicode11utf7': 'utf-7',
    'us': 'ascii',
    'usascii': 'ascii',
    'utf16': 'utf-16',
    'utf16be': 'utf-16-be',
    'utf16le': 'utf-16-le',
    'utf8': 'utf-8',
    'windows1250': 'cp1250',
    'windows1251': 'cp1251',
    'windows1252': 'cp1252',
    'windows1253': 'cp1253',
    'windows1254': 'cp1254',
    'windows1255': 'cp1255',
    'windows1256': 'cp1256',
    'windows1257': 'cp1257',
    'windows1258': 'cp1258',
    'windows936': 'gbk',
    'x-x-big5': 'big5'}
# Integer codes for the token kinds produced by the tokenizer.  The codes
# are the positions in this ordered list, so they stay contiguous from 0.
tokenTypes = {
    name: code
    for code, name in enumerate([
        "Doctype",
        "Characters",
        "SpaceCharacters",
        "StartTag",
        "EndTag",
        "EmptyTag",
        "Comment",
        "ParseError",
    ])
}

# Subset of token type codes that represent markup tags.
tagTokenTypes = frozenset(
    tokenTypes[name] for name in ("StartTag", "EndTag", "EmptyTag")
)
# Reverse mapping: namespace URI -> canonical prefix.  Built with a dict
# comprehension instead of dict([...]) for clarity; the MathML URI is then
# pinned to the short prefix "math".
prefixes = {url: prefix for prefix, url in namespaces.items()}
prefixes["http://www.w3.org/1998/Math/MathML"] = "math"
class DataLossWarning(UserWarning):
    """Warning category for operations that may lose data.

    NOTE(review): the sites that emit this warning are outside this file
    chunk; confirm usage against the rest of the package.
    """
    pass
class ReparseException(Exception):
    """Exception used to signal that the input must be reparsed.

    NOTE(review): the raising/handling sites are outside this file chunk;
    confirm the exact trigger (e.g. a change of detected encoding) upstream.
    """
    pass
|
KrzysztofStachanczyk/Sensors-WWW-website
|
www/env/lib/python2.7/site-packages/pip/_vendor/html5lib/constants.py
|
Python
|
gpl-3.0
| 86,469
|
[
"Bowtie"
] |
38cd030ffd8806b5521a4279dd743969091d051f8d8753fa213e63341640dc3d
|
# -*- coding: utf-8 -*-
#
#
# TheVirtualBrain-Scientific Package. This package holds all simulators, and
# analysers necessary to run brain-simulations. You can use it stand alone or
# in conjunction with TheVirtualBrain-Framework Package. See content of the
# documentation-folder for more details. See also http://www.thevirtualbrain.org
#
# (c) 2012-2013, Baycrest Centre for Geriatric Care ("Baycrest")
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by the Free
# Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details. You should have received a copy of the GNU General
# Public License along with this program; if not, you can download it here
# http://www.gnu.org/licenses/old-licenses/gpl-2.0
#
#
# CITATION:
# When using The Virtual Brain for scientific publications, please cite it as follows:
#
# Paula Sanz Leon, Stuart A. Knock, M. Marmaduke Woodman, Lia Domide,
# Jochen Mersmann, Anthony R. McIntosh, Viktor Jirsa (2013)
# The Virtual Brain: a simulator of primate brain network dynamics.
# Frontiers in Neuroinformatics (7:10. doi: 10.3389/fninf.2013.00010)
#
#
"""
Based on the Brunel and Wang model.
.. moduleauthor:: Paula Sanz Leon <Paula@tvb.invalid>
.. moduleauthor:: Marmaduke Woodman <Marmaduke@tvb.invalid>
.. moduleauthor:: Stuart A. Knock <Stuart@tvb.invalid>
"""
from tvb.basic.profile import TvbProfile
TvbProfile.set_profile(TvbProfile.TEST_LIBRARY_PROFILE)
import inspect
import numpy
import tvb.datatypes.arrays as arrays
import tvb.datatypes.lookup_tables as lookup_tables
import tvb.basic.traits.types_basic as basic
import tvb.simulator.models as models
from tvb.simulator.common import get_logger
LOG = get_logger(__name__)
class BrunelWang(models.Model):
"""
.. [DJ_2012] Deco G and Jirsa V. *Ongoing Cortical
Activity at Rest: Criticality, Multistability, and Ghost Attractors*.
Journal of Neuroscience 32, 3366-3375, 2012.
.. [BW_2001] Brunel N and Wang X-J. *Effects of neuromodulation in a cortical
network model of object working memory dominated by recurrent inhibition*.
Journal of Computational Neuroscience 11, 63–85, 2001.
Each node consists of one excitatory (E) and one inhibitory (I) pool.
At a global level, it uses Hagmann's 2008 connectome 66 areas(hagmann_struct.csv)
with a global scaling weight (W) of 1.65.
"""
_ui_name = "Deco-Jirsa (Mean-Field Brunel-Wang)"
ui_configurable_parameters = ['tau', 'calpha', 'cbeta', 'cgamma', 'tauNMDArise',
'tauNMDAdecay', 'tauAMPA', 'tauGABA',
'VE', 'VI', 'VL', 'Vthr', 'Vreset', 'gNMDA_e',
'gNMDA_i', 'gGABA_e', 'gGABA_i', 'gAMPArec_e',
'gAMPArec_i', 'gAMPAext_e', 'gAMPAext_i',
'gm_e', 'gm_i', 'Cm_e', 'Cm_i', 'taum_e',
'taum_i', 'taurp_e', 'taurp_i', 'Cext', 'C',
'nuext', 'wplus', 'wminus', 'W']
#Define traited attributes for this model, these represent possible kwargs.
tau = arrays.FloatArray(
label = ":math:`\\tau`",
default = numpy.array([1.25,]),
range = basic.Range(lo = 0.01, hi = 5.0, step = 0.01),
doc = """A time-scale separation between the fast, :math:`V`, and slow,
:math:`W`, state-variables of the model.""",
order = 1)
calpha = arrays.FloatArray(
label = ":math:`c_{\\alpha}`",
default = numpy.array([0.5,]),
range = basic.Range(lo = 0.4, hi = 0.5, step = 0.05),
doc = """NMDA saturation parameter (kHz)""",
order = 2)
cbeta = arrays.FloatArray(
label = ":math:`c_{\\beta}`",
default = numpy.array([0.062,]),
range = basic.Range(lo = 0.06, hi = 0.062, step = 0.002),
doc = """Inverse MG2+ blockade potential(mV-1)""",
order = 3)
cgamma = arrays.FloatArray(
label = ":math:`c_{\\gamma}`",
default = numpy.array([0.2801120448,]),
range = basic.Range(lo = 0.2801120440, hi = 0.2801120448, step = 0.0000000001),
doc = """Strength of Mg2+ blockade""",
order = -1)
tauNMDArise = arrays.FloatArray(
label = ":math:`\\tau_{NMDA_{rise}}`",
default = numpy.array([2.0,]),
range = basic.Range(lo = 0.0, hi = 2.0, step = 0.5),
doc="""NMDA time constant (rise) (ms)""",
order = 4)
tauNMDAdecay = arrays.FloatArray(
label = ":math:`\\tau_{NMDA_{decay}}`",
default = numpy.array([100.,]),
range = basic.Range(lo = 50.0, hi = 100.0, step = 10.0),
doc = """NMDA time constant (decay) (ms)""",
order = 5)
tauAMPA = arrays.FloatArray(
label = ":math:`\\tau_{AMPA}`",
default = numpy.array([2.0,]),
range = basic.Range(lo = 1.0, hi = 2.0, step = 1.0),
doc = """AMPA time constant (decay) (ms)""",
order = 6)
tauGABA = arrays.FloatArray(
label = ":math:`\\tau_{GABA}`",
default = numpy.array([10.0,]),
range = basic.Range(lo = 5.0, hi = 15.0, step = 1.0),
doc = """GABA time constant (decay) (ms)""",
order = 7)
VE = arrays.FloatArray(
label = ":math:`V_E`",
default = numpy.array([0.0,]),
range = basic.Range(lo = 0.0, hi = 10.0, step = 2.0),
doc = """Extracellular potential (mV)""",
order = 8)
VI = arrays.FloatArray(
label = ":math:`V_I`",
default = numpy.array([-70.0,]),
range = basic.Range(lo = -70.0, hi = -50.0, step = 5.0),
doc = """.""",
order = -1)
VL = arrays.FloatArray(
label = ":math:`V_L`",
default = numpy.array([-70.0,]),
range = basic.Range(lo = -70.0, hi = -50.0, step = 5.0),
doc = """Resting potential (mV)""",
order = -1)
Vthr = arrays.FloatArray(
label = ":math:`V_{thr}`",
default = numpy.array([-50.0,]),
range = basic.Range(lo = -50.0, hi = -30.0, step = 5.0),
doc = """Threshold potential (mV)""",
order = -1)
Vreset = arrays.FloatArray(
label = ":math:`V_{reset}`",
default = numpy.array([-55.0,]),
range = basic.Range(lo = -70.0, hi = -30.0, step = 5.0),
doc = """Reset potential (mV)""",
order = 9)
gNMDA_e = arrays.FloatArray(
label = ":math:`g_{NMDA_{e}}`",
default = numpy.array([0.327,]),
range = basic.Range(lo = 0.320, hi = 0.350, step = 0.0035),
doc = """NMDA conductance on post-synaptic excitatory (nS)""",
order = -1)
gNMDA_i = arrays.FloatArray(
label = ":math:`g_{NMDA_{i}}`",
default = numpy.array([0.258,]),
range = basic.Range(lo = 0.250, hi = 0.270, step = 0.002),
doc = """NMDA conductance on post-synaptic inhibitory (nS)""",
order = -1)
gGABA_e = arrays.FloatArray(
label = ":math:`g_{GABA_{e}}`",
default = numpy.array([1.25 * 3.5, ]),
range = basic.Range(lo = 1.25, hi = 4.375, step = 0.005),
doc = """GABA conductance on excitatory post-synaptic (nS)""",
order = 10)
gGABA_i = arrays.FloatArray(
label = ":math:`g_{GABA_{i}}`",
default = numpy.array([0.973 * 3.5, ]),
range = basic.Range(lo = 0.9730, hi = 3.4055, step = 0.0005),
doc = """GABA conductance on inhibitory post-synaptic (nS)""",
order = 11)
gAMPArec_e = arrays.FloatArray(
label = ":math:`g_{AMPA_{rec_e}}`",
default = numpy.array([0.104,]),
range = basic.Range(lo = 0.1, hi = 0.11, step = 0.001),
doc = """AMPA(recurrent) cond on post-synaptic (nS)""",
order = -1)
gAMPArec_i = arrays.FloatArray(
label = ":math:`g_{AMPA_{rec_i}}`",
default = numpy.array([0.081,]),
range = basic.Range(lo = 0.081, hi = 0.1, step = 0.001),
doc = """AMPA(recurrent) cond on post-synaptic (nS)""",
order = -1)
gAMPAext_e = arrays.FloatArray(
label = ":math:`g_{AMPA_{ext_e}}`",
default = numpy.array([2.08 * 1.2,]),
range = basic.Range(lo = 2.08, hi = 2.496, step = 0.004),
doc = """AMPA(external) cond on post-synaptic (nS)""",
order = 12)
gAMPAext_i = arrays.FloatArray(
label = ":math:`g_{AMPA_{ext_i}}`",
default = numpy.array([1.62 * 1.2,]),
range = basic.Range(lo = 1.62, hi = 1.944, step = 0.004),
doc = """AMPA(external) cond on post-synaptic (nS)""",
order = 13)
gm_e = arrays.FloatArray(
label = ":math:`gm_e`",
default = numpy.array([25.0,]),
range = basic.Range(lo = 20.0, hi = 25.0, step = 1.0),
doc = """Excitatory membrane conductance (nS)""",
order = 13)
gm_i = arrays.FloatArray(
label = ":math:`gm_i`",
default = numpy.array([20.,]),
range = basic.Range(lo = 15.0, hi = 21.0, step = 1.0),
doc = """Inhibitory membrane conductance (nS)""",
order = 14)
Cm_e = arrays.FloatArray(
label = ":math:`Cm_e`",
default = numpy.array([500.,]),
range = basic.Range(lo = 200.0, hi = 600.0, step = 50.0),
doc = """Excitatory membrane capacitance (mF)""",
order = 15)
Cm_i = arrays.FloatArray(
label = ":math:`Cm_i`",
default = numpy.array([200.,]),
range = basic.Range(lo = 150.0, hi = 250.0, step = 50.0),
doc = """Inhibitory membrane capacitance (mF)""",
order = 16)
taum_e = arrays.FloatArray(
label = ":math:`\\tau_{m_{e}}`",
default = numpy.array([20.,]),
range = basic.Range(lo = 10.0, hi = 25.0, step = 5.0),
doc = """Excitatory membrane leak time (ms)""",
order = 17)
taum_i = arrays.FloatArray(
label = ":math:`\\tau_{m_{i}}`",
default = numpy.array([10.0,]),
range = basic.Range(lo = 5.0, hi = 15.0, step = 5.),
doc = """Inhibitory Membrane leak time (ms)""",
order = 18)
taurp_e = arrays.FloatArray(
label = ":math:`\\tau_{rp_{e}}`",
default = numpy.array([2.0,]),
range = basic.Range(lo = 0.0, hi = 4.0, step = 1.),
doc = """Excitatory absolute refractory period (ms)""",
order = 19)
taurp_i = arrays.FloatArray(
label = ":math:`\\tau_{rp_{i}}`",
default = numpy.array([1.0,]),
range = basic.Range(lo = 0.0, hi = 2.0, step = 0.5),
doc= """Inhibitory absolute refractory period (ms)""",
order = 20)
Cext = arrays.IntegerArray(
label = ":math:`C_{ext}`",
default = numpy.array([800,]),
range = basic.Range(lo = 500, hi = 1200, step = 100),
doc = """Number of external (excitatory) connections""",
order = -1)
C = arrays.IntegerArray(
label = ":math:`C`",
default = numpy.array([200,]),
range = basic.Range(lo = 100, hi = 500, step = 100),
doc = "Number of neurons for each node",
order = -1)
nuext = arrays.FloatArray(
label = ":math:`\\nu_{ext}`",
default = numpy.array([0.003,]),
range = basic.Range(lo = 0.002, hi = 0.01, step = 0.001),
doc = """External firing rate (kHz)""",
order = -1)
wplus = arrays.FloatArray(
label = ":math:`w_{+}`",
default = numpy.array([1.5,]),
range = basic.Range(lo = 0.5, hi = 3., step = 0.05),
doc = """Synaptic coupling strength [w+] (dimensionless)""",
order = -1)
wminus = arrays.FloatArray(
label = ":math:`w_{-}`",
default = numpy.array([1.,]),
range = basic.Range(lo = 0.2, hi = 2., step = 0.05),
doc = """Synaptic coupling strength [w-] (dimensionless)""",
order = -1)
NMAX = arrays.IntegerArray(
label = ":math:`N_{MAX}`",
default = numpy.array([8, ], dtype=numpy.int32),
range = basic.Range(lo = 2, hi = 8, step=1),
doc = """This is a magic number as given in the original code.
It is used to compute the phi and psi -- computationally expensive --
functions""",
order = -1)
pool_nodes = arrays.FloatArray(
label = ":math:`p_{nodes}`",
default = numpy.array([74.0, ]),
range = basic.Range(lo = 1.0, hi = 74.0, step = 1.0),
doc = """Scale coupling weight sby the number of nodes in the network""",
order = 23)
a = arrays.FloatArray(
label = ":math:`a`",
default = numpy.array([0.80823563, ]),
range = basic.Range(lo = 0.80, hi = 0.88, step = 0.01),
doc = """.""",
order = -1)
b = arrays.FloatArray(
label = ":math:`b`",
default = numpy.array([67.06177975, ]),
range = basic.Range(lo = 66.0, hi = 69.0, step = 0.5 ),
doc = """.""",
order = -1)
ve = arrays.FloatArray(
label = ":math:`ve`",
default = numpy.array([- 52.5, ]),
range = basic.Range(lo = -50.0, hi = -45.0, step = 0.2),
doc = """.""",
order = -1)
vi = arrays.FloatArray(
label = ":math:`vi`",
default = numpy.array([- 52.5,]),
range = basic.Range(lo = -50.0, hi = -45.0, step = 0.2 ),
doc = """.""",
order = -1)
W = arrays.FloatArray(
label = ":math:`W`",
default = numpy.array([1.65,]),
range = basic.Range(lo = 1.4, hi = 1.9, step = 0.05),
doc = """Global scaling weight [W] (dimensionless)""",
order = -1)
variables_of_interest = basic.Enumerate(
label="Variables watched by Monitors",
options=["E", "I"],
default=["E"],
select_multiple=True,
doc="""This represents the default state-variables of this Model to be
monitored. It can be overridden for each Monitor if desired. The
corresponding state-variable indices for this model are :math:`E = 0`
and :math:`I = 1`.""",
order=21)
#Informational attribute, used for phase-plane and initial()
state_variable_range = basic.Dict(
label = "State Variable ranges [lo, hi]",
default = {"E": numpy.array([0.001, 0.01]),
"I": numpy.array([0.001, 0.01])},
doc = """The values for each state-variable should be set to encompass
the expected dynamic range of that state-variable for the current
parameters, it is used as a mechanism for bounding random initial
conditions when the simulation isn't started from an explicit
history, it is also provides the default range of phase-plane plots.
The corresponding state-variable units for this model are kHz.""",
order = 22)
# psi_table = lookup_tables.PsiTable(required=True,
# default=lookup_tables.PsiTable(),
# label="Psi Table",
# doc="""Psi Table (description).""")
#
# nerf_table = lookup_tables.NerfTable(required=True,
# default=lookup_tables.NerfTable(),
# label="Nerf Table",
# doc="""Nerf Table (description).""")
def __init__(self, **kwargs):
    """
    Initialise the model: state-variable bookkeeping plus empty slots
    for the derived parameters, which are filled in later by
    :meth:`update_derived_parameters` (called from :meth:`configure`).
    """
    LOG.info("%s: initing..." % str(self))
    super(BrunelWang, self).__init__(**kwargs)
    # Two state variables (E, I); both feed the long-range coupling.
    self._nvar = 2
    self.cvar = numpy.array([0, 1], dtype=numpy.int32)
    # Placeholders for the derived parameters; None until configure().
    for name in ('crho1_e', 'crho1_i', 'crho2_e', 'crho2_i',
                 'csigma_e', 'csigma_i', 'tauNMDA',
                 'Text_e', 'Text_i', 'TAMPA_e', 'TAMPA_i',
                 'T_ei', 'T_ii', 'pool_fractions'):
        setattr(self, name, None)
    # NOTE: the exact phi/psi integral forms from the paper are kept
    # rather than simplified approximations (see original reference).
    LOG.debug('%s: inited.' % repr(self))
def configure(self):
    """Finalise configuration: derived parameters and lookup tables."""
    super(BrunelWang, self).configure()
    self.update_derived_parameters()
    # Pre-computed tables for the psi and nerf special functions.
    self.psi_table = lookup_tables.PsiTable(load_default=True, use_storage=False)
    self.nerf_table = lookup_tables.NerfTable(load_default=True, use_storage=False)
    for table in (self.psi_table, self.nerf_table):
        table.configure()
def optimize(self, fnname='optdfun'):
    """
    Build and install a specialised version of :meth:`dfun`.

    The generated function freezes every public numpy-array (or None)
    attribute of the model as a literal in its source and strips the
    ``self.`` prefix from the dfun body, avoiding repeated attribute
    lookups per integration step.

    :param fnname: name under which the generated function is defined
        inside the exec namespace.
    Side effect: rebinds ``self.dfun`` to the generated function.

    NOTE(review): the exec namespace passes ``self.vint``, which is only
    created by code currently commented out in ``__init__``; calling
    this method as-is raises AttributeError — confirm before enabling.
    """
    decl = "def %s(state_variables, coupling, local_coupling=0.0):\n" % (fnname,)
    NoneType = type(None)
    # Inline all public array/None attributes as local constants.
    for k in dir(self):
        attr = getattr(self, k)
        if not k[0] == '_' and type(attr) in (numpy.ndarray, NoneType):
            decl += ' %s = %r\n' % (k, attr)
    # Reuse dfun's source without its signature line or 'self.' prefix.
    decl += '\n'.join(inspect.getsource(self.dfun).split('\n')[1:]).replace("self.", "")
    dikt = {'vint': self.vint, 'array': numpy.array, 'int32': numpy.int32, 'numpy': numpy}
    # BUG FIX: the original used the Python-2-only statement form
    # 'exec decl in dikt'; the call form below is valid on both
    # Python 2 and Python 3.
    exec(decl, dikt)
    self.dfun = dikt[fnname]
def dfun(self, state_variables, coupling, local_coupling=0.0):
    """
    .. math::
         \tau_e*\\dot{\nu_e}(t) &= -\nu_e(t) + \\phi_e \\\\
         \tau_i*\\dot{\nu_i}(t) &= -\nu_i(t) + \\phi_i \\\\
         ve &= - (V_thr - V_reset) \\, \nu_e \\, \tau_e + \\mu_e \\\\
         vi &= - (V_thr - V_reset) \\, \nu_i \\, \tau_i + \\mu_i \\\\
         \tau_X &= \\frac{C_m_X}{g_m_x \\, S_X} \\\\
         S_X &= 1 + Text \\, \nu_ext + T_ampa \\, \nu_X + (rho_1 + rho_2)
                \\, \\psi(\nu_X) + T_XI \\, \\nu_I \\\\
         \\mu_X &= \\frac{(Text \\, \\nu_X + T_AMPA \\, \\nu_X + \\rho_1 \\,
                 \\psi(\nu_X)) \\, (V_E - V_L)}{S_X} +
                 \\frac{\\rho_2 \\, \\psi(\nu_X) \\,(\\bar{V_X} - V_L) +
                 T_xI \\, \\nu_I \\, (V_I - V_L)}{S_X} \\\\
         sigma_X^2 &= \\frac{g_AMPA_ext^2(\\bar{V_X} - V_X)^2 \\, C_ext \\, nu_ext
                \\tau_AMPA^2 \\, \\tau_X}{g_m_X^2 * \\tau_m_X^2} \\\\
         \\rho_1 &= {g_NMDA * C}{g_m_X * J} \\\\
         \\rho_2 &= \\beta \\frac{g_NMDA * C (\\bar{V_X} - V_E)(J - 1)}
                  {g_m_X * J^2} \\\\
         J_X &= 1 + \\gamma \\,\exp(-\\beta*\\bar{V_X}) \\\\
         \\phi(\mu_X, \\sigma_X) &= (\\tau_rp_X + \\tau_X \\, \\int
         \exp(u^2) * (\\erf(u) + 1))^-1
    The NMDA gating variable
    .. math::
         \\psi(\\nu)
    has been approximated by the exponential function:
    .. math::
         \\psi(\\nu) &= a * (1 - \exp(-b * \\nu)) \\\\
         a &= 0.80823563 \\\\
         b &= 67.06177975
    The post-synaptic rate as described by the :math:`\\phi` function
    constitutes a non-linear input-output relationship between the firing
    rate of the post-synaptic neuron and the average firing rates
    :math:`\\nu_{E}` and :math:`\\nu_{I}` of the pre-synaptic excitatory and
    inhibitory neural populations. This input-output function is
    conceptually equivalent to the simple threshold-linear or sigmoid
    input-output functions routinely used in firing-rate models. What it is
    gained from using the integral form is a firing-rate model that captures
    many of the underlying biophysics of the real spiking neurons.[BW_2001]_
    """
    E = state_variables[0, :]
    I = state_variables[1, :]
    #A = state_variables[2, :]
    # where and how to add local coupling
    c_0 = coupling[0, :]
    c_2 = coupling[1, :]
    # AMPA synapses (E --> E, and E --> I)
    vn_e = c_0
    vn_i = E * self.wminus * self.pool_fractions
    # NMDA synapses (E --> E, and E --> I)
    vN_e = c_2
    vN_i = E * self.wminus * self.pool_fractions
    # GABA (A) synapses (I --> E, and I --> I)
    vni_e = self.wminus * I  # I --> E
    vni_i = self.wminus * I  # I --> I
    # Mg2+-blockade factors J and the NMDA coupling coefficients rho.
    J_e = 1 + self.cgamma * numpy.exp(-self.cbeta * self.ve)
    J_i = 1 + self.cgamma * numpy.exp(-self.cbeta * self.vi)
    rho1_e = self.crho1_e / J_e
    rho1_i = self.crho1_i / J_i
    rho2_e = self.crho2_e * (self.ve - self.VE) * (J_e - 1) / J_e ** 2
    rho2_i = self.crho2_i * (self.vi - self.VI) * (J_i - 1) / J_i ** 2
    vS_e = 1 + self.Text_e * self.nuext + self.TAMPA_e * vn_e + \
           (rho1_e + rho2_e) * vN_e + self.T_ei * vni_e
    vS_i = 1 + self.Text_i * self.nuext + self.TAMPA_i * vn_i + \
           (rho1_i + rho2_i) * vN_i + self.T_ii * vni_i
    vtau_e = self.Cm_e / (self.gm_e * vS_e)
    vtau_i = self.Cm_i / (self.gm_i * vS_i)
    vmu_e = (rho2_e * vN_e * self.ve + self.T_ei * vni_e * self.VI + \
             self.VL) / vS_e
    vmu_i = (rho2_i * vN_i * self.vi + self.T_ii * vni_i * self.VI + \
             self.VL) / vS_i
    vsigma_e = numpy.sqrt((self.ve - self.VE) ** 2 * vtau_e * \
                          self.csigma_e * self.nuext)
    vsigma_i = numpy.sqrt((self.vi - self.VE) ** 2 * vtau_i * \
                          self.csigma_i * self.nuext)
    #tauAMPA_over_vtau_e
    k_e = self.tauAMPA / vtau_e
    k_i = self.tauAMPA / vtau_i
    # integration limits, clipped at 19 to stay inside the nerf table.
    alpha_e = (self.Vthr - vmu_e) / vsigma_e * (1.0 + 0.5 * k_e) + \
              1.03 * numpy.sqrt(k_e) - 0.5 * k_e
    alpha_e = numpy.where(alpha_e > 19, 19, alpha_e)
    alpha_i = (self.Vthr - vmu_i) / vsigma_i * (1.0 + 0.5 * k_i) + \
              1.03 * numpy.sqrt(k_i) - 0.5 * k_i
    alpha_i = numpy.where(alpha_i > 19, 19, alpha_i)
    beta_e = (self.Vreset - vmu_e) / vsigma_e
    beta_e = numpy.where(beta_e > 19, 19, beta_e)
    beta_i = (self.Vreset - vmu_i) / vsigma_i
    beta_i = numpy.where(beta_i > 19, 19, beta_i)
    v_ae = self.nerf_table.search_value(alpha_e)
    v_ai = self.nerf_table.search_value(alpha_i)
    v_be = self.nerf_table.search_value(beta_e)
    # BUG FIX: the inhibitory lower bound must be looked up with beta_i;
    # the original re-used beta_e here (copy-paste error), corrupting
    # the inhibitory integral v_integral_i below.
    v_bi = self.nerf_table.search_value(beta_i)
    v_integral_e = v_ae - v_be
    v_integral_i = v_ai - v_bi
    Phi_e = 1 / (self.taurp_e + vtau_e * numpy.sqrt(numpy.pi) * v_integral_e)
    Phi_i = 1 / (self.taurp_i + vtau_i * numpy.sqrt(numpy.pi) * v_integral_i)
    # NOTE: dfun mutates model state (self.ve, self.vi) as a side effect;
    # the next call's J_e/J_i depend on these updated mean potentials.
    self.ve = - (self.Vthr - self.Vreset) * E * vtau_e + vmu_e
    self.vi = - (self.Vthr - self.Vreset) * I * vtau_i + vmu_i
    dE = (-E + Phi_e) / vtau_e
    dI = (-I + Phi_i) / vtau_i
    derivative = numpy.array([dE, dI])
    return derivative
def update_derived_parameters(self):
    """
    Derive the composite coupling constants used by :meth:`dfun` from
    the primary traited parameters. Called from :meth:`configure`;
    must run before the first evaluation of dfun.
    """
    def synaptic_T(g, fan_in, tau, gm):
        # conductance * fan-in * decay time, per unit membrane conductance
        return (g * fan_in * tau) / gm

    self.pool_fractions = 1. / (self.pool_nodes * 2)
    self.tauNMDA = self.calpha * self.tauNMDArise * self.tauNMDAdecay
    self.Text_e = synaptic_T(self.gAMPAext_e, self.Cext, self.tauAMPA, self.gm_e)
    self.Text_i = synaptic_T(self.gAMPAext_i, self.Cext, self.tauAMPA, self.gm_i)
    self.TAMPA_e = synaptic_T(self.gAMPArec_e, self.C, self.tauAMPA, self.gm_e)
    self.TAMPA_i = synaptic_T(self.gAMPArec_i, self.C, self.tauAMPA, self.gm_i)
    self.T_ei = synaptic_T(self.gGABA_e, self.C, self.tauGABA, self.gm_e)
    self.T_ii = synaptic_T(self.gGABA_i, self.C, self.tauGABA, self.gm_i)
    self.crho1_e = (self.gNMDA_e * self.C) / self.gm_e
    self.crho1_i = (self.gNMDA_i * self.C) / self.gm_i
    self.crho2_e = self.cbeta * self.crho1_e
    self.crho2_i = self.cbeta * self.crho1_i
    self.csigma_e = (self.gAMPAext_e ** 2 * self.Cext * self.tauAMPA ** 2) / \
                    (self.gm_e * self.taum_e) ** 2
    self.csigma_i = (self.gAMPAext_i ** 2 * self.Cext * self.tauAMPA ** 2) / \
                    (self.gm_i * self.taum_i) ** 2
if __name__ == "__main__":
    # Smoke-test the module: doctests, default construction, phase plane.
    LOG.info("Testing %s module..." % __file__)
    import doctest
    doctest.testmod()
    model = BrunelWang()
    LOG.info("Model initialised in its default state without error...")
    LOG.info("Testing phase plane interactive ... ")
    import tvb.simulator.integrators
    from tvb.simulator.plot.phase_plane_interactive import PhasePlaneInteractive
    heun = tvb.simulator.integrators.HeunDeterministic(dt=2**-5)
    PhasePlaneInteractive(model=model, integrator=heun).show()
|
stuart-knock/tvb-library
|
contrib/simulator/models/brunel_wang.py
|
Python
|
gpl-2.0
| 25,716
|
[
"NEURON"
] |
88b32daf41fc6054eae9d83b8b02d01b13f0711dedbc8c6e395f2f1b34b7e832
|
# -*- coding: utf-8 -*-
#
# csa_topology_example.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""
Using CSA with Topology layers
------------------------------
This example shows a brute-force way of specifying connections between
NEST Topology layers using Connection Set Algebra instead of the
built-in connection routines.
Using the CSA requires NEST to be compiled with support for
libneurosim. For details, see Djurfeldt M, Davison AP and Eppler JM
(2014) **Efficient generation of connectivity in neuronal networks
from simulator-independent descriptions**, *Front. Neuroinform.*
http://dx.doi.org/10.3389/fninf.2014.00043
For a related example, see csa_example.py
This example uses the function GetLeaves, which is deprecated. A deprecation
warning is therefore issued. For details about deprecated functions, see
documentation.
"""
"""
First, we import all necessary modules.
"""
import nest
import nest.topology as topo
"""
Next, we check for the availability of the CSA Python module. If it
does not import, we exit with an error message.
"""
# CSA is an optional dependency: bail out with build instructions if the
# Python bindings (and the libneurosim bridge) cannot be imported.
try:
    import csa
    haveCSA = True
except ImportError:
    print("This example requires CSA to be installed in order to run.\n" +
          "Please make sure you compiled NEST using\n" +
          " -Dwith-libneurosim=[OFF|ON|</path/to/libneurosim>]\n" +
          "and CSA and libneurosim are available from PYTHONPATH.")
    # sys is only needed on this failure path; exit with default status.
    import sys
    sys.exit()
"""
We define a factory that returns a CSA-style geometry function for
the given layer. The function returned will return for each CSA-index
the position in space of the given neuron as a 2- or 3-element list.
This function stores a copy of the neuron positions internally,
entailing memory overhead.
"""
def geometryFunction(topologyLayer):
    """
    Return a CSA-style geometry function for *topologyLayer*.

    The returned callable maps a CSA index to the corresponding
    neuron's position as a 2- or 3-element list. Positions are read
    from NEST once and captured in the closure, so lookups make no
    further NEST calls (at the cost of holding a copy in memory).
    """
    positions = topo.GetPosition(nest.GetLeaves(topologyLayer)[0])

    def lookup(index):
        return positions[index]

    return lookup
"""
We create two layers that have 20x20 neurons of type `iaf_psc_alpha`.
"""
# Two 20x20 layers of identical integrate-and-fire neurons.
pop1 = topo.CreateLayer({'elements': 'iaf_psc_alpha',
                         'rows': 20, 'columns': 20})
pop2 = topo.CreateLayer({'elements': 'iaf_psc_alpha',
                         'rows': 20, 'columns': 20})
"""
For each layer, we create a CSA-style geometry function and a CSA
metric based on them.
"""
g1 = geometryFunction(pop1)
g2 = geometryFunction(pop2)
d = csa.euclidMetric2d(g1, g2)
"""
The connection set ``cs`` describes a Gaussian connectivity profile
with sigma = 0.2 and cutoff at 0.5, and two values (10000.0 and 1.0)
used as weight and delay, respectively.
"""
cs = csa.cset(csa.random * (csa.gaussian(0.2, 0.5) * d), 10000.0, 1.0)
"""
We can now connect the populations using the `CGConnect` function.
It takes the IDs of pre- and postsynaptic neurons (``pop1`` and
``pop2``), the connection set (``cs``) and a dictionary that maps
the parameters weight and delay to positions in the value set
associated with the connection set.
"""
# This is a work-around until NEST 3.0 is released. It will issue a deprecation
# warning.
pop1_gids = nest.GetLeaves(pop1)[0]
pop2_gids = nest.GetLeaves(pop2)[0]
# The dict maps parameter names to value-set positions: weight is
# value 0 (10000.0), delay is value 1 (1.0) of the cset above.
nest.CGConnect(pop1_gids, pop2_gids, cs, {"weight": 0, "delay": 1})
"""
Finally, we use the `PlotTargets` function to show all targets in
``pop2`` starting at the center neuron of ``pop1``.
"""
topo.PlotTargets(topo.FindCenterElement(pop1), pop2)
|
tobikausk/nest-simulator
|
pynest/examples/csa_topology_example.py
|
Python
|
gpl-2.0
| 4,026
|
[
"Gaussian",
"NEURON"
] |
be7545fda30105c39dc624c02ee8a86fb6eb37da8f8cc9080c155f4778eed6d5
|
"""
hmmer3 module
"""
from mungo.mungoCore import *
from mungo.useful import smartopen, extractRootName
import sys, re, warnings
# Map HMMER frame indices (0-5) to genomic reading frames
# (+1..+3 forward, -1..-3 reverse), plus the inverse mapping.
hmmer2frame = {0: 1, 1: 2, 2: 3, 3: -1, 4: -2, 5: -3}
# BUG FIX: dict.iteritems() is Python-2-only; a dict comprehension is
# equivalent and works on both Python 2.7 and Python 3.
frame2hmmer = {v: k for k, v in hmmer2frame.items()}
class Domain(AbstractFeature):
    """A single HMMER3 domain hit (one row of a domain-table file)."""
    # Column names of the HMMER3 domain table, in file order.
    attributes = ["targetName","accession","targetLen","queryName",
        "accession","qlen","evalue","seqScore","seqBias","i","N",
        "c_evalue","i_evalue","domainScore","domainBias",
        "hmmStart","hmmEnd","alnStart","alnEnd", "envStart","envEnd",
        "acc","description"]
    # (name, type) pairs used to coerce raw string fields.
    # BUG FIX: wrapped in list() so the pairs survive on Python 3, where
    # zip() returns a one-shot iterator (list() is a no-op on Python 2).
    converters = list(zip(
        ["hmmStart","hmmEnd","alnStart","alnEnd","envStart","envEnd",
         "seqScore","domainScore","evalue","c_evalue","i_evalue",
         "seqScore", "seqBias", "domainScore", "domainBias"],
        [int,int,int,int,int,int,float,float,float,float,float,float,float,float,float]))
    format = attributesToFormat(attributes)

    def __init__(self, *args, **kw):
        """Constructor; flags the hit as not yet in genomic coordinates."""
        super(Domain, self).__init__(*args, **kw)
        self.genomic = False

    def __repr__(self):
        # BUG FIX: items() instead of the Python-2-only iteritems();
        # identical behavior on Python 2.7.
        d = {}
        for k, v in self.__dict__.items():
            d[k] = v
        return self.format % d

    def getSequence(self, blastdb, getAll=False, convertAccession=lambda x: x):
        """
        Fetch the (header, sequence) pair for this hit from a BLAST db.

        @param blastdb: BLAST database to query
        @param getAll: if True, fetch the whole sequence (start=end=0)
            rather than just the aligned region
        @param convertAccession: optional accession-mangling callable

        NOTE(review): `blast` is not imported anywhere in this module,
        so this method raises NameError as written — confirm the
        intended import (e.g. `from mungo import blast`) before use.
        """
        if getAll:
            start = 0
            end = 0
        else:
            start = self.alnStart
            end = self.alnEnd
        accession = convertAccession(self.accession)
        h, s = blast.getSequence(blastdb, accession, start, end)
        return h, s
class BlockSixFrameDomain(Domain):
    """Domain hit whose target name encodes a six-frame block as
    '<chrom>.<blockStart>-<blockEnd>:<hmmerFrame>'."""

    def toGenomic(self):
        """
        Convert alignment coordinates from 6-frame block coordinates to
        genomic coordinates, in place.

        Side effects: sets self.genomic, rewrites self.targetName to the
        chromosome, replaces alnStart/alnEnd, and sets self.strand.
        """
        # BUG FIX: raw string for the regex — '\:' in a plain string is
        # an invalid escape (DeprecationWarning on modern Python); the
        # pattern matches '.', '-' or ':' either way, so the split of
        # '<chrom>.<start>-<end>:<frame>' is unchanged.
        prog = re.compile(r'\.|-|:')
        chrom, blockStart, blockEnd, hmmerFrame = prog.split(self.targetName)
        blockStart = int(blockStart)
        blockEnd = int(blockEnd)
        # Nucleotide length of the block.
        L = blockEnd - blockStart + 1
        hmmerFrame = int(hmmerFrame)
        frame = hmmer2frame[hmmerFrame]
        # Positive frames lie on the forward strand, negative on reverse.
        if frame > 0:
            strand = '+'
        else:
            strand = '-'
        gStart, gEnd = convertSixFrameToGenomic(self.alnStart, self.alnEnd, frame, L)
        self.genomic = True
        self.targetName = chrom
        self.alnStart = gStart
        self.alnEnd = gEnd
        self.strand = strand
def convertSixFrameToGenomic(start, end, frame, L):
    """Map amino-acid coordinates in a six-frame translation back to
    nucleotide coordinates.

    @param start: Amino acid start coord (1-based)
    @param end: Amino acid end coord (1-based)
    @param frame: Reading frame (+1..+3 forward, -1..-3 reverse)
    @param L: Nucleotide seq length
    @return: (gStart, gEnd); on the reverse strand gStart > gEnd.
        Note the strand itself is not returned — derive it from the
        sign of `frame`.
    """
    if frame < 0:
        width = abs(frame)
        gStart = L - (3 * (start - 1) + width - 1)
        gEnd = L - (3 * (end - 1) + width + 1)
    else:
        offset = frame - 1
        gStart = 3 * (start - 1) + offset + 1
        gEnd = 3 * (end - 1) + offset + 3
    return gStart, gEnd
def HmmerFile(iFileHandle, **kw):
    """Factory returning a :class:`DomainHitsReader` over *iFileHandle*."""
    return DomainHitsReader(iFileHandle, **kw)
class DomainHitsReader(AbstractDataReader):
    """Iterate over the domain hits of a HMMER3 domain-table file."""

    def __init__(self, iFileHandle, seqType=None, eValueCutoff=None, scoreCutoff=None):
        """
        @param iFileHandle: filename or open file object
        @param seqType: Domain subclass (or the strings 'SixFrame' /
            'BlockSixFrame') selecting the wrapper for each row
        @param eValueCutoff: drop hits with a larger e-value
        @param scoreCutoff: drop hits with a smaller score
        """
        self.seqType = seqType
        self.eValueCutoff = eValueCutoff
        self.scoreCutoff = scoreCutoff
        super(DomainHitsReader, self).__init__(iFileHandle)

    def _generator(self):
        """Yield one Domain (or chosen subclass) instance per data row."""
        if self.seqType in [Domain, BlockSixFrameDomain]:
            _Domain = self.seqType
        elif self.seqType == 'SixFrame':
            # NOTE(review): SixFrameDomain is not defined in this module;
            # seqType='SixFrame' raises NameError as written — confirm
            # where this class was meant to come from.
            _Domain = SixFrameDomain
        elif self.seqType == 'BlockSixFrame':
            _Domain = BlockSixFrameDomain
        else:
            _Domain = Domain
        for line in self.iFile:
            line = line.strip()
            # BUG FIX: also skip blank lines — line[0] on an empty
            # stripped string raised IndexError; '#' lines are comments.
            if not line or line[0] == "#":
                continue
            tokens = line.split()
            d = _Domain(dict(zip(Domain.attributes, tokens)))
            # NOTE(review): d.eValue / d.score do not match the declared
            # attribute names ('evalue', 'seqScore'); the cutoff filter
            # likely raises AttributeError — verify against mungoCore.
            if (self.eValueCutoff and d.eValue > self.eValueCutoff) or \
               (self.scoreCutoff and d.score < self.scoreCutoff):
                continue
            yield d
|
PapenfussLab/Mungo
|
mungo/hmmer3.py
|
Python
|
artistic-2.0
| 4,052
|
[
"BLAST"
] |
13c3a619e3b7f2e166ed2cfc63df41885f4d5d511aaa2b05d49ffe2e522ce1b6
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 nexB Inc. and others. All rights reserved.
# http://nexb.com and https://github.com/nexB/scancode-toolkit/
# The ScanCode software is licensed under the Apache License version 2.0.
# Data generated with ScanCode require an acknowledgment.
# ScanCode is a trademark of nexB Inc.
#
# You may not use this software except in compliance with the License.
# You may obtain a copy of the License at: http://apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# When you publish or redistribute any data created with ScanCode or any ScanCode
# derivative work, you must accompany this data with the following acknowledgment:
#
# Generated with ScanCode and provided on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, either express or implied. No content created from
# ScanCode should be considered or used as legal advice. Consult an Attorney
# for any legal advice.
# ScanCode is a free software code scanning tool from nexB Inc. and others.
# Visit https://github.com/nexB/scancode-toolkit/ for support and download.
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
import codecs
from collections import OrderedDict
import json
import os
import zipfile
import click
click.disable_unicode_literals_warning = True
import requests
from commoncode import fileutils
from commoncode import fetch
import licensedcode
from licensedcode.cache import get_licenses_db
from licensedcode.cache import get_index
from licensedcode.models import load_licenses
from licensedcode.models import License
"""
Sync and update the ScanCode licenses against:
- the SPDX license list
- the DejaCode licenses
Run python synclic.py -h for help.
"""
TRACE = False
TRACE_DEEP = False
TRACE_FETCH = False
class ExternalLicensesSource(object):
"""
Base class to provide (including possibly fetch) licenses from an
external source and expose these as licensedcode.models.License
objects
"""
# `matching_key` is the License object attribute to use as a key: one
# of "key" or "spdx_license_key".
matching_key = None
# tuple of ScanCode reference license attributes that can be updated
# from this source
updatable_attributes = tuple()
# tuple of ScanCode reference license attributes that cannot be updated
# from this source. They can only be set when creating a new license.
non_updatable_attributes = tuple()
def __init__(self, src_dir, match_text=False, match_approx=False):
"""
`src_dir` is where the License objects are dumped.
"""
src_dir = os.path.realpath(src_dir)
self.src_dir = src_dir
self.match_text = match_text
self.match_approx = match_approx
self.fetched = False
if os.path.exists(src_dir):
# fetch ONLY if the directory is empty
self.fetched = True
else:
os.mkdir(src_dir)
self.update_dir = self.src_dir.rstrip('\\/') + '-update'
if not os.path.exists(self.update_dir):
os.mkdir(self.update_dir)
self.new_dir = self.src_dir.rstrip('\\/') + '-new'
if not os.path.exists(self.new_dir):
os.mkdir(self.new_dir)
self.del_dir = self.src_dir.rstrip('\\/') + '-del'
if not os.path.exists(self.del_dir):
os.mkdir(self.del_dir)
self.scancodes_by_key = get_licenses_db()
self.scancodes_by_spdx_key = {l.spdx_license_key.lower(): l
for l in self.scancodes_by_key.values()
if l.spdx_license_key}
composites_dir = os.path.join(licensedcode.data_dir, 'composites', 'licenses')
self.composites_by_key = load_licenses(composites_dir, with_deprecated=True)
self.composites_by_spdx_key = {l.spdx_license_key.lower(): l
for l in self.composites_by_key.values()
if l.spdx_license_key}
foreign_dir = os.path.join(licensedcode.data_dir, 'non-english', 'licenses')
self.non_english_by_key = load_licenses(foreign_dir, with_deprecated=True)
self.non_english_by_spdx_key = {l.spdx_license_key.lower(): l
for l in self.non_english_by_key.values()
if l.spdx_license_key}
def fetch_licenses(self):
"""
Yield License objects fetched from this external source.
Store the metadata and texts in self.src_dir as a side effect.
"""
raise NotImplementedError
def get_licenses(self):
"""
Return a mapping of key -> ScanCode License objects either
fetched externally or loaded from the existing `self.src_dir`
"""
if self.fetched:
print('Reusing (possibly modified) external licenses stored in:', self.update_dir)
return load_licenses(self.update_dir, with_deprecated=True)
else:
print('Fetching and storing external licenses in:', self.src_dir)
licenses = {l.key: l for l in self.fetch_licenses()}
print('Stored %d external licenses in: %r.' % (len(licenses), self.src_dir,))
fileutils.copytree(self.src_dir, self.update_dir)
print('Modified external licenses will be in: %r.' % (self.update_dir,))
print('New external licenses will be in: %r.' % (self.new_dir,))
print('Deleted external licenses will be in: %r.' % (self.del_dir,))
return load_licenses(self.update_dir, with_deprecated=True)
def find_key(self, key, text):
"""
Return a ScanCode license key string or None given an existing key and a license text.
"""
keyl = key.lower()
if self.matching_key == 'key':
if keyl in self.scancodes_by_key:
if TRACE_DEEP: print('Other license key in ScanCode:', key, end='. ')
return keyl
elif self.matching_key == 'spdx_license_key':
if keyl in self.scancodes_by_spdx_key:
sckey = self.scancodes_by_spdx_key[keyl].key
if TRACE_DEEP: print('Other license key in ScanCode as:', sckey, 'for SPDX:', key, end='. ')
return sckey
if self.match_text:
if TRACE_DEEP: print('Matching text for:', key, end='. ')
new_key, exact, score = get_match(text)
if not new_key:
if TRACE_DEEP: print('SKIPPED: Other license key not MATCHED in ScanCode:', key, end='. ')
return None
if exact is True and score == 100:
if TRACE_DEEP: print('Other license key not in ScanCode: EXACT match to:', new_key, end='. ')
return new_key
if self.match_approx:
if exact is False:
if TRACE_DEEP: print('Other license key not in ScanCode but OK matched to:', new_key, 'with score:', score, end='. ')
return new_key
if exact is None:
if TRACE_DEEP: print('Other license key not in ScanCode: WEAK matched to:', new_key, 'with score:', score, end='. ')
return new_key
if exact == -1:
if TRACE_DEEP: print('Other license key not in ScanCode: JUNK MATCH to:', new_key, 'with score:', score, end='. ')
return new_key
else:
if TRACE_DEEP: print('SKIPPED: Other license key weakly matched in ScanCode: JUNK MATCH to:', new_key, 'with score:', score, end='. ')
    def save_license(self, key, mapping, text):
        """
        Return a ScanCode License for `key` constructed from a `mapping`
        of attributes and a license `text`. Save the license metadata
        and its text in the `self.src_dir`.
        """
        # First try to remap the external key to an existing ScanCode key.
        new_key = None
        if self.matching_key == 'key':
            new_key = self.find_key(key, text)
        elif self.matching_key == 'spdx_license_key':
            new_key = self.find_key(mapping['spdx_license_key'], text)
        if not new_key:
            # no ScanCode equivalent found: keep the lowercased external key
            key = key.lower()
            if TRACE: print('  No Scancode key found. USING key as:', key)
        else:
            if key == new_key:
                if TRACE: print('  Scancode key found:', key)
            else:
                if TRACE: print('  Scancode key found:', new_key, 'CHANGED from:', key)
                key = new_key
        lic = License(key=key, src_dir=self.src_dir)
        # copy all provided attributes onto the License object
        for name, value in mapping.items():
            setattr(lic, name, value)
        # write the license text, then dump the metadata file to src_dir
        with codecs.open(lic.text_file, 'wb', encoding='utf-8')as tf:
            tf.write(text)
        lic.dump()
        return lic
def get_response(url, headers, params):
    """
    Perform a GET HTTP request at `url` with `headers` and `params` and
    return the decoded JSON body as native Python objects, preserving the
    order of JSON object keys.

    Raise an Exception on any non-OK HTTP status.
    """
    if TRACE_FETCH:
        print('==> Fetching URL: %(url)s' % locals())
    response = requests.get(url, headers=headers, params=params)
    status = response.status_code
    ok = requests.codes.ok  # @UndefinedVariable
    if status == ok:
        return response.json(object_pairs_hook=OrderedDict)
    raise Exception('Failed HTTP request for %(url)r: %(status)r' % locals())
def get_match(text):
    """
    Run license detection on `text` and return a tuple of:
        (matched license key or None, exactness flag, score)
    where the exactness flag is True for an exact match, False for a good
    ("ok") match, None for a weak match and -1 for a junk match.
    Return (None, None, 0) when there is no match at all and
    (None, None, None) when the best match scores too low to report.
    """
    idx = get_index()
    matches = list(idx.match(query_string=text, min_score=80))
    if not matches:
        return None, None, 0

    best = matches[0]
    query_len = len(best.query.whole_query_run().tokens)
    rule = best.rule
    key = rule.licenses[0]
    score = best.score()
    single_license = len(rule.licenses) == 1

    # exact: a unique, whole-license, hash-identical match covering the query
    if (single_license
            and len(matches) == 1
            and rule.is_license
            and best.matcher == '1-hash'
            and score == 100
            and best.qlen == query_len):
        return key, True, 100

    # ok: single license with very high coverage and score
    if single_license and best.coverage() > 95 and score > 95:
        return key, False, score

    # weak: single license with high-ish coverage and score
    if single_license and best.coverage() > 90 and score > 90:
        return key, None, score

    if score > 85:
        # junk match: reported, but too low quality to trust
        return key, -1, score

    return None, None, None
class SpdxSource(ExternalLicensesSource):
    """
    License source for the latest SPDX license list fetched from GitHub.
    """
    # external keys are matched against ScanCode SPDX keys
    matching_key = 'spdx_license_key'
    # attributes that merge_licenses is allowed to update on either side
    updatable_attributes = (
        'spdx_license_key',
        'other_urls',
        'is_deprecated',
        'is_exception',
        # NOT YET: 'standard_notice',
    )
    # attributes that are read but never synchronized
    non_updatable_attributes = (
        'short_name',
        'name',
        'notes',
    )
    def fetch_licenses(self):
        """
        Yield all the latest License object from the latest SPDX license list.
        Store the texts in the license_dir.
        """
        # get latest tag
        tags_url = 'https://api.github.com/repos/spdx/license-list-data/tags'
        tags = get_response(tags_url, headers={}, params={})
        tag = tags[0]['name']
        # fetch licenses and exceptions
        # note that exceptions data have -- weirdly enough -- a different schema
        zip_url = 'https://github.com/spdx/license-list-data/archive/%(tag)s.zip' % locals()
        if TRACE_FETCH: print('Fetching SPDX license data from:', zip_url)
        licenses_zip = fetch.download_url(zip_url, timeout=120)
        with zipfile.ZipFile(licenses_zip) as archive:
            for path in archive.namelist():
                # only the per-license/per-exception JSON detail files
                if not (path.endswith('.json')
                and ('/json/details/' in path or '/json/exceptions/' in path)):
                    continue
                if TRACE_FETCH: print('Loading license:', path)
                if path.endswith('+.json'):
                    # Skip the old plus licenses. We use them in
                    # ScanCode, but they are deprecated in SPDX.
                    continue
                details = json.loads(archive.read(path))
                lic = self._build_license(details)
                if lic:
                    yield lic
    def _build_license(self, mapping):
        """
        Return a ScanCode License object built from an SPDX license
        mapping, or None for licenses that are skipped (deprecated,
        composite or non-English).
        """
        spdx_license_key = mapping.get('licenseId') or mapping.get('licenseExceptionId')
        assert spdx_license_key
        spdx_license_key = spdx_license_key.strip()
        key = spdx_license_key.lower()
        deprecated = mapping.get('isDeprecatedLicenseId', False)
        if deprecated:
            # we use concrete keys for some plus/or later versions for
            # simplicity and override SPDX deprecation for these
            if key.endswith('+'):
                # 'gpl-1.0+', 'gpl-2.0+', 'gpl-3.0+',
                # 'lgpl-2.0+', 'lgpl-2.1+', 'lgpl-3.0+',
                # 'gfdl-1.1+', 'gfdl-1.2+', 'gfdl-1.3+'
                # 'agpl-3.0+'
                deprecated = False
            else:
                if key not in self.scancodes_by_spdx_key:
                    if TRACE: print('Skipping deprecated license not in ScanCode:', key)
                    return
        # TODO: Not yet available in ScanCode
        is_composite = key in self.composites_by_spdx_key
        if is_composite:
            # skip composite for now until they are properly handled in ScanCode
            if TRACE: print('Skipping composite license FOR NOW:', key)
            return
        # TODO: Not yet available in ScanCode
        is_foreign = key in self.non_english_by_spdx_key
        if is_foreign:
            if TRACE: print('Skipping NON-english license FOR NOW:', key)
            return
        # normalize the seeAlso URL list: drop empties and known junk entries
        other_urls = mapping.get('seeAlso', [])
        other_urls = (o for o in other_urls if o)
        other_urls = (o.strip() for o in other_urls)
        other_urls = (o for o in other_urls if o)
        # see https://github.com/spdx/license-list-data/issues/9
        junk_see_also = ('none', 'found')
        other_urls = (o for o in other_urls if o not in junk_see_also)
        other_urls = list(other_urls)
        # notes = mapping.get('licenseComments')
        # if notes and notes.strip():
        #     notes = 'Per SPDX.org, ' + ' '.join(notes.split())
        standard_notice = mapping.get('standardLicenseHeader')
        if standard_notice:
            standard_notice = standard_notice.strip()
        lic = dict(
            spdx_license_key=spdx_license_key,
            name=mapping['name'].strip(),
            is_deprecated=deprecated,
            is_exception=bool(mapping.get('licenseExceptionId')),
            other_urls=other_urls,
            # TODO: the formatting might need to be preserved
            standard_notice=standard_notice,
            # FIXME: Do we really want to carry notes over???
            # notes=notes,
            # FIXME: available in ScanCode but as an OSI URL
            # we should check if we have the osi_url when this flag is there
            # osi_url = mapping.get('isOsiApproved', False)
            # TODO: detect licenses on these texts to ensure they match?
            # TODO: add rule? and test license detection???
            # standard_template = mapping('standardLicenseTemplate')
            # exception_example
            # example = mapping.get('example')
        )
        text = mapping.get('licenseText') or mapping.get('licenseExceptionText')
        text = text.strip()
        return self.save_license(key, lic, text)
class DejaSource(ExternalLicensesSource):
    """
    License source for DejaCode licenses fetched through its API.
    """
    # DejaCode keys are matched directly against ScanCode keys
    matching_key = 'key'
    # attributes that merge_licenses is allowed to update on either side
    updatable_attributes = (
        'short_name',
        'name',
        'spdx_license_key',
        'homepage_url',
        'category',
        'owner',
        'text_urls',
        'osi_url',
        'faq_url',
        'other_urls',
        'is_deprecated',
        'is_exception',
        # NOT YET: 'standard_notice',
        # Not yet available in ScanCode
        # 'is_composite',
    )
    # attributes that are read but never synchronized
    non_updatable_attributes = (
        'notes',
    )
    def __init__(self, src_dir, match_text=False, match_approx=False,
                 api_base_url=None, api_key=None):
        # API credentials come from the arguments or from the environment
        super(DejaSource, self).__init__(src_dir, match_text, match_approx)
        self.api_base_url = api_base_url or os.environ.get('DEJACODE_API_URL', None)
        self.api_key = api_key or os.environ.get('DEJACODE_API_KEY', None)
        assert (self.api_key and self.api_base_url), (
            'You must set the DEJACODE_API_URL and DEJACODE_API_KEY ' +
            'environment variables before running this script.')
    def fetch_licenses(self):
        """
        Yield License objects fetched from the DejaCode licenses API,
        paginating 100 licenses at a time. Skipped licenses yield nothing.
        """
        api_url = '/'.join([self.api_base_url.rstrip('/'), 'licenses/'])
        for licenses in call_deja_api(api_url, self.api_key, paginate=100):
            for lic in licenses:
                dlic = self._build_license(lic)
                if dlic:
                    yield dlic
    def _build_license(self, mapping):
        """
        Return a ScanCode License object built from a DejaCode license
        mapping or None for skipped licenses.
        """
        key = mapping['key']
        # TODO: Not yet available in ScanCode
        is_composite = key in self.composites_by_key
        if is_composite:
            # skip composite for now until they are properly handled in ScanCode
            if TRACE: print('Skipping composite license FOR NOW:', key)
            return
        # TODO: Not yet available in ScanCode
        is_foreign = key in self.non_english_by_key
        if is_foreign:
            if TRACE: print('Skipping NON-english license FOR NOW:', key)
            return
        # these license are rare commercial license with no text and only a link
        # we ignore these
        dejacode_special_no_text = set([
            'alglib-commercial',
            'atlassian-customer-agreement',
            'dalton-maag-eula',
            'highsoft-standard-license-agreement-4.0',
            'monotype-tou',
            # junk duplicate of fsf-ap
            'laurikari',
        ])
        is_special = key in dejacode_special_no_text
        if is_special:
            # skip composite for now until they are properly handled in ScanCode
            if TRACE: print('Skipping special DejaCode license with NO TEXT FOR NOW:', key)
            return
        # DejaCode flags inactive licenses; treat them as deprecated
        deprecated = not mapping.get('is_active')
        if deprecated and key not in self.scancodes_by_key:
            if TRACE: print('Skipping deprecated license not in ScanCode:', key)
            return
        lic = dict(
            short_name=mapping['short_name'],
            name=mapping['name'],
            homepage_url=mapping['homepage_url'],
            category=mapping['category'],
            owner=mapping['owner_name'],
            # FIXME: we may not want to carry notes over???
            # lic.notes = mapping.notes
            spdx_license_key=mapping['spdx_license_key'],
            text_urls=mapping['text_urls'].splitlines(False),
            osi_url=mapping['osi_url'],
            faq_url=mapping['faq_url'],
            other_urls=mapping['other_urls'].splitlines(False),
            is_exception=mapping.get('is_exception', False),
            is_deprecated=deprecated,
            standard_notice=mapping['standard_notice'],
        )
        text = mapping['full_text']
        return self.save_license(key, lic, text)
    def check_owners(self):
        """
        Check that all ScanCode licenses have an owner that exists in
        DejaCode; print a report line for each mismatch.
        """
        downers = set()
        api_url = '/'.join([self.api_base_url.rstrip('/'), 'owners/'])
        for owners in call_deja_api(api_url, self.api_key, paginate=100):
            print('.')
            downers.update(o['name'] for o in owners)
        for lic in self.scancodes_by_key.values():
            if not lic.owner or lic.owner not in downers:
                print('ScanCode license with incorrect owner:', lic.key, ':', lic.owner)
        for lic in self.composites_by_key.values():
            if not lic.owner or lic.owner not in downers:
                print('ScanCode Composite license with incorrect owner:', lic.key, ':', lic.owner)
def call_deja_api(api_url, api_key, paginate=0, headers=None, params=None):
    """
    Yield lists of result mappings from the responses of calling the API at
    `api_url` authenticated with `api_key`. Raise an exception on failure.
    Pass `headers` and `params` mappings to the underlying request if
    provided.
    If `paginate` is non-zero, request `paginate` results per page and yield
    each page of results in turn until the API reports no "next" page;
    otherwise perform a single call and yield its results once.
    """
    headers = headers or {
        'Authorization': 'Token {}'.format(api_key),
        'Accept': 'application/json; indent=2',
    }
    # copy so a caller-provided mapping is never mutated by the
    # page_size bookkeeping below
    params = dict(params) if params else {}

    if paginate:
        assert isinstance(paginate, int)
        params['page_size'] = paginate

        first = True
        while True:
            response = get_response(api_url, headers, params)
            if first:
                first = False
                # use page_size only on the first call: the "next" URL
                # returned by the API already embeds the page size
                params.pop('page_size')
            yield response.get('results', [])

            api_url = response.get('next')
            if not api_url:
                break
    else:
        response = get_response(api_url, headers, params)
        yield response.get('results', [])
# Registry mapping the --source CLI choice to its ExternalLicensesSource class.
SOURCES = {
    'dejacode': DejaSource,
    'spdx': SpdxSource,
}
def merge_licenses(scancode_license, other_license, updatable_attributes):
    """
    Compare and update two License objects in-place given a sequence of
    `updatable_attributes`.
    Return a two-tuple of lists as:
        (scancode license updates, other license updates)
    Each list item is a three-tuple of:
        (attribute name, value before, value after)
    Raise an Exception when the two licenses do not have the same key.
    """
    scancode_updated = []

    def update_sc(_attrib, _sc_val, _o_val):
        # set the ScanCode side to the other side's value and log the change
        setattr(scancode_license, _attrib, _o_val)
        scancode_updated.append((_attrib, _sc_val, _o_val))

    other_updated = []

    def update_ot(_attrib, _sc_val, _o_val):
        # set the other side to the ScanCode value and log the change
        setattr(other_license, _attrib, _sc_val)
        other_updated.append((_attrib, _o_val, _sc_val))

    skey = scancode_license.key
    okey = other_license.key
    if skey != okey:
        raise Exception('Non mergeable licenses with different keys: %(skey)s <> %(okey)s' % locals())

    for attrib in updatable_attributes:
        sc_val = getattr(scancode_license, attrib)
        o_val = getattr(other_license, attrib)

        # for boolean flags, the other license wins. But only for True.
        # all our flags are False by default. Equal or True->False pairs
        # fall through to the generic handling below.
        if isinstance(sc_val, bool) and isinstance(o_val, bool):
            if sc_val is False and sc_val != o_val:
                update_sc(attrib, sc_val, o_val)
                continue

        if isinstance(sc_val, (list, tuple)) and isinstance(o_val, (list, tuple)):
            # compare on sets of non-empty, stripped values
            norm_sc_val = set(s for s in sc_val if s and s.strip())
            norm_o_val = set(s for s in o_val if s and s.strip())

            # special case for URL lists: compare against ALL ScanCode URL
            # fields so that only genuinely new URLs are added
            if attrib.endswith('_urls'):
                all_sc_urls = set(list(norm_sc_val)
                    + scancode_license.text_urls
                    + scancode_license.other_urls
                    + [scancode_license.homepage_url,
                       scancode_license.osi_url,
                       scancode_license.faq_url])
                all_sc_urls = set(u for u in all_sc_urls if u)
                new_other_urls = norm_o_val.difference(all_sc_urls)
                # add other urls to ScanCode
                combined = norm_sc_val | new_other_urls
                if set(norm_sc_val) != combined:
                    update_sc(attrib, sc_val, sorted(combined))
                # FIXME: FOR NOW WE DO NOT UPDATE THE OTHER SIDE with ScanCode URLs
            else:
                # merge ScanCode and other value lists
                combined = norm_sc_val | norm_o_val
                if combined != norm_sc_val:
                    update_sc(attrib, sc_val, sorted(combined))
                # FIXME: FOR NOW WE DO NOT UPDATE THE OTHER SIDE with ScanCode seqs
            continue

        if isinstance(sc_val, basestring) and isinstance(o_val, basestring):
            # keep the stripped and normalized spaces value
            # normalized spaces
            norm_sc_val = ' '.join(sc_val.split())
            norm_o_val = ' '.join(o_val.split())
            # Fix up each side with its normalized value. The update is
            # recorded BEFORE the local is overwritten so the change log
            # keeps the true pre-normalization "before" value (previously
            # the local was reassigned first, logging identical
            # before/after values).
            if sc_val != norm_sc_val:
                update_sc(attrib, sc_val, norm_sc_val)
                sc_val = norm_sc_val
            if o_val != norm_o_val:
                update_ot(attrib, norm_o_val, o_val)
                o_val = norm_o_val

        scancode_equals_other = sc_val == o_val
        if scancode_equals_other:
            continue

        other_is_empty = sc_val and not o_val
        if other_is_empty:
            update_ot(attrib, sc_val, o_val)
            continue

        scancode_is_empty = not sc_val and o_val
        if scancode_is_empty:
            update_sc(attrib, sc_val, o_val)
            continue

        # on difference, the other license wins
        if sc_val != o_val:
            update_sc(attrib, sc_val, o_val)
            continue

    return scancode_updated, other_updated
def synchronize_licenses(external_source):
    """
    Update the ScanCode licenses data and texts in-place (e.g. in their
    current storage directory) from an `external_source`
    ExternalLicensesSource.
    New licenses are created in external_source.new_dir
    Modified external licenses are updated in external_source.update_dir
    The process is to:
    1. Fetch external license using the `external_source` and store these.
    2. Compare and update ScanCode licenses with these external licenses.
    """
    # mappings of key -> License
    scancodes_by_key = external_source.scancodes_by_key
    others_by_key = external_source.get_licenses()
    # track changes with sets of license keys
    same = set()
    scancodes_added = set()
    others_added = set()
    scancodes_changed = set()
    others_changed = set()
    # FIXME: track deprecated
    # removed = set()
    # 1. iterate scancode licenses and compare with other
    for sc_key, sc_license in scancodes_by_key.items():
        if not TRACE:print('.', end='')
        # does this scancode key exists in others?
        ot_license = others_by_key.get(sc_key)
        if not ot_license:
            # ScanCode-only license: clone it to the external "new" dir
            if TRACE: print('ScanCode license key not in Other: created new other:', sc_key)
            ot_license = sc_license.relocate(external_source.new_dir)
            others_added.add(ot_license.key)
            others_by_key[ot_license.key] = ot_license
            continue
        # the key exist in scancode
        sc_updated, ot_updated = merge_licenses(
            sc_license, ot_license, external_source.updatable_attributes)
        if not sc_updated and not ot_updated:
            # if TRACE: print('Licenses attributes are identical:', sc_license.key)
            same.add(sc_license.key)
        if sc_updated:
            if TRACE: print('ScanCode license updated:', sc_license.key, end='. Attributes: ')
            for attrib, oldv, newv in sc_updated:
                if TRACE: print('  %(attrib)s: %(oldv)r -> %(newv)r' % locals())
            scancodes_changed.add(sc_license.key)
        if ot_updated:
            if TRACE: print('Other license updated:', sc_license.key, end='. Attributes: ')
            for attrib, oldv, newv in ot_updated:
                if TRACE: print('  %(attrib)s: %(oldv)r -> %(newv)r' % locals())
            others_changed.add(sc_license.key)
    # 2. iterate other licenses and compare with ScanCode
    for o_key, ot_license in others_by_key.items():
        # does this key exists in scancode?
        sc_license = scancodes_by_key.get(o_key)
        if sc_license:
            # we already dealt with this in the first loop
            continue
        if not TRACE:print('.', end='')
        # Create a new ScanCode license
        sc_license = ot_license.relocate(licensedcode.licenses_data_dir, o_key)
        scancodes_added.add(sc_license.key)
        scancodes_by_key[sc_license.key] = sc_license
        if TRACE: print('Other license key not in ScanCode:', ot_license.key, 'created in ScanCode.')
    # finally write changes
    for k in scancodes_changed | scancodes_added:
        scancodes_by_key[k].dump()
    for k in others_changed | others_added:
        others_by_key[k].dump()
    # TODO: at last: print report of incorrect OTHER licenses to submit
    # updates eg. make API calls to DejaCode to create or update
    # licenses and submit review request e.g. submit requests to SPDX
    # for addition
    print()
    print('#####################################################')
    print('Same licenses:', len(same))
    print('ScanCode: Added :', len(scancodes_added))
    print('ScanCode: Changed:', len(scancodes_changed))
    print('External: Added :', len(others_added))
    print('External: Changed:', len(others_changed))
    print('#####################################################')
    # dump added external licenses once more and flag missing owners
    for key in sorted(others_added):
        lic = others_by_key[key]
        lic.dump()
        if not lic.owner:
            print('New other license without owner:', key)
@click.command()
@click.argument('license_dir', type=click.Path(), metavar='DIR')
@click.option('-s', '--source', type=click.Choice(SOURCES), help='Select an external license source.')
@click.option('-m', '--match-text', is_flag=True, default=False, help='Match external license texts with license detection to find a matching ScanCode license.')
@click.option('-a', '--match-approx', is_flag=True, default=False, help='Include approximate license detection matches for finding a matching ScanCode license.')
@click.option('-c', '--clean', is_flag=True, default=False, help='Clean directories (original, update, new, del) if they exist.')
@click.option('-t', '--trace', is_flag=True, default=False, help='Print execution trace.')
@click.help_option('-h', '--help')
def cli(license_dir, source, trace, clean, match_text=False, match_approx=False):
    """
    Synchronize ScanCode licenses with an external license source.

    DIR is the directory to store (or load) external licenses.

    When using the dejacode source you need to set the
    'DEJACODE_API_URL' and 'DEJACODE_API_KEY' environment variables with
    your credentials.
    """
    global TRACE
    TRACE = trace
    if clean:
        # remove the license dir and its derived sibling directories
        fileutils.delete(license_dir)
        fileutils.delete(license_dir.rstrip('/\\') + '-new')
        fileutils.delete(license_dir.rstrip('/\\') + '-update')
        fileutils.delete(license_dir.rstrip('/\\') + '-del')
    source_cls = SOURCES[source]
    source = source_cls(license_dir, match_text, match_approx)
    synchronize_licenses(source)
    print()


if __name__ == '__main__':
    cli()
|
yashdsaraf/scancode-toolkit
|
etc/scripts/synclic.py
|
Python
|
apache-2.0
| 32,006
|
[
"Dalton",
"VisIt"
] |
b2df38265ad0045aa78a04d216632e441f2f36b6ba359bd578f9ed88af463dc7
|
import numpy as np
from ase.atoms import Atoms
from ase.units import Hartree
from ase.parallel import paropen
from ase.calculators.singlepoint import SinglePointCalculator
def write_xsf(fileobj, images, data=None):
    """Write Atoms images (and optional 3D grid data) in XCrySDen XSF format.

    Parameters:
    fileobj: a file object, or a filename string opened via paropen.
    images: a single Atoms object or a list/tuple of them. The atomic
        numbers and periodicity of the FIRST image are used for all images.
    data: optional 3D array appended as a BEGIN_BLOCK_DATAGRID_3D section;
        complex data is written as its absolute value.
    """
    if isinstance(fileobj, str):
        fileobj = paropen(fileobj, 'w')
    if not isinstance(images, (list, tuple)):
        images = [images]
    fileobj.write('ANIMSTEPS %d\n' % len(images))
    numbers = images[0].get_atomic_numbers()
    pbc = images[0].get_pbc()
    # XSF structure keyword is derived from the periodicity flags
    if pbc[2]:
        fileobj.write('CRYSTAL\n')
    elif pbc[1]:
        fileobj.write('SLAB\n')
    elif pbc[0]:
        fileobj.write('POLYMER\n')
    else:
        fileobj.write('MOLECULE\n')
    for n, atoms in enumerate(images):
        if pbc.any():
            fileobj.write('PRIMVEC %d\n' % (n + 1))
            cell = atoms.get_cell()
            for i in range(3):
                fileobj.write(' %.14f %.14f %.14f\n' % tuple(cell[i]))
        fileobj.write('PRIMCOORD %d\n' % (n + 1))
        # Get the forces if it's not too expensive:
        calc = atoms.get_calculator()
        if (calc is not None and
            (hasattr(calc, 'calculation_required') and
             not calc.calculation_required(atoms,
                                           ['energy', 'forces', 'stress']))):
            forces = atoms.get_forces()
        else:
            forces = None
        pos = atoms.get_positions()
        fileobj.write(' %d 1\n' % len(pos))
        # one line per atom: number, position and (optionally) force
        for a in range(len(pos)):
            fileobj.write(' %2d' % numbers[a])
            fileobj.write(' %20.14f %20.14f %20.14f' % tuple(pos[a]))
            if forces is None:
                fileobj.write('\n')
            else:
                fileobj.write(' %20.14f %20.14f %20.14f\n' % tuple(forces[a]))
    if data is None:
        return
    # Append the volumetric datagrid section for the LAST image's cell.
    fileobj.write('BEGIN_BLOCK_DATAGRID_3D\n')
    fileobj.write(' data\n')
    fileobj.write(' BEGIN_DATAGRID_3Dgrid#1\n')
    data = np.asarray(data)
    if data.dtype == complex:
        data = np.abs(data)
    shape = data.shape
    fileobj.write(' %d %d %d\n' % shape)
    cell = atoms.get_cell()
    origin = np.zeros(3)
    # shift the grid origin along non-periodic directions
    for i in range(3):
        if not pbc[i]:
            origin += cell[i] / shape[i]
    fileobj.write(' %f %f %f\n' % tuple(origin))
    for i in range(3):
        fileobj.write(' %f %f %f\n' %
                      tuple(cell[i] * (shape[i] + 1) / shape[i]))
    for x in range(shape[2]):
        for y in range(shape[1]):
            fileobj.write(' ')
            fileobj.write(' '.join(['%f' % d for d in data[x, y]]))
            fileobj.write('\n')
        fileobj.write('\n')
    fileobj.write(' END_DATAGRID_3D\n')
    fileobj.write('END_BLOCK_DATAGRID_3D\n')
def read_xsf(fileobj, index=-1, read_data=True):
    """Read images (and optionally 3D grid data) from an XCrySDen XSF file.

    Parameters:
    fileobj: a file object or a filename string.
    index: which image to return (default: the last one).
    read_data: if True, also parse the BEGIN_BLOCK_DATAGRID_3D section and
        return a (data, image) tuple; otherwise return just the image.
    """
    if isinstance(fileobj, str):
        fileobj = open(fileobj)

    readline = fileobj.readline

    # skip leading comment lines (starting with '#')
    while True:
        line = readline()
        if line[0] != '#':
            line = line.strip()
            break

    if 'ANIMSTEPS' in line:
        nimages = int(line.split()[1])
        line = readline().strip()
    else:
        nimages = 1

    # the structure keyword fixes the periodic boundary conditions
    if 'CRYSTAL' in line:
        pbc = True
    elif 'SLAB' in line:
        pbc = (True, True, False)
    elif 'POLYMER' in line:
        pbc = (True, False, False)
    else:
        pbc = False

    images = []
    for n in range(nimages):
        cell = None
        if pbc:
            line = readline().strip()
            assert 'PRIMVEC' in line
            cell = []
            for i in range(3):
                cell.append([float(x) for x in readline().split()])

        line = readline().strip()
        assert 'PRIMCOORD' in line

        natoms = int(readline().split()[0])
        numbers = []
        positions = []
        for a in range(natoms):
            line = readline().split()
            numbers.append(int(line[0]))
            positions.append([float(x) for x in line[1:]])

        positions = np.array(positions)
        if len(positions[0]) == 3:
            forces = None
        else:
            # BUG FIX: extract the force columns BEFORE truncating the
            # positions array. Previously positions was sliced to its
            # first 3 columns first, so positions[:, 3:] was always an
            # empty array and the read forces were lost.
            forces = positions[:, 3:] * Hartree
            positions = positions[:, :3]

        image = Atoms(numbers, positions, cell=cell, pbc=pbc)

        if forces is not None:
            image.set_calculator(SinglePointCalculator(None, forces, None,
                                                       None, image))
        images.append(image)

    if read_data:
        line = readline()
        assert 'BEGIN_BLOCK_DATAGRID_3D' in line
        line = readline()
        assert 'BEGIN_DATAGRID_3D' in line

        shape = [int(x) for x in readline().split()]
        start = [float(x) for x in readline().split()]

        # skip the three grid spanning-vector lines
        for i in range(3):
            readline()

        n_data = shape[0] * shape[1] * shape[2]
        data = np.array([float(readline())
                         for s in range(n_data)]).reshape(shape[::-1])
        data = np.swapaxes(data, 0, 2)

        return data, images[index]

    return images[index]
|
JConwayAWT/PGSS14CC
|
lib/python/multimetallics/ase/io/xsf.py
|
Python
|
gpl-2.0
| 5,028
|
[
"ASE",
"CRYSTAL"
] |
878b183d639f63a5c0eedbfa8ea0cfa110a46fb5d902eca59c865ee835f9f934
|
"""
This file is part of the KnownSourceMatcher.
KnownSourceMatcher is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
KnownSourceMatcher is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with KnownSourceMatcher. If not, see <http://www.gnu.org/licenses/>.
File name: ProfileOperationsInterface.py
Created: February 10th, 2014
Author: Rob Lyon
Contact: rob@scienceguyrob.com or robert.lyon@postgrad.manchester.ac.uk
Web: <http://www.scienceguyrob.com> or <http://www.cs.manchester.ac.uk>
or <http://www.jb.man.ac.uk>
This code runs on python 2.4 or later.
This file defines an interface for the operations that can be run on pulsar profile
data loaded from phcx or pfd files.
"""
# Scipy/numpy imports.
from numpy import ceil
from scipy import stats
# Custom file imports.
from Utilities import Utilities
# ****************************************************************************************************
#
# CLASS DEFINITION
#
# ****************************************************************************************************
class ProfileOperationsInterface(Utilities):
    """
    An interface that defines the functions which must be implemented in order
    to produce candidate scores.
    If you want to create a new score generation method simply create a sub-class of this file,
    and implement the required functions. This makes the code much more modular.
    """
    # ****************************************************************************************************
    #
    # Functions.
    #
    # ****************************************************************************************************
    def __init__(self,debugFlag):
        # debugFlag turns on the verbose prints (e.g. in freedmanDiaconisRule)
        Utilities.__init__(self,debugFlag)
    # ****************************************************************************************************
    #
    # Sinusoid Fittings (abstract: implement in a sub-class)
    #
    # ****************************************************************************************************
    def getSinusoidFittings(self,profile):
        raise NotImplementedError("Please Implement this method")
    def fitSineSqr(self,yData,maxima):
        raise NotImplementedError("Please Implement this method")
    # ****************************************************************************************************
    #
    # Gaussian Fittings (abstract: implement in a sub-class)
    #
    # ****************************************************************************************************
    def getGaussianFittings(self,profile):
        raise NotImplementedError("Please Implement this method")
    def fitGaussian(self,xData,yData):
        raise NotImplementedError("Please Implement this method")
    def fitGaussianFixedWidthBins(self,xData,yData,bins):
        raise NotImplementedError("Please Implement this method")
    def fitGaussianWithBackground(self,xData,yData):
        raise NotImplementedError("Please Implement this method")
    def fitGaussianT1(self,yData):
        raise NotImplementedError("Please Implement this method")
    def fitDoubleGaussianT2(self,yData):
        raise NotImplementedError("Please Implement this method")
    def fitDoubleGaussian(self,yData):
        raise NotImplementedError("Please Implement this method")
    def fitDoubleGaussianWithBackground(self,yData,p0):
        raise NotImplementedError("Please Implement this method")
    # ****************************************************************************************************
    #
    # Candidate Parameter Functions (abstract: implement in a sub-class)
    #
    # ****************************************************************************************************
    def getCandidateParameters(self,profile):
        raise NotImplementedError("Please Implement this method")
    # ****************************************************************************************************
    #
    # DM Curve Fitting Functions (abstract: implement in a sub-class)
    #
    # ****************************************************************************************************
    def getDMFittings(self,data):
        raise NotImplementedError("Please Implement this method")
    # ****************************************************************************************************
    #
    # Sub-band Functions (abstract: implement in a sub-class)
    #
    # ****************************************************************************************************
    def getSubbandParameters(self,data=None,profile=None):
        raise NotImplementedError("Please Implement this method")
    # ****************************************************************************************************
    #
    # Utility Functions (concrete, shared by all sub-classes)
    #
    # ****************************************************************************************************
    def freedmanDiaconisRule(self,data):
        """
        Calculate number of bins to use in histogram according to this rule.

        Bin width is 2*IQR*n^(-1/3) (the -0.3333333 exponent below
        approximates -1/3).

        Parameters:
        data    -    a numpy.ndarray containing the data for which a histogram is to be computed.

        Returns:
        The 'optimal' number of bins for the histogram.
        """
        # interquartile range, Q3-Q1....
        iqr = stats.scoreatpercentile(data, 75) - stats.scoreatpercentile(data, 25)
        binwidth = 2 * iqr * pow(len(data), -0.3333333)
        # fallback when the IQR is zero or negative; 60 appears to be an
        # empirical default here -- TODO confirm its origin
        if(binwidth<=0):
            binwidth=60
        # calculate n bins
        rnge = max(data) - min(data)
        nbins = ceil( rnge / binwidth )
        if(self.debug):
            print "\tIQR: ",iqr
            print "\tBin Width: ",binwidth
            print "\tRange: ",rnge
            print "\tNumber of bins: ", nbins
        return int(nbins)
    # ****************************************************************************************************
    def getDerivative(self,yData):
        """
        Obtains the derivative for the y data points by simply performing,

        dy = y[i] - y[i+1] .

        Parameters:
        yData    -    a numpy.ndarray containing data (y-axis data).

        Returns:
        The changes in y, dy, for each point in yData as an array.
        """
        dy = []
        dataPoints = len(yData)-1 # Since there are n data points, with only n-1 line segments joining them.
        for i in range(dataPoints):
            dy.append(yData[i] - yData[i+1])
        return dy
    # ****************************************************************************************************
    def scale(self,x,min_,max_,newMin,newMax):
        """
        Re-scales a data value occurring in the range min and max, to
        a new data range given by newMin and newMax.

        Parameter:
        x       -    the data value to rescale.
        min_    -    the minimum value of the original data range for x.
        max_    -    the maximum value of the original data range for x.
        newMin  -    the minimum value of the new data range for x.
        newMax  -    the maximum value of the new data range for x.

        Returns:
        x rescaled into [newMin, newMax]. A scalar input yields a scalar;
        a numpy array is rescaled elementwise.
        """
        x = (newMin * (1-( (x-min_) /( max_-min_ )))) + (newMax * ( (x-min_) /( max_-min_ ) ))
        return x
    # ****************************************************************************************************
|
scienceguyrob/KnownSourceMatcher
|
KnownSourceMatcher/src/Match/ProfileOperationsInterface.py
|
Python
|
gpl-2.0
| 7,935
|
[
"Gaussian"
] |
79df2c390c685a5e768b0fe05f62fca4cea1986e0ff16afad486a168fba37ea8
|
"""
ClangVisitor
"""
import os
class ClangVisitor(object):
    """Generic walker over libclang AST cursors.

    Subclasses hook into the traversal by defining ``enter_<KIND>`` and/or
    ``exit_<KIND>`` methods, named after ``cursor.kind.name``. When an
    ``enter_`` hook returns a non-None visitor, that visitor takes over the
    current cursor via its ``run`` method. Only children whose source file
    lies under one of the configured path prefixes are recursed into.
    """

    def __init__(self):
        # absolute path prefixes limiting which files get traversed
        self.prefixes = []

    def run(self, cursor):
        """Start a traversal rooted at ``cursor``."""
        self.visit(cursor)

    def set_prefixes(self, prefixes):
        """Restrict traversal to files under the given path prefixes."""
        self.prefixes = prefixes

    def is_in_prefixes(self, path):
        """Return True when ``path`` starts with any configured prefix.

        An empty prefix list matches nothing (so nothing is recursed into).
        """
        if not self.prefixes:
            return False
        return any(path.startswith(prefix) for prefix in self.prefixes)

    def visit(self, cursor):
        """Visit ``cursor``, dispatching hooks and recursing into children."""
        kind_name = cursor.kind.name
        enter_hook = getattr(self, 'enter_' + kind_name, None)
        if enter_hook is not None:
            delegate = enter_hook(cursor)
            if delegate is not None:
                delegate.run(cursor)
        for child in cursor.get_children():
            location_file = child.location.file
            if not location_file:
                continue
            if self.is_in_prefixes(os.path.abspath(location_file.name)):
                self.visit(child)
        exit_hook = getattr(self, 'exit_' + kind_name, None)
        if exit_hook is not None:
            exit_hook(cursor)
|
svperbeast/plain_data_companion
|
src/visitors/clang_visitor.py
|
Python
|
mit
| 1,033
|
[
"VisIt"
] |
7221022bc7782eb34b52c03fe8863c48105ec720e9deb4b32d860cfddb7890f2
|
# This file is part of PyEMMA.
#
# Copyright (c) 2015, 2014 Computational Molecular Biology Group, Freie Universitaet Berlin (GER)
#
# PyEMMA is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
Test the get_frames_from_file by comparing
the direct, sequential retrieval of frames via mdtraj.load_frame() vs
the retrival via save_trajs
@author: gph82, clonker
'''
from __future__ import absolute_import
import pkg_resources
import unittest
import os
from numpy.random import randint
from numpy import floor, allclose
import numpy as np
import mdtraj as md
from pyemma.coordinates.data.util.frames_from_file import frames_from_files as _frames_from_file
from pyemma.coordinates.data.util.reader_utils import compare_coords_md_trajectory_objects
class TestFramesFromFile(unittest.TestCase):
def setUp(self):
self.eps = 1e-10
path = pkg_resources.resource_filename(__name__, 'data') + os.path.sep
self.pdbfile = os.path.join(path, 'bpti_ca.pdb')
self.trajfiles = os.path.join(path, 'bpti_mini.xtc')
# Create of frames to be retrieved from trajfiles
self.n_frames = 50
self.frames = randint(0, high = 100, size = self.n_frames)
self.chunksize = 30
self.mdTrajectory = md.load(self.pdbfile)
def test_returns_trajectory(self):
assert isinstance(_frames_from_file(self.trajfiles, self.pdbfile, self.frames),
md.Trajectory)
def test_gets_the_right_frames_no_stride_no_chunk(self):
# I am calling this "no_chunk" because chunksize = int(1e3) will force frames_from_file to load one single chunk
traj_test = _frames_from_file(self.trajfiles, self.pdbfile, self.frames, chunksize = int(1e3), verbose=False)
traj_ref = md.load(self.trajfiles, top = self.pdbfile)[self.frames]
(found_diff, errmsg) = compare_coords_md_trajectory_objects(traj_test, traj_ref, atom=0, mess=False)
self.assertFalse(found_diff, errmsg)
def test_gets_the_right_frames_no_stride_with_chunk(self):
traj_test = _frames_from_file(self.trajfiles, self.pdbfile, self.frames, chunksize=self.chunksize, verbose = False)
traj_ref = md.load(self.trajfiles, top=self.pdbfile)[self.frames]
(found_diff, errmsg) = compare_coords_md_trajectory_objects(traj_test, traj_ref, atom = 0, mess = False)
self.assertFalse(found_diff, errmsg)
def test_gets_the_right_frames_with_stride_no_chunk(self):
# I am calling this "no_chunk" because chunksize = int(1e3) will force frames_from_file to load one single chunk
for stride in [2, 5, 10]:
# Make sure we don't overshoot the number of available frames (100)
frames = randint(0, high=floor(100 / stride), size=self.n_frames)
traj_test = _frames_from_file(self.trajfiles, self.pdbfile, frames, stride = stride, verbose=False)
traj_ref = md.load(self.trajfiles, top=self.pdbfile, stride = stride)[frames]
(found_diff, errmsg) = compare_coords_md_trajectory_objects(traj_test, traj_ref, atom=0, mess=False)
self.assertFalse(found_diff, errmsg)
def test_gets_the_right_frames_with_stride_with_chunk(self):
for stride in [2, 3, 5, 6, 10, 15]:
# Make sure we don't overshoot the number of available frames (100)
frames = randint(0, high = floor(100/stride), size = self.n_frames)
traj_test = _frames_from_file(self.trajfiles, self.pdbfile, frames,
chunksize=self.chunksize,
stride = stride,
verbose=False)
traj_ref = md.load(self.trajfiles, top=self.pdbfile, stride = stride)[frames]
(found_diff, errmsg) = compare_coords_md_trajectory_objects(traj_test, traj_ref, atom=0, mess=False)
self.assertFalse(found_diff, errmsg)
def test_gets_the_right_frames_with_stride_with_chunk_mdTrajectory_input(self):
for stride in [2, 3, 5, 6, 10, 15]:
# Make sure we don't overshoot the number of available frames (100)
frames = randint(0, high = floor(100/stride), size = self.n_frames)
traj_test = _frames_from_file(self.trajfiles, self.mdTrajectory, frames,
chunksize=self.chunksize,
stride = stride,
verbose=False)
traj_ref = md.load(self.trajfiles, top=self.pdbfile, stride = stride)[frames]
(found_diff, errmsg) = compare_coords_md_trajectory_objects(traj_test, traj_ref, atom=0, mess=False)
self.assertFalse(found_diff, errmsg)
def test_gets_the_right_frames_with_stride_with_chunk_mdTopology_input(self):
for stride in [2, 3, 5, 6, 10, 15]:
# Make sure we don't overshoot the number of available frames (100)
frames = randint(0, high = floor(100/stride), size = self.n_frames)
traj_test = _frames_from_file(self.trajfiles, self.mdTrajectory.top, frames,
chunksize=self.chunksize,
stride = stride,
verbose=False)
traj_ref = md.load(self.trajfiles, top=self.pdbfile, stride = stride)[frames]
(found_diff, errmsg) = compare_coords_md_trajectory_objects(traj_test, traj_ref, atom=0, mess=False)
self.assertFalse(found_diff, errmsg)
def test_gets_the_right_frames_with_stride_with_copy(self):
for stride in [2, 3, 5, 6, 10, 15]:
# Make sure we don't overshoot the number of available frames (100)
frames = randint(0, high = floor(100/stride), size = self.n_frames)
traj_test = _frames_from_file(self.trajfiles, self.pdbfile, frames,
chunksize=self.chunksize,
stride = stride,
verbose=False,
copy_not_join=True
)
traj_ref = md.load(self.trajfiles, top=self.pdbfile, stride = stride)[frames]
(found_diff, errmsg) = compare_coords_md_trajectory_objects(traj_test, traj_ref, atom=0, mess=False)
self.assertFalse(found_diff, errmsg)
assert allclose(traj_test.unitcell_lengths, traj_ref.unitcell_lengths)
assert allclose(traj_test.unitcell_angles, traj_ref.unitcell_angles)
def test_trajs_larger_than_frame_index(self):
""" file list is larger than largest traj file """
from pyemma.coordinates.tests.util import create_traj, get_top
files = [create_traj(length=10)[0] for _ in range(20)]
inds = np.vstack((np.arange(20), np.arange(20))).T
with self.assertRaises(ValueError) as cm:
_frames_from_file(files, top=get_top(), frames=inds)
import re
matches = re.match(".*10\).*is larger than trajectory length.*\= 10", cm.exception.args[0])
assert matches
def test_pass_reader(self):
from pyemma.coordinates import source
reader = source(self.trajfiles, top=self.pdbfile)
reader.in_memory=True
inds = np.vstack((np.random.randint(0,1), np.random.randint(0, 100))).T
traj_test = _frames_from_file(reader.filenames, self.pdbfile, inds, reader=reader)
if __name__ == "__main__":
unittest.main()
|
marscher/PyEMMA
|
pyemma/coordinates/tests/test_frames_from_file.py
|
Python
|
lgpl-3.0
| 8,147
|
[
"MDTraj"
] |
1e67926aa3ca6973f778ae393e5ef1bdac64f6cd90653900f05ee3ad6ab8d262
|
try: paraview.simple
except: from paraview.simple import *
paraview.simple._DisableFirstRenderCameraReset()
ImagePermute()
|
jeromevelut/Peavip
|
Testing/ImagePermute.py
|
Python
|
gpl-3.0
| 125
|
[
"ParaView"
] |
4b1ca4ed39ac8b27d15b26613fcf26610ea04bbda7bdcede8f7e5a0197466300
|
"""
Testing xml sample from noaa catalog: http://www.esrl.noaa.gov/psd/thredds/catalog.xml
"""
import pytest
from threddsclient import read_xml
def test_noaa_catalog():
xml = """
<catalog xmlns="http://www.unidata.ucar.edu/namespaces/thredds/InvCatalog/v1.0" xmlns:xlink="http://www.w3.org/1999/xlink" name="THREDDS PSD Test Catalog" version="1.0.1"> # noqa
<service name="all" serviceType="Compound" base="">
<service name="odap" serviceType="OPENDAP" base="/psd/thredds/dodsC/" />
<service name="http" serviceType="HTTPServer" base="/psd/thredds/fileServer/" />
<service name="wcs" serviceType="WCS" base="/psd/thredds/wcs/" />
<service name="wms" serviceType="WMS" base="/psd/thredds/wms/" />
</service>
<catalogRef name="" xlink:href="/psd/thredds/catalog/Datasets/catalog.xml" xlink:title="Datasets">
<metadata inherited="true">
<serviceName>all</serviceName>
<dataType>GRID</dataType>
</metadata>
<property name="DatasetScan" value="true" />
</catalogRef>
<catalogRef xlink:href="aggregations.xml" xlink:title="Aggregations" name="" />
</catalog>
"""
cat = read_xml(xml, 'http://example.test/catalog.xml')
assert cat.name == "THREDDS PSD Test Catalog"
assert cat.services[0].name == 'all'
assert cat.services[0].service_type == 'Compound'
assert cat.services[0].url == 'http://example.test/catalog.xml'
assert cat.services[0].services[0].name == 'odap'
assert cat.services[0].services[0].service_type == 'OPENDAP'
assert cat.services[0].services[0].url == 'http://example.test/psd/thredds/dodsC/'
assert cat.references[0].name == "Datasets"
assert cat.references[1].name == "Aggregations"
assert len(cat.flat_datasets()) == 0
assert len(cat.flat_references()) == 2
def test_noaa_datasets():
xml = """
<catalog xmlns="http://www.unidata.ucar.edu/namespaces/thredds/InvCatalog/v1.0" xmlns:xlink="http://www.w3.org/1999/xlink" version="1.0.1"> # noqa
<service name="all" serviceType="Compound" base="">
<service name="odap" serviceType="OPENDAP" base="/psd/thredds/dodsC/" />
<service name="http" serviceType="HTTPServer" base="/psd/thredds/fileServer/" />
<service name="wcs" serviceType="WCS" base="/psd/thredds/wcs/" />
<service name="wms" serviceType="WMS" base="/psd/thredds/wms/" />
</service>
<dataset name="Datasets" ID="Datasets">
<metadata inherited="true">
<serviceName>all</serviceName>
<dataType>GRID</dataType>
</metadata>
<catalogRef xlink:href="ncep.reanalysis/catalog.xml" xlink:title="ncep.reanalysis" ID="Datasets/ncep.reanalysis" name="" />
<catalogRef xlink:href="ncep.reanalysis.dailyavgs/catalog.xml" xlink:title="ncep.reanalysis.dailyavgs" ID="Datasets/ncep.reanalysis.dailyavgs" name="" />
<catalogRef xlink:href="ncep.reanalysis2/catalog.xml" xlink:title="ncep.reanalysis2" ID="Datasets/ncep.reanalysis2" name="" />
<catalogRef xlink:href="ncep.reanalysis2.dailyavgs/catalog.xml" xlink:title="ncep.reanalysis2.dailyavgs" ID="Datasets/ncep.reanalysis2.dailyavgs" name="" />
</dataset>
</catalog>
"""
cat = read_xml(xml, 'http://example.test/catalog.xml')
assert cat.services[0].services[1].name == 'http'
assert cat.services[0].services[1].service_type == 'HTTPServer'
assert cat.services[0].services[1].url == 'http://example.test/psd/thredds/fileServer/'
assert cat.datasets[0].name == "Datasets"
assert cat.datasets[0].content_type == "application/directory"
assert cat.datasets[0].references[0].name == "ncep.reanalysis"
assert cat.datasets[0].references[0].url == "http://example.test/ncep.reanalysis/catalog.xml"
def test_noaa_datasets_dailyavgs():
xml = """
<catalog xmlns="http://www.unidata.ucar.edu/namespaces/thredds/InvCatalog/v1.0" xmlns:xlink="http://www.w3.org/1999/xlink" version="1.0.1"> # noqa
<service name="all" serviceType="Compound" base="">
<service name="odap" serviceType="OPENDAP" base="/psd/thredds/dodsC/" />
<service name="http" serviceType="HTTPServer" base="/psd/thredds/fileServer/" />
<service name="wcs" serviceType="WCS" base="/psd/thredds/wcs/" />
<service name="wms" serviceType="WMS" base="/psd/thredds/wms/" />
</service>
<dataset name="ncep.reanalysis2.dailyavgs" ID="Datasets/ncep.reanalysis2.dailyavgs">
<metadata inherited="true">
<serviceName>all</serviceName>
<dataType>GRID</dataType>
</metadata>
<catalogRef xlink:href="gaussian_grid/catalog.xml" xlink:title="gaussian_grid" ID="Datasets/ncep.reanalysis2.dailyavgs/gaussian_grid" name="" />
<catalogRef xlink:href="pressure/catalog.xml" xlink:title="pressure" ID="Datasets/ncep.reanalysis2.dailyavgs/pressure" name="" />
<catalogRef xlink:href="surface/catalog.xml" xlink:title="surface" ID="Datasets/ncep.reanalysis2.dailyavgs/surface" name="" />
</dataset>
</catalog>
"""
cat = read_xml(xml, 'http://example.test/catalog.xml')
assert cat.services[0].services[3].name == 'wms'
assert cat.services[0].services[3].service_type == 'WMS'
assert cat.services[0].services[3].url == 'http://example.test/psd/thredds/wms/'
assert cat.datasets[0].name == "ncep.reanalysis2.dailyavgs"
assert cat.datasets[0].ID == "Datasets/ncep.reanalysis2.dailyavgs"
assert cat.datasets[0].url == "http://example.test/catalog.xml?dataset=Datasets/ncep.reanalysis2.dailyavgs"
assert cat.datasets[0].content_type == "application/directory"
assert len(cat.datasets[0].datasets) == 0
assert len(cat.datasets[0].references) == 3
assert cat.datasets[0].references[2].name == "surface"
assert cat.datasets[0].references[2].url == "http://example.test/surface/catalog.xml"
assert len(cat.flat_datasets()) == 0
assert len(cat.flat_references()) == 3
def test_noaa_datasets_dailyavg_surface():
xml = """
<catalog xmlns="http://www.unidata.ucar.edu/namespaces/thredds/InvCatalog/v1.0" xmlns:xlink="http://www.w3.org/1999/xlink" version="1.0.1"> # noqa
<service name="all" serviceType="Compound" base="">
<service name="odap" serviceType="OPENDAP" base="/psd/thredds/dodsC/" />
<service name="http" serviceType="HTTPServer" base="/psd/thredds/fileServer/" />
<service name="wcs" serviceType="WCS" base="/psd/thredds/wcs/" />
<service name="wms" serviceType="WMS" base="/psd/thredds/wms/" />
</service>
<dataset name="surface" ID="Datasets/ncep.reanalysis2.dailyavgs/surface">
<metadata inherited="true">
<serviceName>all</serviceName>
<dataType>GRID</dataType>
</metadata>
<dataset name="mslp.1980.nc" ID="Datasets/ncep.reanalysis2.dailyavgs/surface/mslp.1980.nc" urlPath="Datasets/ncep.reanalysis2.dailyavgs/surface/mslp.1980.nc">
<dataSize units="Mbytes">7.706</dataSize>
<date type="modified">2011-06-14T00:17:05Z</date>
</dataset>
<dataset name="mslp.1981.nc" ID="Datasets/ncep.reanalysis2.dailyavgs/surface/mslp.1981.nc" urlPath="Datasets/ncep.reanalysis2.dailyavgs/surface/mslp.1981.nc">
<dataSize units="Mbytes">7.685</dataSize>
<date type="modified">2011-06-14T00:17:16Z</date>
</dataset>
<dataset name="mslp.1982.nc" ID="Datasets/ncep.reanalysis2.dailyavgs/surface/mslp.1982.nc" urlPath="Datasets/ncep.reanalysis2.dailyavgs/surface/mslp.1982.nc">
<dataSize units="Mbytes">7.685</dataSize>
<date type="modified">2011-06-14T00:17:14Z</date>
</dataset>
<dataset name="mslp.1983.nc" ID="Datasets/ncep.reanalysis2.dailyavgs/surface/mslp.1983.nc" urlPath="Datasets/ncep.reanalysis2.dailyavgs/surface/mslp.1983.nc">
<dataSize units="Mbytes">7.685</dataSize>
<date type="modified">2011-06-14T00:16:56Z</date>
</dataset>
<dataset name="mslp.1984.nc" ID="Datasets/ncep.reanalysis2.dailyavgs/surface/mslp.1984.nc" urlPath="Datasets/ncep.reanalysis2.dailyavgs/surface/mslp.1984.nc">
<dataSize units="Mbytes">7.706</dataSize>
<date type="modified">2011-06-14T00:17:19Z</date>
</dataset>
</dataset>
</catalog>
"""
cat = read_xml(xml, 'http://example.test/catalog.xml')
assert cat.services[0].services[2].name == 'wcs'
assert cat.services[0].services[2].service_type == 'WCS'
assert cat.services[0].services[2].url == 'http://example.test/psd/thredds/wcs/'
assert cat.datasets[0].name == "surface"
assert cat.datasets[0].content_type == "application/directory"
assert cat.datasets[0].service_name == "all"
assert cat.datasets[0].data_type == "GRID"
assert cat.datasets[0].datasets[0].name == "mslp.1980.nc"
assert cat.datasets[0].datasets[0].ID == "Datasets/ncep.reanalysis2.dailyavgs/surface/mslp.1980.nc"
assert cat.datasets[0].datasets[0].url_path == "Datasets/ncep.reanalysis2.dailyavgs/surface/mslp.1980.nc"
assert cat.datasets[0].datasets[0].modified == '2011-06-14T00:17:05Z'
assert cat.datasets[0].datasets[0].bytes == 7706000
assert cat.datasets[0].datasets[0].data_type == 'GRID'
assert cat.datasets[0].datasets[0].service_name == 'all'
assert cat.datasets[0].datasets[0].url == \
"http://example.test/catalog.xml?dataset=Datasets/ncep.reanalysis2.dailyavgs/surface/mslp.1980.nc"
assert cat.datasets[0].datasets[0].download_url() == \
'http://example.test/psd/thredds/fileServer/Datasets/ncep.reanalysis2.dailyavgs/surface/mslp.1980.nc'
assert cat.datasets[0].datasets[0].opendap_url() == \
'http://example.test/psd/thredds/dodsC/Datasets/ncep.reanalysis2.dailyavgs/surface/mslp.1980.nc'
assert cat.datasets[0].datasets[0].wms_url() == \
'http://example.test/psd/thredds/wms/Datasets/ncep.reanalysis2.dailyavgs/surface/mslp.1980.nc'
assert cat.datasets[0].datasets[0].content_type == 'application/netcdf'
assert len(cat.download_urls()) == 5
assert cat.download_urls()[1] == \
"http://example.test/psd/thredds/fileServer/Datasets/ncep.reanalysis2.dailyavgs/surface/mslp.1981.nc"
assert len(cat.opendap_urls()) == 5
assert cat.opendap_urls()[2] == \
"http://example.test/psd/thredds/dodsC/Datasets/ncep.reanalysis2.dailyavgs/surface/mslp.1982.nc"
assert len(cat.flat_datasets()) == 5
assert len(cat.flat_references()) == 0
|
bird-house/threddsclient
|
tests/test_noaa_sample.py
|
Python
|
apache-2.0
| 10,499
|
[
"NetCDF"
] |
57d1c4f1f7d2ff72c99f573371648f0eb42bd49df94ea8758f5dfbe7061559e7
|
# ======================================================================
#
# Cosmograil: cosmograil.tools.sexcatalog
#
# sexcatalog module.
#
# Author: Laurent Le Guillou <laurentl@ster.kuleuven.ac.be>
#
# $Id: sexcatalog.py,v 1.1 2005/06/29 13:07:41 hack Exp $
#
# ======================================================================
#
# "sexcatalog": python module to read and parse SExtractor catalogs
# A simple interface to read SExtractor text catalogs
#
# ======================================================================
#
# $Log: sexcatalog.py,v $
# Revision 1.1 2005/06/29 13:07:41 hack
# Added Python interface to SExtractor to STSDAS$Python for use with 'tweakshifts'. WJH
#
# Revision 1.9 2005/02/14 19:27:31 laurentl
# Added write facilities to rdb module.
#
# Revision 1.8 2005/02/14 17:47:02 laurentl
# Added iterator interface
#
# Revision 1.7 2005/02/14 17:16:30 laurentl
# clean now removes the NNW config file too.
#
# Revision 1.2 2005/02/14 17:13:49 laurentl
# *** empty log message ***
#
# Revision 1.1 2005/02/14 11:34:10 laurentl
# quality monitor now uses SExtractor wrapper.
#
# Revision 1.5 2005/02/11 14:40:35 laurentl
# minor changes
#
# Revision 1.4 2005/02/10 20:15:14 laurentl
# Improved SExtractor wrapper.
#
# Revision 1.2 2005/02/09 23:32:50 laurentl
# Implemented SExtractor wrapper
#
# Revision 1.1 2005/01/06 12:29:25 laurentl
# Added a SExtractor wrapper module. Renamed sextractor.py sexcatalog.py.
#
# Revision 1.1 2004/12/09 03:06:23 laurentl
# Changed tree structure
#
# Revision 1.5 2004/11/26 18:26:59 laurentl
# Added a module to manage the data tree.
#
# Revision 1.4 2004/11/24 15:11:31 laurentl
# Fixed a lot of bugs in sexcatalog module.
#
# Revision 1.2 2004/11/23 22:38:23 laurentl
# Added sexcatalog module.
#
#
# ======================================================================
import os
"""
A simple interface to manipulate SExtractor ASCII catalogs
A simple interface to manipulate SExtractor ASCII catalogs
through a file-like API (open, read, readline, etc.).
For the moment only reading ('r' mode) is supported.
by Laurent Le Guillou
version: 0.1.5 - last modified: 2005-02-14
Future: implement a 'w' mode to be able to save catalogs
in SExtractor format.
Examples:
-----------------------------------------------------------------
# Through sexcatalog module
import sexcatalog
# Read a SExtractor ASCII catalog
# First method: read the whole catalog at once
catalog_f = sexcatalog.open(catalog_name)
catalog = catalog_f.readlines()
for star in catalog:
print star['FLUX_BEST'], star['FLAGS']
if (star['FLAGS'] & sexcatalog.BLENDED):
print "This star is BLENDED"
catalog_f.close()
# Second method: read the catalog star by star
catalog_f = sexcatalog.open(catalog_name)
for star in catalog_f:
print star['FLUX_BEST'], star['FLAGS']
if (star['FLAGS'] & sexcatalog.BLENDED):
print "This star is BLENDED"
catalog_f.close()
# -------------
# Through sextractor module
import sextractor
# Read a SExtractor ASCII catalog
# First method: read the whole catalog at once
catalog_f = sextractor.open(catalog_name)
catalog = catalog_f.readlines()
for star in catalog:
print star['FLUX_BEST'], star['FLAGS']
if (star['FLAGS'] & sextractor.BLENDED):
print "This star is BLENDED"
catalog_f.close()
# Second method: read the catalog star by star
catalog_f = sextractor.open(catalog_name)
star = catalog_f.readline()
while star:
print star['FLUX_BEST'], star['FLAGS']
if (star['FLAGS'] & sextractor.BLENDED):
print "This star is BLENDED"
star = catalog_f.readline()
catalog_f.close()
-----------------------------------------------------------------
"""
# ======================================================================
import __builtin__
#import sys
#import exceptions
# ======================================================================
__version__ = "0.1.5 (2005-02-14)"
# ======================================================================
# -- FLAGS meaning
NEIGHBOURS = 1
BLENDED = 2
SATURATED = 4
TRUNCATED = 8
CORRUPTED_APER = 16
CORRUPTED_ISO = 32
OVERFLOW_DEBLEND = 64
OVERFLOW_EXTRACT = 128
class WrongSExtractorfileException(Exception):
pass
class SExtractorfile:
"""
A class to manipulate SExtractor ASCII catalogs.
For the moment only reading ('r' mode) is supported.
"""
_SE_keys = \
{"NUMBER": {"comment": "Running object number",
"infunc": int,
"format": "%10d",
"unit": ""},
"FLAGS": {"comment": "Extraction flags",
"infunc": int,
"format": "%3d",
"unit": ""},
"FLUX_ISO": {"comment": "Isophotal flux",
"infunc": float,
"format": "%12g",
"unit": "count"},
"FLUXERR_ISO": {"comment": "RMS error for isophotal flux",
"infunc": float,
"format": "%12g",
"unit": "count"},
"MAG_ISO": {"comment": "Isophotal magnitude",
"infunc": float,
"format": "%8.4f",
"unit": "mag"},
"MAGERR_ISO": {"comment":
"RMS error for isophotal magnitude",
"infunc": float,
"format": "%8.4f",
"unit": "mag"},
"FLUX_ISOCOR": {"comment": "Corrected isophotal flux",
"infunc": float,
"format": "%12g",
"unit": "count"},
"FLUXERR_ISOCOR": {"comment":
"RMS error for corrected isophotal flux",
"infunc": float,
"format": "%12g",
"unit": "count"},
"MAG_ISOCOR": {"comment": "Corrected isophotal magnitude",
"infunc": float,
"format": "%8.4f",
"unit": "mag"},
"MAGERR_ISOCOR": {"comment":
"RMS error for corrected isophotal magnitude",
"infunc": float,
"format": "%8.4f",
"unit": "mag"},
"FLUX_AUTO": {"comment":
"Flux within a Kron-like elliptical aperture",
"infunc": float,
"format": "%12g",
"unit": "count"},
"FLUXERR_AUTO": {"comment": "RMS error for AUTO flux",
"infunc": float,
"format": "%12g",
"unit": "count"},
"MAG_AUTO": {"comment":
"Kron-like elliptical aperture magnitude",
"infunc": float,
"format": "%8.4f",
"unit": "mag"},
"MAGERR_AUTO": {"comment": "RMS error for AUTO magnitude",
"infunc": float,
"format": "%8.4f",
"unit": "mag"},
"FLUX_BEST": {"comment":
"Best of FLUX_AUTO and FLUX_ISOCOR",
"infunc": float,
"format": "%12g",
"unit": "count"},
"FLUXERR_BEST": {"comment": "RMS error for BEST flux",
"infunc": float,
"format": "%12g",
"unit": "count"},
"MAG_BEST": {"comment": "Best of MAG_AUTO and MAG_ISOCOR",
"infunc": float,
"format": "%8.4f",
"unit": "mag"},
"MAGERR_BEST": {"comment": "RMS error for MAG_BEST",
"infunc": float,
"format": "%8.4f",
"unit": "mag"},
"KRON_RADIUS": {"comment":
"Kron apertures in units of A or B",
"infunc": float,
"format": "%5.2f",
"unit": ""},
"BACKGROUND": {"comment": "Background at centroid position",
"infunc": float,
"format": "%12g",
"unit": "count"},
"THRESHOLD": {"comment":
"Detection threshold above background",
"infunc": float,
"format": "%12g",
"unit": "count"},
"MU_THRESHOLD": {"comment":
"Detection threshold above background",
"infunc": float,
"format": "%8.4f",
"unit": "mag * arcsec**(-2)"},
"FLUX_MAX": {"comment": "Peak flux above background",
"infunc": float,
"format": "%12g",
"unit": "count"},
"MU_MAX": {"comment":
"Peak surface brightness above background",
"infunc": float,
"format": "%8.4f",
"unit": "mag * arcsec**(-2)"},
"ISOAREA_WORLD": {"comment":
"Isophotal area above Analysis threshold",
"infunc": float,
"format": "%12g",
"unit": "deg**2"},
"XMIN_IMAGE": {"comment":
"Minimum x-coordinate among detected pixels",
"infunc": int,
"format": "%10d",
"unit": "pixel"},
"YMIN_IMAGE": {"comment":
"Minimum y-coordinate among detected pixels",
"infunc": int,
"format": "%10d",
"unit": "pixel"},
"XMAX_IMAGE": {"comment":
"Maximum x-coordinate among detected pixels",
"infunc": int,
"format": "%10d",
"unit": "pixel"},
"YMAX_IMAGE": {"comment":
"Maximum y-coordinate among detected pixels",
"infunc": int,
"format": "%10d",
"unit": "pixel"},
"X_IMAGE": {"comment": "Object position along x",
"infunc": float,
"format": "%10.3f",
"unit": "pixel"},
"Y_IMAGE": {"comment": "Object position along y",
"infunc": float,
"format": "%10.3f",
"unit": "pixel"},
"XWIN_IMAGE": {"comment": "Windowed position estimate along x",
"infunc": float,
"format": "%10.3f",
"unit": "pixel"},
"YWIN_IMAGE": {"comment": "Windowed position estimate along y",
"infunc": float,
"format": "%10.3f",
"unit": "pixel"},
"X_WORLD": {"comment":
"Barycenter position along world x axis",
"infunc": float,
"format": "%15e",
"unit": "deg"},
"Y_WORLD": {"comment":
"Barycenter position along world y axis",
"infunc": float,
"format": "%15e",
"unit": "deg"},
"ALPHA_SKY": {"comment":
"Right ascension of barycenter (native)",
"infunc": float,
"format": "%11.7f",
"unit": "deg"},
"DELTA_SKY": {"comment":
"Declination of barycenter (native)",
"infunc": float,
"format": "%+11.7f",
"unit": "deg"},
"ALPHA_J2000": {"comment":
"Right ascension of barycenter (J2000)",
"infunc": float,
"format": "%11.7f",
"unit": "deg"},
"DELTA_J2000": {"comment":
"Declination of barycenter (J2000)",
"infunc": float,
"format": "%+11.7f",
"unit": "deg"},
"ALPHA_B1950": {"comment":
"Right ascension of barycenter (B1950)",
"infunc": float,
"format": "%11.7f",
"unit": "deg"},
"DELTA_B1950": {"comment":
"Declination of barycenter (B1950)",
"infunc": float,
"format": "%+11.7f",
"unit": "deg"},
"X2_IMAGE": {"comment": "Variance along x",
"infunc": float,
"format": "%15e",
"unit": "pixel**2"},
"Y2_IMAGE": {"comment": "Variance along y",
"infunc": float,
"format": "%15e",
"unit": "pixel**2"},
"XY_IMAGE": {"comment": "Covariance between x and y",
"infunc": float,
"format": "%15e",
"unit": "pixel**2"},
"CXX_IMAGE": {"comment": "Cxx object ellipse parameter",
"infunc": float,
"format": "%12e",
"unit": "pixel**(-2)"},
"CYY_IMAGE": {"comment": "Cyy object ellipse parameter",
"infunc": float,
"format": "%12e",
"unit": "pixel**(-2)"},
"CXY_IMAGE": {"comment": "Cxy object ellipse parameter",
"infunc": float,
"format": "%12e",
"unit": "pixel**(-2)"},
"A_IMAGE": {"comment": "Profile RMS along major axis",
"infunc": float,
"format": "%9.3f",
"unit": "pixel"},
"B_IMAGE": {"comment": "Profile RMS along minor axis",
"infunc": float,
"format": "%9.3f",
"unit": "pixel"},
"THETA_IMAGE": {"comment": "Position angle (CCW/x)",
"infunc": float,
"format": "%5.1f",
"unit": "deg"},
"ELONGATION": {"comment": "A_IMAGE/B_IMAGE",
"infunc": float,
"format": "%8.3f",
"unit": ""},
"ELLIPTICITY": {"comment": "1 - B_IMAGE/A_IMAGE",
"infunc": float,
"format": "%8.3f",
"unit": ""},
"ERRX2_IMAGE": {"comment": "Variance of position along x",
"infunc": float,
"format": "%15e",
"unit": "pixel**2"},
"ERRY2_IMAGE": {"comment": "Variance of position along y",
"infunc": float,
"format": "%15e",
"unit": "pixel**2"},
"ERRXY_IMAGE": {"comment":
"Covariance of position between x and y",
"infunc": float,
"format": "%15e",
"unit": "pixel**2"},
"ERRCXX_IMAGE": {"comment": "Cxx error ellipse parameter",
"infunc": float,
"format": "%12g",
"unit": "pixel**(-2)"},
"ERRCYY_IMAGE": {"comment": "Cyy error ellipse parameter",
"infunc": float,
"format": "%12g",
"unit": "pixel**(-2)"},
"ERRCXY_IMAGE": {"comment": "Cxy error ellipse parameter",
"infunc": float,
"format": "%12g",
"unit": "pixel**(-2)"},
"ERRA_IMAGE": {"comment":
"RMS position error along major axis",
"infunc": float,
"format": "%8.4f",
"unit": "pixel"},
"ERRB_IMAGE": {"comment":
"RMS position error along minor axis",
"infunc": float,
"format": "%8.4f",
"unit": "pixel"},
"ERRTHETA_IMAGE": {"comment":
"Error ellipse position angle (CCW/x)",
"infunc": float,
"format": "%5.1f",
"unit": "deg"},
"FWHM_IMAGE": {"comment": "FWHM assuming a gaussian core",
"infunc": float,
"format": "%8.2f",
"unit": "pixel"},
"X2_WORLD": {"comment": "Variance along X-WORLD (alpha)",
"infunc": float,
"format": "%15e",
"unit": "deg**2"},
"Y2_WORLD": {"comment": "Variance along Y-WORLD (delta)",
"infunc": float,
"format": "%15e",
"unit": "deg**2"},
"XY_WORLD": {"comment":
"Covariance between X-WORLD and Y-WORLD",
"infunc": float,
"format": "%15e",
"unit": "deg**2"},
"CXX_WORLD": {"comment":
"Cxx object ellipse parameter (WORLD units)",
"infunc": float,
"format": "%12e",
"unit": "deg**(-2)"},
"CYY_WORLD": {"comment":
"Cyy object ellipse parameter (WORLD units)",
"infunc": float,
"format": "%12e",
"unit": "deg**(-2)"},
"CXY_WORLD": {"comment":
"Cxy object ellipse parameter (WORLD units)",
"infunc": float,
"format": "%12e",
"unit": "deg**(-2)"},
"A_WORLD": {"comment":
"Profile RMS along major axis (world units)",
"infunc": float,
"format": "%12g",
"unit": "deg"},
"B_WORLD": {"comment":
"Profile RMS along minor axis (world units)",
"infunc": float,
"format": "%12g",
"unit": "deg"},
"THETA_WORLD": {"comment": "Position angle (CCW/world-x)",
"infunc": float,
"format": "%5.1f",
"unit": "deg"},
"THETA_SKY": {"comment":
"Position angle (east of north) (native)",
"infunc": float,
"format": "%+6.2f",
"unit": "deg"},
"THETA_J2000": {"comment":
"Position angle (east of north) (J2000)",
"infunc": float,
"format": "%+6.2f",
"unit": "deg"},
"THETA_B1950": {"comment":
"Position angle (east of north) (B1950)",
"infunc": float,
"format": "%+6.2f",
"unit": "deg"},
"ERRX2_WORLD": {"comment":
"Variance of position along X-WORLD (alpha)",
"infunc": float,
"format": "%15e",
"unit": "deg**2"},
"ERRY2_WORLD": {"comment":
"Variance of position along Y-WORLD (delta)",
"infunc": float,
"format": "%15e",
"unit": "deg**2"},
"ERRXY_WORLD": {"comment":
"Covariance of position X-WORLD/Y-WORLD",
"infunc": float,
"format": "%15e",
"unit": "deg**2"},
"ERRCXX_WORLD": {"comment":
"Cxx error ellipse parameter (WORLD units)",
"infunc": float,
"format": "%12g",
"unit": "deg**(-2)"},
"ERRCYY_WORLD": {"comment":
"Cyy error ellipse parameter (WORLD units)",
"infunc": float,
"format": "%12g",
"unit": "deg**(-2)"},
"ERRCXY_WORLD": {"comment":
"Cxy error ellipse parameter (WORLD units)",
"infunc": float,
"format": "%12g",
"unit": "deg**(-2)"},
"ERRA_WORLD": {"comment":
"World RMS position error along major axis",
"infunc": float,
"format": "%12g",
"unit": "pixel"},
"ERRB_WORLD": {"comment":
"World RMS position error along minor axis",
"infunc": float,
"format": "%12g",
"unit": "pixel"},
"ERRTHETA_WORLD": {"comment":
"Error ellipse pos. angle (CCW/world-x)",
"infunc": float,
"format": "%5.1f",
"unit": "deg"},
"ERRTHETA_SKY": {"comment":
"Native error ellipse pos." +
"angle (east of north)",
"infunc": float,
"format": "%5.1f",
"unit": "deg"},
"ERRTHETA_J2000": {"comment":
"J2000 error ellipse pos." +
"angle (east of north)",
"infunc": float,
"format": "%5.1f",
"unit": "deg"},
"ERRTHETA_B1950": {"comment":
"B1950 error ellipse pos." +
"angle (east of north)",
"infunc": float,
"format": "%5.1f",
"unit": "deg"},
"FWHM_WORLD": {"comment": "FWHM assuming a gaussian core",
"infunc": float,
"format": "%12g",
"unit": "deg"},
"CLASS_STAR": {"comment": "S/G classifier output",
"infunc": float,
"format": "%5.2f",
"unit": ""}
}
def __init__(self, name, mode='r'):
self.name = name
self.mode = mode
self.closed = True
self._file = None
self._keys = list()
self._keys_positions = {}
self._output = None
self._firstline = True
if self.mode != 'r':
raise ValueError, \
'only read-only access is now implemented.'
self._file = __builtin__.open(self.name, self.mode)
self.closed = False
# Reading header
self._line = self._file.readline()
if not(self._line):
raise WrongSExtractorfileException, \
'not a SExtractor text catalog (empty file)'
while (self._line):
__ll = (self._line).replace('\n', '')
if __ll[0] == '#': # Still in header
columns = __ll.split()
if len(columns) < 3:
raise WrongSExtractorfileException, \
'not a SExtractor text catalog (invalid header)'
name = columns[2]
if not(name in SExtractorfile._SE_keys.keys()):
raise WrongSExtractorfileException, \
'not a SExtractor text catalog (unknown keyword %s)'\
% name
self._keys_positions[name] = int(columns[1]) - 1
self._keys.append(name)
else:
break
self._line = self._file.readline()
if not(self._keys):
raise WrongSExtractorfileException, \
'not a SExtractor text catalog (empty header)'
self._outdict = dict([(k, None) for k in self._keys])
self._firstline = True
def __del__(self):
self.close()
def __iter__(self):
return self
def next(self):
rr = self.readline()
if not(rr):
raise StopIteration
return rr
def __nonzero__(self):
return self._file
def keys(self):
"Return the list of available parameters."
return self._keys
def getcolumns(self):
"Return the list of available parameters."
return self.keys()
def readline(self):
"""
Read and analyse the next line of the SExtractor catalog
and return a dictionary {'param1': value, 'param2': value, ...}.
"""
if not(self._firstline):
self._line = self._file.readline()
self._firstline = False
if not(self._line):
return None
__ll = (self._line).replace('\n', '')
__values = __ll.split()
self._outdict.update(dict(zip(self._keys, __values)))
for i in self._keys:
self._outdict[i] = (
SExtractorfile._SE_keys[i]["infunc"](self._outdict[i]))
return self._outdict.copy()
def read(self):
"""
Read the file until EOF and return a list of dictionaries.
"""
__result = []
__ll = self.readline()
while __ll:
__result.append(__ll)
__ll = self.readline()
return list(__result)
    def readlines(self):
        # Alias for read(), mirroring the file-object API.
        return self.read()
def close(self):
"""
Close the SExtractor file.
"""
if self._file:
if not(self._file.closed):
self._file.close()
self.closed = True
# ======================================================================
def open(name, mode='r'):
    """
    Factory function.
    Open a SExtractor file and return a SExtractor file object.
    """
    # NOTE: intentionally shadows the builtin open() at module level;
    # code inside this module reaches the real builtin via __builtin__.open.
    return SExtractorfile(name, mode)
# ======================================================================
|
ankanaan/chimera
|
src/chimera/util/sexcatalog.py
|
Python
|
gpl-2.0
| 27,007
|
[
"Gaussian"
] |
a7ec0051af35ec826032d949d6af28959eb271fadb66f44b0338f1dedc7921ec
|
#
# When publishing work that uses these basis sets, please use the following citation:
#
# K.G. Dyall, Theor. Chem. Acc. (1998) 99:366; addendum Theor. Chem. Acc. (2002) 108:365;
# revision Theor. Chem. Acc. (2006) 115:441. Basis sets available from the Dirac web site,
# http://dirac.chem.sdu.dk.
Pt = [[0, -1, [62342089.0,1]], [0, -1, [16589944.0,1]], [0, -1, [5681162.1,1]],
[0, -1, [2164841.8,1]], [0, -1, [902504.9,1]], [0, -1, [397530.86,1]],
[0, -1, [183323.91,1]], [0, -1, [87299.317,1]], [0, -1, [42706.731,1]],
[0, -1, [21351.254,1]], [0, -1, [10883.187,1]], [0, -1, [5642.9413,1]],
[0, -1, [2971.6339,1]], [0, -1, [1587.1535,1]], [0, -1, [858.6781,1]],
[0, -1, [470.0627,1]], [0, -1, [258.41934,1]], [0, -1, [144.5214,1]],
[0, -1, [81.390237,1]], [0, -1, [44.8666,1]], [0, -1, [26.054094,1]],
[0, -1, [14.857134,1]], [0, -1, [8.0188666,1]], [0, -1, [4.412026,1]],
[0, -1, [2.2028461,1]], [0, -1, [1.1747053,1]], [0, -1, [0.58038176,1]],
[0, -1, [0.20062925,1]], [0, -1, [0.087656027,1]], [0, -1, [0.037707853,1]],
[1, 0, [25681628.0,1]], [1, 0, [5208104.3,1]], [1, 0, [1287419.1,1]],
[1, 0, [361868.0,1]], [1, 0, [112348.86,1]], [1, 0, [37965.378,1]],
[1, 0, [13886.26,1]], [1, 0, [5481.1599,1]], [1, 0, [2321.6748,1]],
[1, 0, [1045.6767,1]], [1, 0, [495.08759,1]], [1, 0, [243.80244,1]],
[1, 0, [124.04519,1]], [1, 0, [64.324598,1]], [1, 0, [34.404369,1]],
[1, 0, [18.460629,1]], [1, 0, [9.6342884,1]], [1, 0, [4.9988583,1]],
[1, 0, [2.43708,1]], [1, 0, [1.194136,1]], [1, 0, [0.54650458,1]],
[1, 0, [0.18944919,1]], [1, 0, [0.072990782,1]], [1, 0, [0.027478234,1]],
[2, 0, [13726.278,1]], [2, 0, [3497.1942,1]], [2, 0, [1231.0132,1]],
[2, 0, [511.61232,1]], [2, 0, [234.47388,1]], [2, 0, [114.63,1]],
[2, 0, [58.167907,1]], [2, 0, [30.288845,1]], [2, 0, [15.719476,1]],
[2, 0, [7.9912638,1]], [2, 0, [3.9867188,1]], [2, 0, [1.8480542,1]],
[2, 0, [0.82389625,1]], [2, 0, [0.3400657,1]], [2, 0, [0.12605538,1]],
[3, 0, [753.39068,1]], [3, 0, [256.76136,1]], [3, 0, [109.32209,1]],
[3, 0, [51.54658,1]], [3, 0, [25.680605,1]], [3, 0, [13.091382,1]],
[3, 0, [6.622071,1]], [3, 0, [3.2448099,1]], [3, 0, [1.455748,1]],
[3, 0, [0.48555165,1]], ]
At = [[0, 0, [6.0348256E+07, 1]],
[0, 0, [1.6062187E+07, 1]],
[0, 0, [5.4983695E+06, 1]],
[0, 0, [2.0938854E+06, 1]],
[0, 0, [8.7397731E+05, 1]],
[0, 0, [3.8623125E+05, 1]],
[0, 0, [1.7922839E+05, 1]],
[0, 0, [8.6061821E+04, 1]],
[0, 0, [4.2514482E+04, 1]],
[0, 0, [2.1466450E+04, 1]],
[0, 0, [1.1045500E+04, 1]],
[0, 0, [5.7758353E+03, 1]],
[0, 0, [3.0646676E+03, 1]],
[0, 0, [1.6479440E+03, 1]],
[0, 0, [8.9662945E+02, 1]],
[0, 0, [4.9214492E+02, 1]],
[0, 0, [2.7496500E+02, 1]],
[0, 0, [1.5580890E+02, 1]],
[0, 0, [8.9322531E+01, 1]],
[0, 0, [5.1403858E+01, 1]],
[0, 0, [3.0206500E+01, 1]],
[0, 0, [1.7630085E+01, 1]],
[0, 0, [9.9209803E+00, 1]],
[0, 0, [5.7049663E+00, 1]],
[0, 0, [3.1798259E+00, 1]],
[0, 0, [1.7280434E+00, 1]],
[0, 0, [9.2265694E-01, 1]],
[0, 0, [4.1308099E-01, 1]],
[0, 0, [1.9366504E-01, 1]],
[0, 0, [8.5956507E-02, 1]],
[1, 0, [4.8082029E+07, 1]],
[1, 0, [1.2975956E+07, 1]],
[1, 0, [3.8822931E+06, 1]],
[1, 0, [1.2615276E+06, 1]],
[1, 0, [4.3747176E+05, 1]],
[1, 0, [1.6030701E+05, 1]],
[1, 0, [6.1733059E+04, 1]],
[1, 0, [2.4933585E+04, 1]],
[1, 0, [1.0565929E+04, 1]],
[1, 0, [4.6991896E+03, 1]],
[1, 0, [2.1896979E+03, 1]],
[1, 0, [1.0645378E+03, 1]],
[1, 0, [5.3640018E+02, 1]],
[1, 0, [2.7850004E+02, 1]],
[1, 0, [1.4769404E+02, 1]],
[1, 0, [7.9030729E+01, 1]],
[1, 0, [4.3330235E+01, 1]],
[1, 0, [2.3734199E+01, 1]],
[1, 0, [1.2673710E+01, 1]],
[1, 0, [6.8190161E+00, 1]],
[1, 0, [3.5177624E+00, 1]],
[1, 0, [1.8070376E+00, 1]],
[1, 0, [8.9279827E-01, 1]],
[1, 0, [3.7624123E-01, 1]],
[1, 0, [1.5747465E-01, 1]],
[1, 0, [6.2205718E-02, 1]],
[2, 0, [4.3024363E+04, 1]],
[2, 0, [1.0418508E+04, 1]],
[2, 0, [3.4997410E+03, 1]],
[2, 0, [1.4042448E+03, 1]],
[2, 0, [6.3128950E+02, 1]],
[2, 0, [3.0578892E+02, 1]],
[2, 0, [1.5622137E+02, 1]],
[2, 0, [8.2504058E+01, 1]],
[2, 0, [4.4686721E+01, 1]],
[2, 0, [2.4327946E+01, 1]],
[2, 0, [1.3054823E+01, 1]],
[2, 0, [6.9879667E+00, 1]],
[2, 0, [3.6596287E+00, 1]],
[2, 0, [1.8569526E+00, 1]],
[2, 0, [9.1045074E-01, 1]],
[2, 0, [4.2148358E-01, 1]],
[2, 0, [1.7153562E-01, 1]],
[3, 0, [1.1997393E+03, 1]],
[3, 0, [4.0625428E+02, 1]],
[3, 0, [1.7260851E+02, 1]],
[3, 0, [8.2092879E+01, 1]],
[3, 0, [4.1340525E+01, 1]],
[3, 0, [2.1511024E+01, 1]],
[3, 0, [1.1203968E+01, 1]],
[3, 0, [5.7270911E+00, 1]],
[3, 0, [2.7467309E+00, 1]],
[3, 0, [1.0823341E+00, 1]],
[3, 0, [3.7295068E-01, 1]],]
I = [[0, 0, [7.6175652E+07, 1]],
[0, 0, [1.9849983E+07, 1]],
[0, 0, [6.5395721E+06, 1]],
[0, 0, [2.3596246E+06, 1]],
[0, 0, [9.1352219E+05, 1]],
[0, 0, [3.7073402E+05, 1]],
[0, 0, [1.5693523E+05, 1]],
[0, 0, [6.8905393E+04, 1]],
[0, 0, [3.1288540E+04, 1]],
[0, 0, [1.4648899E+04, 1]],
[0, 0, [7.0539515E+03, 1]],
[0, 0, [3.4846093E+03, 1]],
[0, 0, [1.7615182E+03, 1]],
[0, 0, [9.0920153E+02, 1]],
[0, 0, [4.7786933E+02, 1]],
[0, 0, [2.5427862E+02, 1]],
[0, 0, [1.3385760E+02, 1]],
[0, 0, [7.3157057E+01, 1]],
[0, 0, [4.0187794E+01, 1]],
[0, 0, [2.1892839E+01, 1]],
[0, 0, [1.2282452E+01, 1]],
[0, 0, [6.9006489E+00, 1]],
[0, 0, [3.7685525E+00, 1]],
[0, 0, [1.9974585E+00, 1]],
[0, 0, [1.0477282E+00, 1]],
[0, 0, [4.2646199E-01, 1]],
[0, 0, [2.0028304E-01, 1]],
[0, 0, [8.8978635E-02, 1]],
[1, 0, [7.5310209E+06, 1]],
[1, 0, [1.1662681E+06, 1]],
[1, 0, [2.5045040E+05, 1]],
[1, 0, [6.5445899E+04, 1]],
[1, 0, [1.9932240E+04, 1]],
[1, 0, [6.9208735E+03, 1]],
[1, 0, [2.6846079E+03, 1]],
[1, 0, [1.1374703E+03, 1]],
[1, 0, [5.1621020E+02, 1]],
[1, 0, [2.4693983E+02, 1]],
[1, 0, [1.2274916E+02, 1]],
[1, 0, [6.2799497E+01, 1]],
[1, 0, [3.2487371E+01, 1]],
[1, 0, [1.6723605E+01, 1]],
[1, 0, [8.7679297E+00, 1]],
[1, 0, [4.4589051E+00, 1]],
[1, 0, [2.2338061E+00, 1]],
[1, 0, [1.0911480E+00, 1]],
[1, 0, [4.3929315E-01, 1]],
[1, 0, [1.8360774E-01, 1]],
[1, 0, [7.2403358E-02, 1]],
[2, 0, [8.6279708E+03, 1]],
[2, 0, [2.2813047E+03, 1]],
[2, 0, [8.2732533E+02, 1]],
[2, 0, [3.5190121E+02, 1]],
[2, 0, [1.6496856E+02, 1]],
[2, 0, [8.2183367E+01, 1]],
[2, 0, [4.2729088E+01, 1]],
[2, 0, [2.2703366E+01, 1]],
[2, 0, [1.2260601E+01, 1]],
[2, 0, [6.6400786E+00, 1]],
[2, 0, [3.5392069E+00, 1]],
[2, 0, [1.8418472E+00, 1]],
[2, 0, [9.3488753E-01, 1]],
[2, 0, [4.4933710E-01, 1]],
[2, 0, [1.8644465E-01, 1]],]
Rn = [[0, 0, [5.8479849E+07, 1]],
[0, 0, [1.5566145E+07, 1]],
[0, 0, [5.3278717E+06, 1]],
[0, 0, [2.0283228E+06, 1]],
[0, 0, [8.4629773E+05, 1]],
[0, 0, [3.7383319E+05, 1]],
[0, 0, [1.7339590E+05, 1]],
[0, 0, [8.3220642E+04, 1]],
[0, 0, [4.1089364E+04, 1]],
[0, 0, [2.0733535E+04, 1]],
[0, 0, [1.0660059E+04, 1]],
[0, 0, [5.5696713E+03, 1]],
[0, 0, [2.9528148E+03, 1]],
[0, 0, [1.5862087E+03, 1]],
[0, 0, [8.6173506E+02, 1]],
[0, 0, [4.7095278E+02, 1]],
[0, 0, [2.6264365E+02, 1]],
[0, 0, [1.4820953E+02, 1]],
[0, 0, [8.4401933E+01, 1]],
[0, 0, [4.9057850E+01, 1]],
[0, 0, [2.8582642E+01, 1]],
[0, 0, [1.6459289E+01, 1]],
[0, 0, [9.4369669E+00, 1]],
[0, 0, [5.4204351E+00, 1]],
[0, 0, [3.0780686E+00, 1]],
[0, 0, [1.6814451E+00, 1]],
[0, 0, [9.0063187E-01, 1]],
[0, 0, [4.3544867E-01, 1]],
[0, 0, [2.0614433E-01, 1]],
[0, 0, [9.2763678E-02, 1]],
[1, 0, [4.7770857E+07, 1]],
[1, 0, [1.3066778E+07, 1]],
[1, 0, [3.9456280E+06, 1]],
[1, 0, [1.2913178E+06, 1]],
[1, 0, [4.5032579E+05, 1]],
[1, 0, [1.6574375E+05, 1]],
[1, 0, [6.4036428E+04, 1]],
[1, 0, [2.5920573E+04, 1]],
[1, 0, [1.0996543E+04, 1]],
[1, 0, [4.8918949E+03, 1]],
[1, 0, [2.2787363E+03, 1]],
[1, 0, [1.1071445E+03, 1]],
[1, 0, [5.5749184E+02, 1]],
[1, 0, [2.8926378E+02, 1]],
[1, 0, [1.5333618E+02, 1]],
[1, 0, [8.2060943E+01, 1]],
[1, 0, [4.4995535E+01, 1]],
[1, 0, [2.4656254E+01, 1]],
[1, 0, [1.3176946E+01, 1]],
[1, 0, [7.1060325E+00, 1]],
[1, 0, [3.6773071E+00, 1]],
[1, 0, [1.8984331E+00, 1]],
[1, 0, [9.4665972E-01, 1]],
[1, 0, [4.1173274E-01, 1]],
[1, 0, [1.7431844E-01, 1]],
[1, 0, [6.9513636E-02, 1]],
[2, 0, [4.6750752E+04, 1]],
[2, 0, [1.1284131E+04, 1]],
[2, 0, [3.7764933E+03, 1]],
[2, 0, [1.5103825E+03, 1]],
[2, 0, [6.7737925E+02, 1]],
[2, 0, [3.2759566E+02, 1]],
[2, 0, [1.6720408E+02, 1]],
[2, 0, [8.8294843E+01, 1]],
[2, 0, [4.7840134E+01, 1]],
[2, 0, [2.6086684E+01, 1]],
[2, 0, [1.4043765E+01, 1]],
[2, 0, [7.5496008E+00, 1]],
[2, 0, [3.9825780E+00, 1]],
[2, 0, [2.0379623E+00, 1]],
[2, 0, [1.0104144E+00, 1]],
[2, 0, [4.7300150E-01, 1]],
[2, 0, [1.9507807E-01, 1]],
[3, 0, [1.2648343E+03, 1]],
[3, 0, [4.2782369E+02, 1]],
[3, 0, [1.8169289E+02, 1]],
[3, 0, [8.6452888E+01, 1]],
[3, 0, [4.3573202E+01, 1]],
[3, 0, [2.2710364E+01, 1]],
[3, 0, [1.1860330E+01, 1]],
[3, 0, [6.0877675E+00, 1]],
[3, 0, [2.9409064E+00, 1]],
[3, 0, [1.1766074E+00, 1]],
[3, 0, [4.3100852E-01, 1]],]
U = [
[0, [5.6688627E+07, 1.]],
[0, [1.5089297E+07, 1.]],
[0, [5.1604752E+06, 1.]],
[0, [1.9616143E+06, 1.]],
[0, [8.1795253E+05, 1.]],
[0, [3.6155803E+05, 1.]],
[0, [1.6827256E+05, 1.]],
[0, [8.1257576E+04, 1.]],
[0, [4.0487578E+04, 1.]],
[0, [2.0659489E+04, 1.]],
[0, [1.0754051E+04, 1.]],
[0, [5.6898498E+03, 1.]],
[0, [3.0547388E+03, 1.]],
[0, [1.6617106E+03, 1.]],
[0, [9.1440113E+02, 1.]],
[0, [5.0567265E+02, 1.]],
[0, [2.8540399E+02, 1.]],
[0, [1.6292889E+02, 1.]],
[0, [9.3609383E+01, 1.]],
[0, [5.5811596E+01, 1.]],
[0, [3.2957255E+01, 1.]],
[0, [1.9148698E+01, 1.]],
[0, [1.1424216E+01, 1.]],
[0, [6.7462725E+00, 1.]],
[0, [4.0365190E+00, 1.]],
[0, [2.3217313E+00, 1.]],
[0, [1.3052333E+00, 1.]],
[0, [7.3409234E-01, 1.]],
[0, [3.9965415E-01, 1.]],
[0, [2.1380868E-01, 1.]],
[0, [8.6940003E-02, 1.]],
[0, [4.1541136E-02, 1.]],
[0, [1.9770412E-02, 1.]],
[1, [5.2699704E+07, 1.]],
[1, [1.5503102E+07, 1.]],
[1, [4.9225272E+06, 1.]],
[1, [1.6757629E+06, 1.]],
[1, [6.0281298E+05, 1.]],
[1, [2.2728619E+05, 1.]],
[1, [8.9383515E+04, 1.]],
[1, [3.6589535E+04, 1.]],
[1, [1.5594049E+04, 1.]],
[1, [6.9284886E+03, 1.]],
[1, [3.2106020E+03, 1.]],
[1, [1.5490141E+03, 1.]],
[1, [7.7455359E+02, 1.]],
[1, [3.9927892E+02, 1.]],
[1, [2.1150500E+02, 1.]],
[1, [1.1442300E+02, 1.]],
[1, [6.3349266E+01, 1.]],
[1, [3.5426175E+01, 1.]],
[1, [1.9564577E+01, 1.]],
[1, [1.0924289E+01, 1.]],
[1, [6.0461422E+00, 1.]],
[1, [3.2986304E+00, 1.]],
[1, [1.7645466E+00, 1.]],
[1, [8.8905399E-01, 1.]],
[1, [4.4182761E-01, 1.]],
[1, [2.1288694E-01, 1.]],
[1, [8.3030571E-02, 1.]],
[1, [3.6616079E-02, 1.]],
[1, [1.5628926E-02, 1.]],
[2, [1.1030949E+05, 1.]],
[2, [2.5979209E+04, 1.]],
[2, [8.4185541E+03, 1.]],
[2, [3.2641000E+03, 1.]],
[2, [1.4279053E+03, 1.]],
[2, [6.7981618E+02, 1.]],
[2, [3.4352647E+02, 1.]],
[2, [1.8152057E+02, 1.]],
[2, [9.8735517E+01, 1.]],
[2, [5.4973074E+01, 1.]],
[2, [3.0782635E+01, 1.]],
[2, [1.7041311E+01, 1.]],
[2, [9.4787015E+00, 1.]],
[2, [5.2090831E+00, 1.]],
[2, [2.7893130E+00, 1.]],
[2, [1.4539969E+00, 1.]],
[2, [7.1899480E-01, 1.]],
[2, [3.0121159E-01, 1.]],
[2, [1.1921203E-01, 1.]],
[2, [4.3915110E-02, 1.]],
[3, [1.1523569E+03, 1.]],
[3, [3.8849177E+02, 1.]],
[3, [1.6470953E+02, 1.]],
[3, [7.7567896E+01, 1.]],
[3, [3.8758229E+01, 1.]],
[3, [1.9814017E+01, 1.]],
[3, [1.0164770E+01, 1.]],
[3, [5.1039559E+00, 1.]],
[3, [2.3910278E+00, 1.]],
[3, [1.0651646E+00, 1.]],
[3, [4.3998357E-01, 1.]],
[3, [1.5808424E-01, 1.]],
]
# flake8: noqa
|
sunqm/pyscf
|
pyscf/gto/basis/dyall_tz.py
|
Python
|
apache-2.0
| 13,052
|
[
"DIRAC"
] |
7c48aa6ad67485de29e700df604f5090153cf5eb9417eb1cba7370675f8ac93c
|
#!/usr/bin/env python3
#pylint: disable=missing-docstring
#* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserved, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html
import chigger
# Build a stand-alone colour bar using the 'viridis' colormap.  The primary
# axis spans [5, 10]; a secondary axis with range [100, 500] is drawn as well
# (it is hidden by default, hence visible=True).
colorbar = chigger.misc.ColorBar(cmap='viridis')
colorbar.setOptions('primary', lim=[5,10])
colorbar.setOptions('secondary', lim=[100,500], visible=True)
# Render off-screen (test=True), save a screenshot, then enter the
# interactive/event loop.
window = chigger.RenderWindow(colorbar, size=[600,400], test=True)
window.write('colorbar.png')
window.start()
|
nuclear-wizard/moose
|
python/chigger/tests/colorbar/colorbar.py
|
Python
|
lgpl-2.1
| 647
|
[
"MOOSE"
] |
b9da5239c161265d476787fb20323cf5f9be6985f2b7e06225febebe19c07d9b
|
import pytest
from capybara.node.element import Element
@pytest.mark.requires("js")
class TestEvaluateAsyncScript:
    # Tests for Session.evaluate_async_script.  The driver appends a
    # completion callback as the last entry of `arguments`; the script
    # reports its result by invoking that callback.
    def test_evaluates_the_given_script_and_returns_whatever_it_produces(self, session):
        session.visit("/with_js")
        # arguments[0] is the callback (no user arguments were passed).
        assert session.evaluate_async_script("arguments[0](4)") == 4
    def test_supports_passing_elements_as_arguments_to_the_script(self, session):
        session.visit("/with_js")
        el = session.find("css", "#drag p")
        # Two user arguments (el, string) shift the callback to arguments[2].
        result = session.evaluate_async_script(
            """
            arguments[2]([arguments[0].innerText, arguments[1]]);
            """,
            el, "Doodle Funk")
        assert result == ["This is a draggable element.", "Doodle Funk"]
    def test_supports_returning_elements_after_a_timeout(self, session):
        session.visit("/with_js")
        session.find("css", "#change")  # ensure page has loaded and element is available
        # A DOM node passed to the callback must come back wrapped as an
        # Element, even when delivered asynchronously after a delay.
        el = session.evaluate_async_script(
            """
            var cb = arguments[0];
            setTimeout(function() {
                cb(document.getElementById('change'));
            }, 100);
            """)
        assert isinstance(el, Element)
        assert el == session.find("css", "#change")
|
elliterate/capybara.py
|
capybara/tests/session/test_evaluate_async_script.py
|
Python
|
mit
| 1,233
|
[
"VisIt"
] |
1db79d842c6345de68676b7e5c6ca17d71f6757f511e6965c1c4adfd85cc2431
|
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RAnalysispageserver(RPackage):
    """AnalysisPageServer is a modular system that enables sharing
    of customizable R analyses via the web."""
    # Bioconductor package: sources are fetched straight from the
    # Bioconductor git repository, pinned to a commit for reproducibility.
    homepage = "https://www.bioconductor.org/packages/AnalysisPageServer/"
    url = "https://git.bioconductor.org/packages/AnalysisPageServer"
    version('1.10.0', git='https://git.bioconductor.org/packages/AnalysisPageServer', commit='876c87073be116fa15a1afdd407e21152eb80d50')
    # This release is only declared compatible with the R 3.4.x series.
    depends_on('r@3.4.0:3.4.9', when='@1.10.0')
    # R package dependencies, needed at both build and run time.
    depends_on('r-log4r', type=('build', 'run'))
    depends_on('r-rjson', type=('build', 'run'))
    depends_on('r-biobase', type=('build', 'run'))
    depends_on('r-graph', type=('build', 'run'))
|
skosukhin/spack
|
var/spack/repos/builtin/packages/r-analysispageserver/package.py
|
Python
|
lgpl-2.1
| 1,943
|
[
"Bioconductor"
] |
f5108ee24e2ef1bb468361ef8aea9a626fdd276e561527e45a72714aa426d483
|
## Copyright 2011 Luc Saffre
## This file is part of the Lino project.
## Lino is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 3 of the License, or
## (at your option) any later version.
## Lino is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
## You should have received a copy of the GNU General Public License
## along with Lino; if not, see <http://www.gnu.org/licenses/>.
"""
This is for writing fixtures that import data from an MS-Access
database (:xfile:`.mdb`) into Lino.
Usage examples see
:mod:`lino.projects.pcsw.fixtures.pp2lino`
and
:mod:`lino.projects.crl.fixtures.hs2lino`.
It uses `mdb-export` to extract data from the :xfile:`.mdb`
file to :xfile:`.csv`, then reads these csv files.
`mdb-export` was written by Brian Bruns and is part
of the `mdbtools` Debian package. To install it::
aptitude install mdbtools
Usage of `mdbtools` command line::
Usage: mdb-export [options] <file> <table>
where options are:
-H supress header row
-Q don't wrap text-like fields in quotes
-d <delimiter> specify a column delimiter
-R <delimiter> specify a row delimiter
-I INSERT statements (instead of CSV)
-D <format> set the date format (see strftime(3) for details)
-S Sanitize names (replace spaces etc. with underscore)
-q <char> Use <char> to wrap text-like fields. Default is ".
-X <char> Use <char> to escape quoted characters within a field. Default is doubling.
Thanks to http://farismadi.wordpress.com/2008/07/13/encoding-of-mdb-tool/
for explanations on the environment variables used by `mdb-export`.
The function :func:`check_output` in this module is a copy from Python 2.7
which we include here to make it usable in Python 2.6 too.
"""
import logging
logger = logging.getLogger(__name__)
#~ ENCODING = 'latin1' # the encoding used by the mdb file
ENCODING = 'utf8'
#~ MDB_FILE = 'PPv5MasterCopie.mdb'
MDBTOOLS_EXPORT = 'mdb-export'
import os
import sys
#~ ENCODING = sys.stdout.encoding
#~ import csv
import codecs
import datetime
from django.conf import settings
from lino.utils import ucsv
from lino.utils import dblogger
#~ ENCODING = 'latin1' # the encoding used by the mdb file
ENCODING = 'utf8'
#~ MDB_FILE = 'PPv5MasterCopie.mdb'
MDBTOOLS_EXPORT = 'mdb-export'
try:
    from subprocess import check_output
except ImportError:
    # Python < 2.7: fall back to a verbatim backport of
    # subprocess.check_output copied from the Python 2.7 stdlib.
    import subprocess
    def check_output(*popenargs, **kwargs):
        r"""Run command with arguments and return its output as a byte string.
        If the exit code was non-zero it raises a CalledProcessError. The
        CalledProcessError object will have the return code in the returncode
        attribute and output in the output attribute.
        The arguments are the same as for the Popen constructor. Example:
        >>> check_output(["ls", "-l", "/dev/null"])
        'crw-rw-rw- 1 root root 1, 3 Oct 18 2007 /dev/null\n'
        The stdout argument is not allowed as it is used internally.
        To capture standard error in the result, use stderr=STDOUT.
        >>> check_output(["/bin/sh", "-c",
        ...     "ls -l non_existent_file ; exit 0"],
        ...     stderr=STDOUT)
        'ls: non_existent_file: No such file or directory\n'
        """
        # stdout is reserved: this function always captures it via a pipe.
        if 'stdout' in kwargs:
            raise ValueError('stdout argument not allowed, it will be overridden.')
        process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
        output, unused_err = process.communicate()
        retcode = process.poll()
        if retcode:
            cmd = kwargs.get("args")
            if cmd is None:
                cmd = popenargs[0]
            raise subprocess.CalledProcessError(retcode, cmd, output=output)
        return output
class Loader:
    # Base class for fixture loaders that extract one table from an
    # MS-Access .mdb file (via the external `mdb-export` tool) into a
    # CSV file, then yield model instances built from each row.
    # Subclasses must set the class attributes below and define
    # `headers` (expected CSV header row) and `row2obj(row)`
    # (a generator producing objects from one row dict).
    mdb_file = None    # path to the source .mdb file
    table_name = None  # name of the Access table to extract
    model = None       # target model class (informational)
    def __iter__(self):
        # The CSV extraction is cached on disk: an existing
        # <table_name>.csv is reused rather than re-exported.
        fn = self.table_name + ".csv"
        if os.path.exists(fn):
            logger.warning("Not re-extracting %s since it exists.",fn)
        else:
            # -D sets the date format so parsedate()/parsetime() below
            # can rely on 'YYYY-MM-DD HH:MM:SS'.
            args = [MDBTOOLS_EXPORT, '-D', "%Y-%m-%d %H:%M:%S", self.mdb_file, self.table_name]
            # The MDB_* environment variables control mdb-export's
            # character-set handling (see module docstring).
            s = check_output(args,executable=MDBTOOLS_EXPORT,
                env=dict(
                    MDB_ICONV='utf-8',
                    MDB_JET_CHARSET='utf-8'))
            #~ print ENCODING
            fd = open(fn,'w')
            fd.write(s)
            fd.close()
            logger.info("Extracted file %s", fn)
        reader = ucsv.UnicodeReader(open(fn,'r'),encoding=ENCODING)
        # First CSV row must match the subclass's declared headers exactly.
        headers = reader.next()
        if not headers == self.headers:
            raise Exception("%r != %r" % (headers,self.headers))
        n = 0
        for values in reader:
            # Build a {header: value} dict for this row.
            row = {}
            for i,h in enumerate(self.headers):
                row[h] = values[i]
            n += 1
            # The two `if False:` blocks below are disabled debugging aids.
            if False:
                if int(row['IDClient']) == 967:
                    print row
                    raise Exception("20110609")
            if False:
                if n < 10:
                    print n, ':', row
                else:
                    raise Exception("20110609")
            # Delegate object construction to the subclass.
            for obj in self.row2obj(row):
                yield obj
    def parsedate(self,s):
        # Parse the date part of a 'YYYY-MM-DD HH:MM:SS' string into a
        # datetime.date; empty input yields None.
        if not s: return None
        dt = s.split()
        if len(dt) != 2:
            raise Exception("Unexpected datetime string %r" % s)
        d = dt[0]
        #~ t = dt[1]
        a = [int(i) for i in d.split('-')]
        return datetime.date(year=a[0],month=a[1],day=a[2])
    def parsetime(self,s):
        # Return the 'HH:MM' part of a 'YYYY-MM-DD HH:MM:SS' string
        # (seconds are dropped); empty input yields None.
        if not s: return None
        dt = s.split()
        if len(dt) != 2:
            raise Exception("Unexpected datetime string %r" % s)
        t = dt[1]
        return t[:5]
        #~ a = [int(i) for i in t.split(':')]
        #~ return datetime.time(hour=a[0],minute=a[1],second=a[2])
|
MaxTyutyunnikov/lino
|
lino/utils/mdbtools.py
|
Python
|
gpl-3.0
| 6,191
|
[
"Brian"
] |
4918887029b7a713aa7ac0c11d727953ba2afeea6f7aaf94b6fe42a24bd47a4f
|
# Copyright 2014-2018 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
from pyscf.nao.m_fact import sgn
#
#
#
class c2r_c():
  """ Conversion from complex to real harmonics """
  def __init__(self, j):
    # Build the unitary (2j+1)x(2j+1) matrix that maps complex spherical
    # harmonics to real ones for maximal angular momentum j, plus its
    # Hermitian conjugate, complex conjugate and transpose, which the
    # transform routines below use.
    self._j = j
    self._c2r = np.zeros( (2*j+1, 2*j+1), dtype=np.complex128)
    self._c2r[j,j]=1.0
    # Rows/columns are indexed by m+j; sgn[m] carries the Condon-Shortley
    # phase factor (-1)**m (imported from m_fact).
    for m in range(1,j+1):
      self._c2r[m+j, m+j] = sgn[m] * np.sqrt(0.5)
      self._c2r[m+j,-m+j] = np.sqrt(0.5)
      self._c2r[-m+j,-m+j]= 1j*np.sqrt(0.5)
      self._c2r[-m+j, m+j]= -sgn[m] * 1j * np.sqrt(0.5)
    self._hc_c2r = np.conj(self._c2r).transpose()
    self._conj_c2r = np.conjugate(self._c2r) # what is the difference ? conj and conjugate
    self._tr_c2r = np.transpose(self._c2r)
    #print(abs(self._hc_c2r.conj().transpose()-self._c2r).sum())
#
#
#
  def c2r_moo(self, j, mab_c, mu2info):
    """ Transform tensor m, orb, orb given in complex spherical harmonics to real spherical harmonic"""
    # mab_c: complex tensor of shape (2j+1, no, no).
    # mu2info: iterable of (mu, j, s, f) tuples describing each orbital
    #   shell's angular momentum j and its start/finish slice [s, f)
    #   in the orbital dimension -- presumably; verify against callers.
    no = mab_c.shape[1]
    mab_r = np.zeros((2*j+1, no, no)) # result
    xww1 = np.zeros((2*j+1, no, no), dtype=np.complex128)
    xww2 = np.zeros( (no, no), dtype=np.complex128)
    xww3 = np.zeros( (no, no), dtype=np.complex128)
    _j = self._j
    # Step 1: transform the leading (multipole) index m.  Only m1 in
    # {-m, m} contribute because the c2r matrix couples only +/-m.
    for m in range(-j,j+1):
      for m1 in range(-abs(m),abs(m)+1,2*abs(m) if m!=0 else 1):
        xww1[j+m,:,:]=xww1[j+m,:,:]+self._hc_c2r[_j+m1,_j+m]*mab_c[j+m1,:,:] # _c2r or _conj_c2r
    # Steps 2 and 3: transform the two orbital indices shell by shell,
    # then keep the real part as the final result for each m.
    for m in range(-j,j+1):
      xww2.fill(0.0)
      for mu1,j1,s1,f1 in mu2info:
        for m1 in range(-j1,j1+1):
          for n1 in range(-abs(m1),abs(m1)+1,2*abs(m1) if m1!=0 else 1):
            xww2[s1+m1+j1,:]=xww2[s1+m1+j1,:]+self._c2r[m1+_j,n1+_j] * xww1[j+m,s1+n1+j1,:]
      xww3.fill(0.0)
      for mu2,j2,s2,f2 in mu2info:
        for m2 in range(-j2,j2+1):
          for n2 in range(-abs(m2),abs(m2)+1,2*abs(m2) if m2!=0 else 1):
            xww3[:,s2+m2+j2]=xww3[:,s2+m2+j2]+self._c2r[m2+_j,n2+_j] * xww2[:,s2+n2+j2]
      mab_r[j+m,:,:] = xww3[:,:].real
    return mab_r
#
#
#
  def c2r_(self, j1,j2, jm,cmat,rmat,mat):
    # In-place two-sided transform of a complex block cmat (angular
    # momenta j1 x j2, centred at offset jm) into the real matrix rmat,
    # using mat as complex scratch space.  Both scratch and output are
    # zeroed here, so callers may reuse the buffers.
    assert(type(mat[0,0])==np.complex128)
    mat.fill(0.0)
    rmat.fill(0.0)
    # Right-multiply by the transpose of c2r (columns, index mm2).
    for mm1 in range(-j1,j1+1):
      for mm2 in range(-j2,j2+1):
        if mm2 == 0 :
          mat[mm1+jm,mm2+jm] = cmat[mm1+jm,mm2+jm]*self._tr_c2r[mm2+self._j,mm2+self._j]
        else :
          # Only the +/-mm2 pair couples under the c2r transform.
          mat[mm1+jm,mm2+jm] = \
            (cmat[mm1+jm,mm2+jm]*self._tr_c2r[mm2+self._j,mm2+self._j] + \
            cmat[mm1+jm,-mm2+jm]*self._tr_c2r[-mm2+self._j,mm2+self._j])
    #if j1==2 and j2==1:
    #  print( mm1,mm2, mat[mm1+jm,mm2+jm] )
    # Left-multiply by the conjugate of c2r (rows, index mm1); the final
    # result is real by construction, so only .real is stored.
    for mm2 in range(-j2,j2+1):
      for mm1 in range(-j1,j1+1):
        if mm1 == 0 :
          rmat[mm1+jm, mm2+jm] = (self._conj_c2r[mm1+self._j,mm1+self._j]*mat[mm1+jm,mm2+jm]).real
        else :
          rmat[mm1+jm, mm2+jm] = \
            (self._conj_c2r[mm1+self._j,mm1+self._j] * mat[mm1+jm,mm2+jm] + \
            self._conj_c2r[mm1+self._j,-mm1+self._j] * mat[-mm1+jm,mm2+jm]).real
    #if j1==2 and j2==1:
    #  print( mm1,mm2, rmat[mm1+jm,mm2+jm] )
  def c2r_vector(self, clm, l, si, fi):
    # Transform one shell of a complex coefficient vector (angular
    # momentum l, occupying slice positions si..fi) to real harmonics.
    # NOTE(review): self._c2r is indexed here with raw m (possibly
    # negative) rather than m+self._j as elsewhere -- negative indices
    # wrap around in numpy; confirm this indexing is intentional.
    assert(clm.dtype == np.complex128)
    rsh = np.zeros(clm.shape, dtype=np.float64)
    for m in range(-l, l+1):
      if m == 0:
        rsh[si + m + l] = self._c2r[m, m]*clm[si + m + l]
      else:
        rsh[si + m + l] = self._c2r[m, m]*clm[si + m + l] +\
          self._c2r[m, -m]*clm[fi - m - l]
    return rsh
|
gkc1000/pyscf
|
pyscf/nao/m_c2r.py
|
Python
|
apache-2.0
| 4,037
|
[
"PySCF"
] |
cf184fc7e849158d25afb98aef0ce528d04dda3ed1b3d86844a963e3adbebc45
|
## This file is part of CDS Invenio.
## Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008 CERN.
##
## CDS Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## CDS Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with CDS Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
__revision__ = "$Id$"
import urllib
import cgi
from invenio.config import \
CFG_CERN_SITE, \
CFG_SITE_LANG, \
CFG_SITE_NAME, \
CFG_SITE_NAME_INTL, \
CFG_SITE_SUPPORT_EMAIL, \
CFG_SITE_SECURE_URL, \
CFG_SITE_URL, \
CFG_WEBSESSION_RESET_PASSWORD_EXPIRE_IN_DAYS, \
CFG_WEBSESSION_ADDRESS_ACTIVATION_EXPIRE_IN_DAYS, \
CFG_WEBSESSION_DIFFERENTIATE_BETWEEN_GUESTS, \
CFG_WEBSEARCH_MAX_RECORDS_IN_GROUPS, \
CFG_ACCESS_CONTROL_LEVEL_ACCOUNTS
from invenio.access_control_config import CFG_EXTERNAL_AUTH_USING_SSO, \
CFG_EXTERNAL_AUTH_LOGOUT_SSO
from invenio.urlutils import make_canonical_urlargd, create_url, create_html_link
from invenio.htmlutils import escape_html, nmtoken_from_string
from invenio.messages import gettext_set_language, language_list_long
from invenio.websession_config import CFG_WEBSESSION_GROUP_JOIN_POLICY
class Template:
    def tmpl_back_form(self, ln, message, url, link):
        """
        A standard one-message-go-back-link page.
        Parameters:
          - 'ln' *string* - The language to display the interface in
          - 'message' *string* - The message to display
          - 'url' *string* - The url to go back to
          - 'link' *string* - The link text
        """
        # NOTE(review): message/url/link are interpolated unescaped into
        # the HTML; callers are expected to pass trusted values.
        out = """
                 <table>
                    <tr>
                      <td align="left">%(message)s
                       <a href="%(url)s">%(link)s</a></td>
                    </tr>
                 </table>
              """% {
                'message' : message,
                'url' : url,
                'link' : link,
                'ln' : ln
              }
        return out
    def tmpl_external_setting(self, ln, key, value):
        """Render one external-account setting as a table row (key: value)."""
        _ = gettext_set_language(ln)
        out = """
        <tr>
            <td align="right"><strong>%s:</strong></td>
            <td><i>%s</i></td>
        </tr>""" % (key, value)
        return out
    def tmpl_external_user_settings(self, ln, html_settings):
        """Render the 'External account settings' section.

        html_settings is pre-rendered HTML (rows from
        tmpl_external_setting) embedded verbatim into a table, followed
        by a pointer to the external-groups page.
        """
        _ = gettext_set_language(ln)
        out = """
        <p><big><strong class="headline">%(external_user_settings)s</strong></big></p>
        <table>
        %(html_settings)s
        </table>
        <p><big><strong class="headline">%(external_user_groups)s</strong></big></p>
        <p>%(consult_external_groups)s</p>
        """ % {
            'external_user_settings' : _('External account settings'),
            'html_settings' : html_settings,
            'consult_external_groups' : _('You can consult the list of your external groups directly in the %(x_url_open)sgroups page%(x_url_close)s.') % {
                'x_url_open' : '<a href="../yourgroups/display?ln=%s#external_groups">' % ln,
                'x_url_close' : '</a>'
            },
            'external_user_groups' : _('External user groups'),
        }
        return out
    def tmpl_user_preferences(self, ln, email, email_disabled, password_disabled, nickname):
        """
        Displays a form for the user to change his email/password.
        Parameters:
          - 'ln' *string* - The language to display the interface in
          - 'email' *string* - The email of the user
          - 'email_disabled' *boolean* - If the user has the right to edit his email
          - 'password_disabled' *boolean* - If the user has the right to edit his password
          - 'nickname' *string* - The nickname of the user (empty string if user does not have it)
        """
        # load the right message language
        _ = gettext_set_language(ln)
        # First form: nickname (editable only while still empty) + email.
        out = """
                <p><big><strong class="headline">%(edit_params)s</strong></big></p>
                <form method="post" action="%(sitesecureurl)s/youraccount/change" name="edit_logins_settings">
                <p>%(change_user)s</p>
                <table>
                  <tr><td align="right" valign="top"><strong>
                      <label for="nickname">%(nickname_label)s:</label></strong><br />
                      <small class="important">(%(mandatory)s)</small>
                    </td><td valign="top">
                      %(nickname_prefix)s%(nickname)s%(nickname_suffix)s<br />
                      <small><span class="quicknote">%(note)s:</span>
                       %(fixed_nickname_note)s
                      </small>
                    </td>
                  </tr>
                  <tr><td align="right"><strong>
                      <label for="email">%(new_email)s:</label></strong><br />
                      <small class="important">(%(mandatory)s)</small>
                    </td><td>
                      <input type="text" size="25" name="email" id="email" %(email_disabled)s value="%(email)s" /><br />
                      <small><span class="quicknote">%(example)s:</span>
                        <span class="example">john.doe@example.com</span>
                      </small>
                    </td>
                  </tr>
                  <tr><td></td><td align="left">
                    <code class="blocknote"><input class="formbutton" type="submit" value="%(set_values)s" /></code>
                  </td></tr>
                </table>
                <input type="hidden" name="action" value="edit" />
                </form>
        """ % {
            'change_user' : _("If you want to change your email or set for the first time your nickname, please set new values in the form below."),
            'edit_params' : _("Edit login credentials"),
            'nickname_label' : _("Nickname"),
            'nickname' : nickname,
            'nickname_prefix' : nickname=='' and '<input type="text" size="25" name="nickname" id="nickname" value=""' or '',
            'nickname_suffix' : nickname=='' and '" /><br /><small><span class="quicknote">'+_("Example")+':</span><span class="example">johnd</span></small>' or '',
            'new_email' : _("New email address"),
            'mandatory' : _("mandatory"),
            'example' : _("Example"),
            'note' : _("Note"),
            'set_values' : _("Set new values"),
            'email' : email,
            'email_disabled' : email_disabled and "readonly" or "",
            'sitesecureurl': CFG_SITE_SECURE_URL,
            'fixed_nickname_note' : _('Since this is considered as a signature for comments and reviews, once set it can not be changed.')
        }
        # The password form is only shown when local passwords apply
        # (user may change the password and the site is not behind SSO).
        if not password_disabled and not CFG_EXTERNAL_AUTH_USING_SSO:
            out += """
                <form method="post" action="%(sitesecureurl)s/youraccount/change" name="edit_password">
                <p>%(change_pass)s</p>
                <table>
                  <tr>
                    <td align="right"><strong><label for="old_password">%(old_password)s:</label></strong><br />
                    </td><td align="left">
                      <input type="password" size="25" name="old_password" id="old_password" %(password_disabled)s /><br />
                      <small><span class="quicknote">%(note)s:</span>
                       %(old_password_note)s
                      </small>
                    </td>
                  </tr>
                  <tr>
                    <td align="right"><strong><label for="new_password">%(new_password)s:</label></strong><br />
                    </td><td align="left">
                      <input type="password" size="25" name="password" id="new_password" %(password_disabled)s /><br />
                      <small><span class="quicknote">%(note)s:</span>
                       %(password_note)s
                      </small>
                    </td>
                  </tr>
                  <tr>
                    <td align="right"><strong><label for="new_password2">%(retype_password)s:</label></strong></td>
                    <td align="left">
                      <input type="password" size="25" name="password2" id="new_password2" %(password_disabled)s value="" />
                    </td>
                  </tr>
                  <tr><td></td><td align="left">
                    <code class="blocknote"><input class="formbutton" type="submit" value="%(set_values)s" /></code>
                  </td></tr>
                </table>
                <input type="hidden" name="action" value="edit" />
                </form>
                """ % {
                    'change_pass' : _("If you want to change your password, please enter the old one and set the new value in the form below."),
                    'mandatory' : _("mandatory"),
                    'old_password' : _("Old password"),
                    'new_password' : _("New password"),
                    'optional' : _("optional"),
                    'note' : _("Note"),
                    'password_note' : _("The password phrase may contain punctuation, spaces, etc."),
                    'old_password_note' : _("You must fill the old password in order to set a new one."),
                    'retype_password' : _("Retype password"),
                    'set_values' : _("Set new password"),
                    'password_disabled' : password_disabled and "disabled" or "",
                    'sitesecureurl': CFG_SITE_SECURE_URL,
                }
        # CERN-specific fallbacks: lightweight accounts get an external
        # reset link; SSO accounts are pointed to the central CERN
        # account system instead of a local password form.
        elif not CFG_EXTERNAL_AUTH_USING_SSO and CFG_CERN_SITE:
            out += "<p>" + _("""If you are using a lightweight CERN account you can
                %(x_url_open)sreset the password%(x_url_close)s.""") % \
                  {'x_url_open' : \
                    '<a href="http://cern.ch/LightweightRegistration/ResetPassword.aspx%s">' \
                      % (make_canonical_urlargd({'email': email, 'returnurl' : CFG_SITE_SECURE_URL + '/youraccount/edit' + make_canonical_urlargd({'lang' : ln}, {})}, {})), 'x_url_close' : '</a>'} + "</p>"
        elif CFG_EXTERNAL_AUTH_USING_SSO and CFG_CERN_SITE:
            out += "<p>" + _("""You can change or reset your CERN account password by means of the %(x_url_open)sCERN account system%(x_url_close)s.""") % \
                {'x_url_open' : '<a href="https://cern.ch/login/password.aspx">', 'x_url_close' : '</a>'} + "</p>"
        return out
    def tmpl_user_bibcatalog_auth(self, bibcatalog_username="", bibcatalog_password="", ln=CFG_SITE_LANG):
        """Render the form for setting the username/password used to talk
        to the BibCatalog (cataloging/ticketing) backend.

        Parameters:
          - 'bibcatalog_username' *string* - current stored username (pre-fills the field)
          - 'bibcatalog_password' *string* - current stored password (pre-fills the field)
          - 'ln' *string* - the language to display the interface in
        Returns an HTML fragment (a <form> posting to /youraccount/change).
        """
        # load the right message language
        _ = gettext_set_language(ln)
        # NOTE(review): the stored password is echoed back into the value=""
        # attribute of the password input — it ends up in the page source.
        out = """
          <form method="post" action="%(sitesecureurl)s/youraccount/change" name="edit_bibcatalog_settings">
          <p><big><strong class="headline">%(edit_bibcatalog_settings)s</strong></big></p>
          <table>
            <tr>
              <td> %(username)s: <input type="text" size="25" name="bibcatalog_username" value="%(bibcatalog_username)s" id="bibcatuid"></td>
              <td> %(password)s: <input type="password" size="25" name="bibcatalog_password" value="%(bibcatalog_password)s" id="bibcatpw"></td>
            </tr>
            <tr>
              <td><input class="formbutton" type="submit" value="%(update_settings)s" /></td>
            </tr>
          </table>
        """ % {
          'sitesecureurl' : CFG_SITE_SECURE_URL,
          'bibcatalog_username' : bibcatalog_username,
          'bibcatalog_password' : bibcatalog_password,
          'edit_bibcatalog_settings' : _("Edit cataloging interface settings"),
          'username' : _("Username"),
          'password' : _("Password"),
          'update_settings' : _('Update settings')
        }
        return out
    def tmpl_user_lang_edit(self, ln, preferred_lang):
        """Render the 'Edit language-related settings' form.

        Parameters:
          - 'ln' *string* - the language to display the interface in
          - 'preferred_lang' *string* - short code of the user's currently
            preferred language; that <option> is rendered pre-selected
        Returns an HTML fragment (a <form> posting to /youraccount/change).
        """
        _ = gettext_set_language(ln)
        out = """
        <form method="post" action="%(sitesecureurl)s/youraccount/change" name="edit_lang_settings">
        <p><big><strong class="headline">%(edit_lang_settings)s</strong></big></p>
          <table>
            <tr><td align="right"><select name="lang" id="lang">
        """ % {
          'sitesecureurl' : CFG_SITE_SECURE_URL,
          'edit_lang_settings' : _("Edit language-related settings"),
        }
        # One <option> per installed interface language; long names are
        # HTML-escaped because they come from the language table, not from _().
        for short_ln, long_ln in language_list_long():
            out += """<option %(selected)s value="%(short_ln)s">%(long_ln)s</option>""" % {
              'selected' : preferred_lang == short_ln and 'selected="selected"' or '',
              'short_ln' : short_ln,
              'long_ln' : escape_html(long_ln)
            }
        out += """</select></td><td valign="top"><strong><label for="lang">%(select_lang)s</label></strong></td></tr>
        <tr><td></td><td><input class="formbutton" type="submit" value="%(update_settings)s" /></td></tr>
        </table></form>""" % {
          'select_lang' : _('Select desired language of the web interface.'),
          'update_settings' : _('Update settings')
        }
        return out
    def tmpl_user_websearch_edit(self, ln, current = 10, show_latestbox = True, show_helpbox = True):
        """Render the 'Edit search-related settings' form.

        Parameters:
          - 'ln' *string* - the language to display the interface in
          - 'current' *int* - currently selected number of results per page
          - 'show_latestbox' *boolean* - whether the 'latest additions' box is enabled
          - 'show_helpbox' *boolean* - whether collection help boxes are enabled
        Returns an HTML fragment (a <form> posting to /youraccount/change).
        """
        _ = gettext_set_language(ln)
        out = """
        <form method="post" action="%(sitesecureurl)s/youraccount/change" name="edit_websearch_settings">
        <p><big><strong class="headline">%(edit_websearch_settings)s</strong></big></p>
        <table>
          <tr><td align="right"><input type="checkbox" %(checked_latestbox)s value="1" name="latestbox" id="latestbox"/></td>
          <td valign="top"><b><label for="latestbox">%(show_latestbox)s</label></b></td></tr>
          <tr><td align="right"><input type="checkbox" %(checked_helpbox)s value="1" name="helpbox" id="helpbox"/></td>
          <td valign="top"><b><label for="helpbox">%(show_helpbox)s</label></b></td></tr>
          <tr><td align="right"><select name="group_records" id="group_records">
        """ % {
          'sitesecureurl' : CFG_SITE_SECURE_URL,
          'edit_websearch_settings' : _("Edit search-related settings"),
          'show_latestbox' : _("Show the latest additions box"),
          'checked_latestbox' : show_latestbox and 'checked="checked"' or '',
          'show_helpbox' : _("Show collection help boxes"),
          'checked_helpbox' : show_helpbox and 'checked="checked"' or '',
        }
        # Offer fixed page sizes, capped by the site-wide maximum.
        for i in 10, 25, 50, 100, 250, 500:
            if i <= CFG_WEBSEARCH_MAX_RECORDS_IN_GROUPS:
                out += """<option %(selected)s>%(i)s</option>
                """ % {
                  'selected' : current == i and 'selected="selected"' or '',
                  'i' : i
                }
        out += """</select></td><td valign="top"><strong><label for="group_records">%(select_group_records)s</label></strong></td></tr>
        <tr><td></td><td><input class="formbutton" type="submit" value="%(update_settings)s" /></td></tr>
        </table>
        </form>""" % {
          'update_settings' : _("Update settings"),
          'select_group_records' : _("Number of search results per page"),
        }
        return out
    def tmpl_user_external_auth(self, ln, methods, current, method_disabled):
        """
        Displays a form for the user to change his authentication method.

        Parameters:

          - 'ln' *string* - The language to display the interface in

          - 'methods' *array* - The methods of authentication

          - 'method_disabled' *boolean* - If the user has the right to change this

          - 'current' *string* - The currently selected method
        """
        # load the right message language
        _ = gettext_set_language(ln)
        out = """
             <form method="post" action="%(sitesecureurl)s/youraccount/change">
               <big><strong class="headline">%(edit_method)s</strong></big>
               <p>%(explain_method)s:</p>
               <table>
                 <tr><td valign="top"><b>%(select_method)s:</b></td><td>
       """ % {
         'edit_method' : _("Edit login method"),
         'explain_method' : _("Please select which login method you would like to use to authenticate yourself"),
         'select_method' : _("Select method"),
         'sitesecureurl': CFG_SITE_SECURE_URL,
       }
        # One radio button per method; all are disabled when the user may not
        # change the setting, and the current method is pre-checked.
        for system in methods:
            out += """<input type="radio" name="login_method" value="%(system)s" id="%(id)s" %(disabled)s %(selected)s /><label for="%(id)s">%(system)s</label><br />""" % {
                     'system' : system,
                     'disabled' : method_disabled and 'disabled="disabled"' or "",
                     'selected' : current == system and 'checked="checked"' or "",
                     # nmtoken_from_string produces an HTML-id-safe token
                     'id' : nmtoken_from_string(system),
                   }
        out += """  </td></tr>
                   <tr><td>&nbsp;</td>
                     <td><input class="formbutton" type="submit" value="%(select_method)s" /></td></tr></table>
                    </form>""" % {
                     'select_method' : _("Select method"),
                   }
        return out
    def tmpl_lost_password_form(self, ln):
        """
        Displays a form for the user to ask for his password sent by email.

        Parameters:

          - 'ln' *string* - The language to display the interface in
        """
        # load the right message language
        _ = gettext_set_language(ln)
        out = "<p>" + _("If you have lost the password for your %(sitename)s %(x_fmt_open)sinternal account%(x_fmt_close)s, then please enter your email address in the following form in order to have a password reset link emailed to you.") % {'x_fmt_open' : '<em>', 'x_fmt_close' : '</em>', 'sitename' : CFG_SITE_NAME_INTL[ln]} + "</p>"
        out += """
          <blockquote>
          <form  method="post" action="../youraccount/send_email">
          <table>
                <tr>
              <td align="right"><strong><label for="p_email">%(email)s:</label></strong></td>
              <td><input type="text" size="25" name="p_email" id="p_email" value="" />
                  <input type="hidden" name="ln" value="%(ln)s" />
                  <input type="hidden" name="action" value="lost" />
              </td>
            </tr>
            <tr><td>&nbsp;</td>
              <td><code class="blocknote"><input class="formbutton" type="submit" value="%(send)s" /></code></td>
            </tr>
          </table>

          </form>
          </blockquote>
          """ % {
            'ln': ln,
            'email' : _("Email address"),
            'send' : _("Send password reset link"),
          }
        # CERN users authenticate externally: point them at the CERN password
        # reset service instead of the internal mechanism.
        if CFG_CERN_SITE:
            out += "<p>" + _("If you have been using the %(x_fmt_open)sCERN login system%(x_fmt_close)s, then you can recover your password through the %(x_url_open)sCERN authentication system%(x_url_close)s.") % {'x_fmt_open' : '<em>', 'x_fmt_close' : '</em>', 'x_url_open' : '<a href="https://cern.ch/lightweightregistration/ResetPassword.aspx%s">' \
            % make_canonical_urlargd({'lf': 'auth', 'returnURL' : CFG_SITE_SECURE_URL + '/youraccount/login?ln='+ln}, {}), 'x_url_close' : '</a>'} + " "
        else:
            out += "<p>" + _("Note that if you have been using an external login system, then we cannot do anything and you have to ask there.") + " "
        out += _("Alternatively, you can ask %s to change your login system from external to internal.") % ("""<a href="mailto:%(email)s">%(email)s</a>""" % { 'email' : CFG_SITE_SUPPORT_EMAIL }) + "</p>"
        return out
    def tmpl_account_info(self, ln, uid, guest, CFG_CERN_SITE):
        """
        Displays the account information

        Parameters:

          - 'ln' *string* - The language to display the interface in

          - 'uid' *string* - The user id

          - 'guest' *boolean* - If the user is guest

          - 'CFG_CERN_SITE' *boolean* - If the site is a CERN site
        """
        # load the right message language
        _ = gettext_set_language(ln)
        out = """<p>%(account_offer)s</p>
                 <blockquote>
                 <dl>
              """ % {
                'account_offer' : _("%s offers you the possibility to personalize the interface, to set up your own personal library of documents, or to set up an automatic alert query that would run periodically and would notify you of search results by email.") % CFG_SITE_NAME_INTL[ln],
              }
        # Settings entry is only meaningful for logged-in users.
        if not guest:
            out += """
                   <dt>
                   <a href="./edit?ln=%(ln)s">%(your_settings)s</a>
                   </dt>
                   <dd>%(change_account)s</dd>""" % {
                     'ln' : ln,
                     'your_settings' : _("Your Settings"),
                     'change_account' : _("Set or change your account email address or password. Specify your preferences about the look and feel of the interface.")
                   }
        out += """
        <dt><a href="../youralerts/display?ln=%(ln)s">%(your_searches)s</a></dt>
        <dd>%(search_explain)s</dd>""" % {
          'ln' : ln,
          'your_searches' : _("Your Searches"),
          'search_explain' : _("View all the searches you performed during the last 30 days."),
        }
        out += """
        <dt><a href="../yourbaskets/display?ln=%(ln)s">%(your_baskets)s</a></dt>
        <dd>%(basket_explain)s""" % {
          'ln' : ln,
          'your_baskets' : _("Your Baskets"),
          'basket_explain' : _("With baskets you can define specific collections of items, store interesting records you want to access later or share with others."),
        }
        # Guests get a warning that their baskets/alerts die with the session.
        if guest and CFG_WEBSESSION_DIFFERENTIATE_BETWEEN_GUESTS:
            out += self.tmpl_warning_guest_user(ln = ln, type = "baskets")
        out += """</dd>
        <dt><a href="../youralerts/list?ln=%(ln)s">%(your_alerts)s</a></dt>
        <dd>%(explain_alerts)s""" % {
          'ln' : ln,
          'your_alerts' : _("Your Alerts"),
          'explain_alerts' : _("Subscribe to a search which will be run periodically by our service. The result can be sent to you via Email or stored in one of your baskets."),
        }
        if guest and CFG_WEBSESSION_DIFFERENTIATE_BETWEEN_GUESTS:
            out += self.tmpl_warning_guest_user(type="alerts", ln = ln)
        out += "</dd>"
        # CERN-only entry linking to the external library loan service.
        if CFG_CERN_SITE:
            out += """</dd>
            <dt><a href="http://weblib.cern.ch/cgi-bin/checkloan?uid=&version=2">%(your_loans)s</a></dt>
            <dd>%(explain_loans)s</dd>""" % {
              'your_loans' : _("Your Loans"),
              'explain_loans' : _("Check out book you have on loan, submit borrowing requests, etc. Requires CERN ID."),
            }
        out += """
        </dl>
        </blockquote>"""
        return out
def tmpl_warning_guest_user(self, ln, type):
"""
Displays a warning message about the specified type
Parameters:
- 'ln' *string* - The language to display the interface in
- 'type' *string* - The type of data that will get lost in case of guest account (for the moment: 'alerts' or 'baskets')
"""
# load the right message language
_ = gettext_set_language(ln)
if (type=='baskets'):
msg = _("You are logged in as a guest user, so your baskets will disappear at the end of the current session.") + ' '
elif (type=='alerts'):
msg = _("You are logged in as a guest user, so your alerts will disappear at the end of the current session.") + ' '
msg += _("If you wish you can %(x_url_open)slogin or register here%(x_url_close)s.") % {'x_url_open': '<a href="' + CFG_SITE_SECURE_URL + '/youraccount/login?ln=' + ln + '">',
'x_url_close': '</a>'}
return """<table class="errorbox" summary="">
<tr>
<th class="errorboxheader">%s</th>
</tr>
</table>""" % msg
def tmpl_account_body(self, ln, user):
"""
Displays the body of the actions of the user
Parameters:
- 'ln' *string* - The language to display the interface in
- 'user' *string* - The username (nickname or email)
"""
# load the right message language
_ = gettext_set_language(ln)
out = _("You are logged in as %(x_user)s. You may want to a) %(x_url1_open)slogout%(x_url1_close)s; b) edit your %(x_url2_open)saccount settings%(x_url2_close)s.") %\
{'x_user': user,
'x_url1_open': '<a href="' + CFG_SITE_SECURE_URL + '/youraccount/logout?ln=' + ln + '">',
'x_url1_close': '</a>',
'x_url2_open': '<a href="' + CFG_SITE_SECURE_URL + '/youraccount/edit?ln=' + ln + '">',
'x_url2_close': '</a>',
}
return out + "<br /><br />"
def tmpl_account_template(self, title, body, ln, url):
"""
Displays a block of the your account page
Parameters:
- 'ln' *string* - The language to display the interface in
- 'title' *string* - The title of the block
- 'body' *string* - The body of the block
- 'url' *string* - The URL to go to the proper section
"""
out ="""
<table class="youraccountbox" width="90%%" summary="" >
<tr>
<th class="youraccountheader"><a href="%s">%s</a></th>
</tr>
<tr>
<td class="youraccountbody">%s</td>
</tr>
</table>""" % (url, title, body)
return out
    def tmpl_account_page(self, ln, warnings, warning_list, accBody, baskets, alerts, searches, messages, loans, groups, submissions, approvals, tickets, administrative):
        """
        Displays the your account page

        Parameters:

          - 'ln' *string* - The language to display the interface in

          - 'warnings' *string* - "1" when warning_list should be rendered

          - 'warning_list' *array* - warnings to show at the top of the page

          - 'accBody' *string* - The body of the heading block

          - 'baskets' *string* - The body of the baskets block

          - 'alerts' *string* - The body of the alerts block

          - 'searches' *string* - The body of the searches block

          - 'messages' *string* - The body of the messages block

          - 'loans' *string* - The body of the loans block

          - 'groups' *string* - The body of the groups block

          - 'submissions' *string* - The body of the submission block

          - 'approvals' *string* - The body of the approvals block

          - 'tickets' *boolean/string* - whether to show the tickets block

          - 'administrative' *string* - The body of the administrative block
        """
        # load the right message language
        _ = gettext_set_language(ln)
        out = ""
        # NOTE(review): 'warnings' is compared as the string "1", not a bool —
        # callers apparently pass it as a string; confirm before changing.
        if warnings == "1":
            out += self.tmpl_general_warnings(warning_list)
        out += self.tmpl_account_template(_("Your Account"), accBody, ln, '/youraccount/edit?ln=%s' % ln)
        # Each optional section is wrapped in the standard account box; empty
        # section bodies suppress the whole box.
        if messages:
            out += self.tmpl_account_template(_("Your Messages"), messages, ln, '/yourmessages/display?ln=%s' % ln)
        if loans:
            out += self.tmpl_account_template(_("Your Loans"), loans, ln, '/yourloans/display?ln=%s' % ln)
        if baskets:
            out += self.tmpl_account_template(_("Your Baskets"), baskets, ln, '/yourbaskets/display?ln=%s' % ln)
        if alerts:
            out += self.tmpl_account_template(_("Your Alert Searches"), alerts, ln, '/youralerts/list?ln=%s' % ln)
        if searches:
            out += self.tmpl_account_template(_("Your Searches"), searches, ln, '/youralerts/display?ln=%s' % ln)
        if groups:
            groups_description = _("You can consult the list of %(x_url_open)syour groups%(x_url_close)s you are administering or are a member of.")
            groups_description %= {'x_url_open': '<a href="' + CFG_SITE_URL + '/yourgroups/display?ln=' + ln + '">',
                                   'x_url_close': '</a>'}
            out += self.tmpl_account_template(_("Your Groups"), groups_description, ln, '/yourgroups/display?ln=%s' % ln)
        if submissions:
            submission_description = _("You can consult the list of %(x_url_open)syour submissions%(x_url_close)s and inquire about their status.")
            submission_description %= {'x_url_open': '<a href="' + CFG_SITE_URL + '/yoursubmissions.py?ln=' + ln + '">',
                                       'x_url_close': '</a>'}
            out += self.tmpl_account_template(_("Your Submissions"), submission_description, ln, '/yoursubmissions.py?ln=%s' % ln)
        if approvals:
            approval_description =  _("You can consult the list of %(x_url_open)syour approvals%(x_url_close)s with the documents you approved or refereed.")
            approval_description %=  {'x_url_open': '<a href="' + CFG_SITE_URL + '/yourapprovals.py?ln=' + ln + '">',
                                      'x_url_close': '</a>'}
            out += self.tmpl_account_template(_("Your Approvals"), approval_description, ln, '/yourapprovals.py?ln=%s' % ln)
        #check if this user might have tickets
        if tickets:
            ticket_description = _("You can consult the list of %(x_url_open)syour tickets%(x_url_close)s.")
            ticket_description %= {'x_url_open': '<a href="' + CFG_SITE_URL + '/yourtickets?ln=' + ln + '">',
                                   'x_url_close': '</a>'}
            out += self.tmpl_account_template(_("Your Tickets"), ticket_description, ln, '/yourtickets?ln=%s' % ln)
        if administrative:
            out += self.tmpl_account_template(_("Your Administrative Activities"), administrative, ln, '/admin')
        return out
def tmpl_account_emailMessage(self, ln, msg):
"""
Displays a link to retrieve the lost password
Parameters:
- 'ln' *string* - The language to display the interface in
- 'msg' *string* - Explicative message on top of the form.
"""
# load the right message language
_ = gettext_set_language(ln)
out =""
out +="""
<body>
%(msg)s <a href="../youraccount/lost?ln=%(ln)s">%(try_again)s</a>
</body>
""" % {
'ln' : ln,
'msg' : msg,
'try_again' : _("Try again")
}
return out
def tmpl_account_reset_password_email_body(self, email, reset_key, ip_address, ln=CFG_SITE_LANG):
"""
The body of the email that sends lost internal account
passwords to users.
"""
_ = gettext_set_language(ln)
out = """
%(intro)s
%(intro2)s
<%(link)s>
%(outro)s
%(outro2)s""" % {
'intro': _("Somebody (possibly you) coming from %(x_ip_address)s "
"has asked\nfor a password reset at %(x_sitename)s\nfor "
"the account \"%(x_email)s\"." % {
'x_sitename' :CFG_SITE_NAME_INTL.get(ln, CFG_SITE_NAME),
'x_email' : email,
'x_ip_address' : ip_address,
}
),
'intro2' : _("If you want to reset the password for this account, please go to:"),
'link' : "%s/youraccount/access%s" %
(CFG_SITE_SECURE_URL, make_canonical_urlargd({
'ln' : ln,
'mailcookie' : reset_key
}, {})),
'outro' : _("in order to confirm the validity of this request."),
'outro2' : _("Please note that this URL will remain valid for about %(days)s days only.") % {'days': CFG_WEBSESSION_RESET_PASSWORD_EXPIRE_IN_DAYS},
}
return out
def tmpl_account_address_activation_email_body(self, email, address_activation_key, ip_address, ln=CFG_SITE_LANG):
"""
The body of the email that sends email address activation cookie
passwords to users.
"""
_ = gettext_set_language(ln)
out = """
%(intro)s
%(intro2)s
<%(link)s>
%(outro)s
%(outro2)s""" % {
'intro': _("Somebody (possibly you) coming from %(x_ip_address)s "
"has asked\nto register a new account at %(x_sitename)s\nfor the "
"email address \"%(x_email)s\"." % {
'x_sitename' :CFG_SITE_NAME_INTL.get(ln, CFG_SITE_NAME),
'x_email' : email,
'x_ip_address' : ip_address,
}
),
'intro2' : _("If you want to complete this account registration, please go to:"),
'link' : "%s/youraccount/access%s" %
(CFG_SITE_SECURE_URL, make_canonical_urlargd({
'ln' : ln,
'mailcookie' : address_activation_key
}, {})),
'outro' : _("in order to confirm the validity of this request."),
'outro2' : _("Please note that this URL will remain valid for about %(days)s days only.") % {'days' : CFG_WEBSESSION_ADDRESS_ACTIVATION_EXPIRE_IN_DAYS},
}
return out
def tmpl_account_emailSent(self, ln, email):
"""
Displays a confirmation message for an email sent
Parameters:
- 'ln' *string* - The language to display the interface in
- 'email' *string* - The email to which the message has been sent
"""
# load the right message language
_ = gettext_set_language(ln)
out =""
out += _("Okay, a password reset link has been emailed to %s.") % email
return out
def tmpl_account_delete(self, ln):
"""
Displays a confirmation message about deleting the account
Parameters:
- 'ln' *string* - The language to display the interface in
"""
# load the right message language
_ = gettext_set_language(ln)
out = "<p>" + _("""Deleting your account""") + '</p>'
return out
def tmpl_account_logout(self, ln):
"""
Displays a confirmation message about logging out
Parameters:
- 'ln' *string* - The language to display the interface in
"""
# load the right message language
_ = gettext_set_language(ln)
out = _("You are no longer recognized by our system.") + ' '
if CFG_EXTERNAL_AUTH_USING_SSO and CFG_EXTERNAL_AUTH_LOGOUT_SSO:
out += _("""You are still recognized by the centralized
%(x_fmt_open)sSSO%(x_fmt_close)s system. You can
%(x_url_open)slogout from SSO%(x_url_close)s, too.""") % \
{'x_fmt_open' : '<strong>', 'x_fmt_close' : '</strong>',
'x_url_open' : '<a href="%s">' % CFG_EXTERNAL_AUTH_LOGOUT_SSO,
'x_url_close' : '</a>'}
out += '<br />'
out += _("If you wish you can %(x_url_open)slogin here%(x_url_close)s.") % \
{'x_url_open': '<a href="./login?ln=' + ln + '">',
'x_url_close': '</a>'}
return out
def tmpl_login_form(self, ln, referer, internal, register_available, methods, selected_method, msg=None):
"""
Displays a login form
Parameters:
- 'ln' *string* - The language to display the interface in
- 'referer' *string* - The referer URL - will be redirected upon after login
- 'internal' *boolean* - If we are producing an internal authentication
- 'register_available' *boolean* - If users can register freely in the system
- 'methods' *array* - The available authentication methods
- 'selected_method' *string* - The default authentication method
- 'msg' *string* - The message to print before the form, if needed
"""
# load the right message language
_ = gettext_set_language(ln)
if msg is "":
out = "<p>%(please_login)s</p>" % {
'please_login' : _("If you already have an account, please login using the form below.")
}
if CFG_CERN_SITE:
out += "<p>" + _("If you don't own a CERN account yet, you can register a %(x_url_open)snew CERN lightweight account%(x_url_close)s.") % {'x_url_open' : '<a href="https://www.cern.ch/lightweightregistration/RegisterAccount.aspx">', 'x_url_close' : '</a>'} + "</p>"
else:
if register_available:
out += "<p>"+_("If you don't own an account yet, please %(x_url_open)sregister%(x_url_close)s an internal account.") %\
{'x_url_open': '<a href="../youraccount/register?ln=' + ln + '">',
'x_url_close': '</a>'} + "</p>"
else:
# users cannot register accounts, so advise them
# how to get one, or be silent about register
# facility if account level is more than 4:
if CFG_ACCESS_CONTROL_LEVEL_ACCOUNTS < 5:
out += "<p>" + _("If you don't own an account yet, please contact %s.") % ('<a href="mailto:%s">%s</a>' % (CFG_SITE_SUPPORT_EMAIL, CFG_SITE_SUPPORT_EMAIL)) + "</p>"
else:
out = "<p>%s</p>" % msg
out += """<form method="post" action="../youraccount/login">
<table>
"""
if len(methods) > 1:
# more than one method, must make a select
login_select = """<select name="login_method" id="login_method">"""
for method in methods:
login_select += """<option value="%(method)s" %(selected)s>%(method)s</option>""" % {
'method' : method,
'selected' : (method == selected_method and 'selected="selected"' or "")
}
login_select += "</select>"
out += """
<tr>
<td align="right"><strong><label for="login_method">%(login_title)s</label></strong></td>
<td>%(login_select)s</td>
</tr>""" % {
'login_title' : _("Login method:"),
'login_select' : login_select,
}
else:
# only one login method available
out += """<input type="hidden" name="login_method" value="%s" />""" % (methods[0])
out += """<tr>
<td align="right">
<input type="hidden" name="ln" value="%(ln)s" />
<input type="hidden" name="referer" value="%(referer)s" />
<strong><label for="p_un">%(username)s:</label></strong>
</td>
<td><input type="text" size="25" name="p_un" id="p_un" value="" /></td>
</tr>
<tr>
<td align="right"><strong><label for="p_pw">%(password)s:</label></strong></td>
<td align="left"><input type="password" size="25" name="p_pw" id="p_pw" value="" /></td>
</tr>
<tr>
<td></td>
<td align="left"><input type="checkbox" name="remember_me" id="remember_me"/><em><label for="remember_me">%(remember_me)s</label></em></td>
<tr>
<td></td>
<td align="center" colspan="3"><code class="blocknote"><input class="formbutton" type="submit" name="action" value="%(login)s" /></code>""" % {
'ln': ln,
'referer' : cgi.escape(referer),
'username' : _("Username"),
'password' : _("Password"),
'remember_me' : _("Remember login on this computer."),
'login' : _("login"),
}
if internal:
out += """ (<a href="./lost?ln=%(ln)s">%(lost_pass)s</a>)""" % {
'ln' : ln,
'lost_pass' : _("Lost your password?")
}
out += """</td>
</tr>
</table></form>"""
out += """<p><strong>%(note)s:</strong> %(note_text)s</p>""" % {
'note' : _("Note"),
'note_text': _("You can use your nickname or your email address to login.")}
return out
def tmpl_lost_your_password_teaser(self, ln=CFG_SITE_LANG):
"""Displays a short sentence to attract user to the fact that
maybe he lost his password. Used by the registration page.
"""
_ = gettext_set_language(ln)
out = ""
out += """<a href="./lost?ln=%(ln)s">%(maybe_lost_pass)s</a>""" % {
'ln' : ln,
'maybe_lost_pass': ("Maybe you have lost your password?")
}
return out
def tmpl_reset_password_form(self, ln, email, reset_key, msg=''):
"""Display a form to reset the password."""
_ = gettext_set_language(ln)
out = ""
out = "<p>%s</p>" % _("Your request is valid. Please set the new "
"desired password in the following form.")
if msg:
out += """<p class='warning'>%s</p>""" % msg
out += """
<form method="post" action="../youraccount/resetpassword?ln=%(ln)s">
<input type="hidden" name="k" value="%(reset_key)s" />
<input type="hidden" name="e" value="%(email)s" />
<input type="hidden" name="reset" value="1" />
<table>
<tr><td align="right"><strong>%(set_password_for)s</strong>:</td><td><em>%(email)s</em></td></tr>
<tr><td align="right"><strong><label for="password">%(type_new_password)s:</label></strong></td>
<td><input type="password" name="password" id="password" value="123" /></td></tr>
<tr><td align="right"><strong><label for="password2">%(type_it_again)s:</label></strong></td>
<td><input type="password" name="password2" id="password2" value="" /></td></tr>
<tr><td align="center" colspan="2">
<input class="formbutton" type="submit" name="action" value="%(set_new_password)s" />
</td></tr>
</table>
</form>""" % {
'ln' : ln,
'reset_key' : reset_key,
'email' : email,
'set_password_for' : _('Set a new password for'),
'type_new_password' : _('Type the new password'),
'type_it_again' : _('Type again the new password'),
'set_new_password' : _('Set the new password')
}
return out
    def tmpl_register_page(self, ln, referer, level):
        """
        Displays a login form

        Parameters:

          - 'ln' *string* - The language to display the interface in

          - 'referer' *string* - The referer URL - will be redirected upon after login

          - 'level' *int* - Login level (0 - all access, 1 - accounts activated, 2+ - no self-registration)
        """
        # load the right message language
        _ = gettext_set_language(ln)
        out = ""
        # level 0/1: self-registration allowed (level 1 additionally requires
        # email verification before the account becomes usable).
        if level <= 1:
            out += _("Please enter your email address and desired nickname and password:")
            if level == 1:
                out += _("It will not be possible to use the account before it has been verified and activated.")
            out += """
<form method="post" action="../youraccount/register">
<input type="hidden" name="referer" value="%(referer)s" />
<input type="hidden" name="ln" value="%(ln)s" />
<table>
  <tr>
   <td align="right"><strong><label for="p_email">%(email_address)s:</label></strong><br /><small class="important">(%(mandatory)s)</small></td>
   <td><input type="text" size="25" name="p_email" id="p_email" value="" /><br />
       <small><span class="quicknote">%(example)s:</span>
       <span class="example">john.doe@example.com</span></small>
   </td>
   <td></td>
  </tr>
  <tr>
   <td align="right"><strong><label for="p_nickname">%(nickname)s:</label></strong><br /><small class="important">(%(mandatory)s)</small></td>
   <td><input type="text" size="25" name="p_nickname" id="p_nickname" value="" /><br />
       <small><span class="quicknote">%(example)s:</span>
       <span class="example">johnd</span></small>
   </td>
   <td></td>
  </tr>
  <tr>
   <td align="right"><strong><label for="p_pw">%(password)s:</label></strong><br /><small class="quicknote">(%(optional)s)</small></td>
   <td align="left"><input type="password" size="25" name="p_pw" id="p_pw" value="" /><br />
      <small><span class="quicknote">%(note)s:</span> %(password_contain)s</small>
   </td>
   <td></td>
  </tr>
  <tr>
   <td align="right"><strong><label for="p_pw2">%(retype)s:</label></strong></td>
   <td align="left"><input type="password" size="25" name="p_pw2" id="p_pw2" value="" /></td>
   <td></td>
  </tr>
  <tr>
    <td></td>
    <td align="left" colspan="3"><code class="blocknote"><input class="formbutton" type="submit" name="action" value="%(register)s" /></code></td>
  </tr>
</table>
</form>
<p><strong>%(note)s:</strong> %(explain_acc)s""" % {
            'referer' : cgi.escape(referer),
            'ln' : cgi.escape(ln),
            'email_address' : _("Email address"),
            'nickname' : _("Nickname"),
            'password' : _("Password"),
            'mandatory' : _("mandatory"),
            'optional' : _("optional"),
            'example' : _("Example"),
            'note' : _("Note"),
            'password_contain' : _("The password phrase may contain punctuation, spaces, etc."),
            'retype' : _("Retype Password"),
            'register' : _("register"),
            'explain_acc' : _("Please do not use valuable passwords such as your Unix, AFS or NICE passwords with this service. Your email address will stay strictly confidential and will not be disclosed to any third party. It will be used to identify you for personal services of %s. For example, you may set up an automatic alert search that will look for new preprints and will notify you daily of new arrivals by email.") % CFG_SITE_NAME,
          }
        else:
            # level >=2, so users cannot register accounts
            out += "<p>" + _("It is not possible to create an account yourself. Contact %s if you want an account.") % ('<a href="mailto:%s">%s</a>' % (CFG_SITE_SUPPORT_EMAIL, CFG_SITE_SUPPORT_EMAIL)) + "</p>"
        return out
def tmpl_account_adminactivities(self, ln, uid, guest, roles, activities):
"""
Displays the admin activities block for this user
Parameters:
- 'ln' *string* - The language to display the interface in
- 'uid' *string* - The used id
- 'guest' *boolean* - If the user is guest
- 'roles' *array* - The current user roles
- 'activities' *array* - The user allowed activities
"""
# load the right message language
_ = gettext_set_language(ln)
out = ""
# guest condition
if guest:
return _("You seem to be a guest user. You have to %(x_url_open)slogin%(x_url_close)s first.") % \
{'x_url_open': '<a href="../youraccount/login?ln=' + ln + '">',
'x_url_close': '<a/>'}
# no rights condition
if not roles:
return "<p>" + _("You are not authorized to access administrative functions.") + "</p>"
# displaying form
out += "<p>" + _("You are enabled to the following roles: %(x_role)s.") % {'x_role': ('<em>' + ", ".join(roles) + "</em>")} + '</p>'
if activities:
# print proposed links:
activities.sort(lambda x, y: cmp(x.lower(), y.lower()))
tmp_out = ''
for action in activities:
if action == "runbibedit":
tmp_out += """<br /> <a href="%s/record/edit/">%s</a>""" % (CFG_SITE_URL, _("Run Record Editor"))
if action == "runbibeditmulti":
tmp_out += """<br /> <a href="%s/record/multiedit/">%s</a>""" % (CFG_SITE_URL, _("Run Multi-Record Editor"))
if action == "runbibcirculation":
tmp_out += """<br /> <a href="%s/admin/bibcirculation/bibcirculationadmin.py?ln=%s">%s</a>""" % (CFG_SITE_URL, ln, _("Run BibCirculation"))
if action == "runbibmerge":
tmp_out += """<br /> <a href="%s/record/merge/">%s</a>""" % (CFG_SITE_URL, _("Run Record Merger"))
if action == "runbatchuploader":
tmp_out += """<br /> <a href="%s/batchuploader/metadata?ln=%s">%s</a>""" % (CFG_SITE_URL, ln, _("Run Batch Uploader"))
if action == "cfgbibformat":
tmp_out += """<br /> <a href="%s/admin/bibformat/bibformatadmin.py?ln=%s">%s</a>""" % (CFG_SITE_URL, ln, _("Configure BibFormat"))
tmp_out += """<br /> <a href="%s/kb?ln=%s">%s</a>""" % (CFG_SITE_URL, ln, _("Configure BibKnowledge"))
if action == "cfgoaiharvest":
tmp_out += """<br /> <a href="%s/admin/bibharvest/oaiharvestadmin.py?ln=%s">%s</a>""" % (CFG_SITE_URL, ln, _("Configure OAI Harvest"))
if action == "cfgoairepository":
tmp_out += """<br /> <a href="%s/admin/bibharvest/oairepositoryadmin.py?ln=%s">%s</a>""" % (CFG_SITE_URL, ln, _("Configure OAI Repository"))
if action == "cfgbibindex":
tmp_out += """<br /> <a href="%s/admin/bibindex/bibindexadmin.py?ln=%s">%s</a>""" % (CFG_SITE_URL, ln, _("Configure BibIndex"))
if action == "cfgbibrank":
tmp_out += """<br /> <a href="%s/admin/bibrank/bibrankadmin.py?ln=%s">%s</a>""" % (CFG_SITE_URL, ln, _("Configure BibRank"))
if action == "cfgwebaccess":
tmp_out += """<br /> <a href="%s/admin/webaccess/webaccessadmin.py?ln=%s">%s</a>""" % (CFG_SITE_URL, ln, _("Configure WebAccess"))
if action == "cfgwebcomment":
tmp_out += """<br /> <a href="%s/admin/webcomment/webcommentadmin.py?ln=%s">%s</a>""" % (CFG_SITE_URL, ln, _("Configure WebComment"))
if action == "cfgwebjournal":
tmp_out += """<br /> <a href="%s/admin/webjournal/webjournaladmin.py?ln=%s">%s</a>""" % (CFG_SITE_URL, ln, _("Configure WebJournal"))
if action == "cfgwebsearch":
tmp_out += """<br /> <a href="%s/admin/websearch/websearchadmin.py?ln=%s">%s</a>""" % (CFG_SITE_URL, ln, _("Configure WebSearch"))
if action == "cfgwebsubmit":
tmp_out += """<br /> <a href="%s/admin/websubmit/websubmitadmin.py?ln=%s">%s</a>""" % (CFG_SITE_URL, ln, _("Configure WebSubmit"))
if tmp_out:
out += _("Here are some interesting web admin links for you:") + tmp_out
out += "<br />" + _("For more admin-level activities, see the complete %(x_url_open)sAdmin Area%(x_url_close)s.") %\
{'x_url_open': '<a href="' + CFG_SITE_URL + '/help/admin?ln=' + ln + '">',
'x_url_close': '</a>'}
return out
def tmpl_create_userinfobox(self, ln, url_referer, guest, username, submitter, referee, admin, usebaskets, usemessages, usealerts, usegroups, useloans, usestats):
        """
        Displays the user block
        Parameters:
        - 'ln' *string* - The language to display the interface in
        - 'url_referer' *string* - URL of the page being displayed
        - 'guest' *boolean* - If the user is guest
        - 'username' *string* - The username (nickname or email)
        - 'submitter' *boolean* - If the user is submitter
        - 'referee' *boolean* - If the user is referee
        - 'admin' *boolean* - If the user is admin
        - 'usebaskets' *boolean* - If baskets are enabled for the user
        - 'usemessages' *boolean* - If messages are enabled for the user
        - 'usealerts' *boolean* - If alerts are enabled for the user
        - 'usegroups' *boolean* - If groups are enabled for the user
        - 'useloans' *boolean* - If loans are enabled for the user
        - 'usestats' *boolean* - If stats are enabled for the user
        @note: with the update of CSS classes (cds.cds ->
        invenio.css), the variables useloans etc are not used in
        this function, since they are in the menus. But we keep
        them in the function signature for backwards
        compatibility.
        @return: HTML snippet: a user icon followed by either a
        "guest :: login" link pair or "username :: logout" links.
        """
        # load the right message language
        _ = gettext_set_language(ln)
        out = """<img src="%s/img/user-icon-1-20x20.gif" border="0" alt=""/> """ % CFG_SITE_URL
        if guest:
            # Guests see a login link; the current page URL is forwarded
            # as 'referer' so a successful login can redirect back here.
            # Login always goes over the secure (HTTPS) site URL.
            out += """%(guest_msg)s ::
            <a class="userinfo" href="%(sitesecureurl)s/youraccount/login?ln=%(ln)s%(referer)s">%(login)s</a>""" % {
                      'sitesecureurl': CFG_SITE_SECURE_URL,
                      'ln' : ln,
                      'guest_msg' : _("guest"),
                      'referer' : url_referer and ('&referer=%s' % urllib.quote(url_referer)) or '',
                      'login' : _('login')
                    }
        else:
            # Logged-in users: account-display link followed by logout,
            # both via the secure site URL.
            out += """
            <a class="userinfo" href="%(sitesecureurl)s/youraccount/display?ln=%(ln)s">%(username)s</a> :: """ % {
                     'sitesecureurl' : CFG_SITE_SECURE_URL,
                     'ln' : ln,
                     'username' : username
                   }
            out += """<a class="userinfo" href="%(sitesecureurl)s/youraccount/logout?ln=%(ln)s">%(logout)s</a>""" % {
                     'sitesecureurl' : CFG_SITE_SECURE_URL,
                     'ln' : ln,
                     'logout' : _("logout"),
                   }
        return out
def tmpl_create_useractivities_menu(self, ln, selected, url_referer, guest, username, submitter, referee, admin, usebaskets, usemessages, usealerts, usegroups, useloans, usestats):
        """
        Returns the main navigation menu with actions based on user's
        priviledges
        @param ln: The language to display the interface in
        @type ln: string
        @param selected: If the menu is currently selected
        @type selected: boolean
        @param url_referer: URL of the page being displayed
        @type url_referer: string
        @param guest: If the user is guest
        @type guest: string
        @param username: The username (nickname or email)
        @type username: string
        @param submitter: If the user is submitter
        @type submitter: boolean
        @param referee: If the user is referee
        @type referee: boolean
        @param admin: If the user is admin
        @type admin: boolean
        @param usebaskets: If baskets are enabled for the user
        @type usebaskets: boolean
        @param usemessages: If messages are enabled for the user
        @type usemessages: boolean
        @param usealerts: If alerts are enabled for the user
        @type usealerts: boolean
        @param usegroups: If groups are enabled for the user
        @type usegroups: boolean
        @param useloans: If loans are enabled for the user
        @type useloans: boolean
        @param usestats: If stats are enabled for the user
        @type usestats: boolean
        @return: html menu of the user activities
        @rtype: string
        """
        # load the right message language
        _ = gettext_set_language(ln)
        out = '''<div class="hassubmenu%(on)s">
            <a hreflang="en" class="header%(selected)s" href="%(CFG_SITE_SECURE_URL)s/youraccount/display?ln=%(ln)s">%(personalize)s</a>
            <ul class="subsubmenu" style="width: 13em;">''' % {
                'CFG_SITE_SECURE_URL' : CFG_SITE_SECURE_URL,
                'ln' : ln,
                'personalize': _("Personalize"),
                'on': selected and " on" or '',
                'selected': selected and "selected" or ''
                }
        # Each entry below is emitted only when the corresponding feature
        # flag applies.  Note that alerts, baskets and searches are shown
        # to guests too (so guests can discover these features).
        if not guest:
            out += '<li><a href="%(CFG_SITE_SECURE_URL)s/youraccount/display?ln=%(ln)s">%(account)s</a></li>' % {
                'CFG_SITE_SECURE_URL' : CFG_SITE_SECURE_URL,
                'ln' : ln,
                'account' : _('Your account')
                }
        if usealerts or guest:
            out += '<li><a href="%(CFG_SITE_SECURE_URL)s/youralerts/list?ln=%(ln)s">%(alerts)s</a></li>' % {
                'CFG_SITE_SECURE_URL' : CFG_SITE_SECURE_URL,
                'ln' : ln,
                'alerts' : _('Your alerts')
                }
        if referee:
            out += '<li><a href="%(CFG_SITE_SECURE_URL)s/yourapprovals.py?ln=%(ln)s">%(approvals)s</a></li>' % {
                'CFG_SITE_SECURE_URL' : CFG_SITE_SECURE_URL,
                'ln' : ln,
                'approvals' : _('Your approvals')
                }
        if usebaskets or guest:
            out += '<li><a href="%(CFG_SITE_SECURE_URL)s/yourbaskets/display?ln=%(ln)s">%(baskets)s</a></li>' % {
                'CFG_SITE_SECURE_URL' : CFG_SITE_SECURE_URL,
                'ln' : ln,
                'baskets' : _('Your baskets')
                }
        if usegroups:
            out += '<li><a href="%(CFG_SITE_SECURE_URL)s/yourgroups/display?ln=%(ln)s">%(groups)s</a></li>' % {
                'CFG_SITE_SECURE_URL' : CFG_SITE_SECURE_URL,
                'ln' : ln,
                'groups' : _('Your groups')
                }
        if useloans:
            out += '<li><a href="%(CFG_SITE_SECURE_URL)s/yourloans/display?ln=%(ln)s">%(loans)s</a></li>' % {
                'CFG_SITE_SECURE_URL' : CFG_SITE_SECURE_URL,
                'ln' : ln,
                'loans' : _('Your loans')
                }
        if usemessages:
            out += '<li><a href="%(CFG_SITE_SECURE_URL)s/yourmessages/display?ln=%(ln)s">%(messages)s</a></li>' % {
                'CFG_SITE_SECURE_URL' : CFG_SITE_SECURE_URL,
                'ln' : ln,
                'messages' : _('Your messages')
                }
        if submitter:
            out += '<li><a href="%(CFG_SITE_SECURE_URL)s/yoursubmissions.py?ln=%(ln)s">%(submissions)s</a></li>' % {
                'CFG_SITE_SECURE_URL' : CFG_SITE_SECURE_URL,
                'ln' : ln,
                'submissions' : _('Your submissions')
                }
        if usealerts or guest:
            out += '<li><a href="%(CFG_SITE_SECURE_URL)s/youralerts/display?ln=%(ln)s">%(searches)s</a></li>' % {
                'CFG_SITE_SECURE_URL' : CFG_SITE_SECURE_URL,
                'ln' : ln,
                'searches' : _('Your searches')
                }
        out += '</ul></div>'
        return out
def tmpl_create_adminactivities_menu(self, ln, selected, url_referer, guest, username, submitter, referee, admin, usebaskets, usemessages, usealerts, usegroups, useloans, usestats, activities):
        """
        Returns the main navigation menu with actions based on user's
        priviledges
        @param ln: The language to display the interface in
        @type ln: string
        @param selected: If the menu is currently selected
        @type selected: boolean
        @param url_referer: URL of the page being displayed
        @type url_referer: string
        @param guest: If the user is guest
        @type guest: string
        @param username: The username (nickname or email)
        @type username: string
        @param submitter: If the user is submitter
        @type submitter: boolean
        @param referee: If the user is referee
        @type referee: boolean
        @param admin: If the user is admin
        @type admin: boolean
        @param usebaskets: If baskets are enabled for the user
        @type usebaskets: boolean
        @param usemessages: If messages are enabled for the user
        @type usemessages: boolean
        @param usealerts: If alerts are enabled for the user
        @type usealerts: boolean
        @param usegroups: If groups are enabled for the user
        @type usegroups: boolean
        @param useloans: If loans are enabled for the user
        @type useloans: boolean
        @param usestats: If stats are enabled for the user
        @type usestats: boolean
        @param activities: dictionary of admin activities (name -> URL)
        @rtype activities: dict
        @return: html menu of the user activities ('' when the user has
        no admin activities at all, i.e. the menu is omitted entirely)
        @rtype: string
        """
        # load the right message language
        _ = gettext_set_language(ln)
        out = ''
        if activities:
            out += '''<div class="hassubmenu%(on)s">
            <a hreflang="en" class="header%(selected)s" href="%(CFG_SITE_SECURE_URL)s/youraccount/youradminactivities?ln=%(ln)s">%(admin)s</a>
            <ul class="subsubmenu" style="width: 19em;">''' % {
                'CFG_SITE_SECURE_URL' : CFG_SITE_SECURE_URL,
                'ln' : ln,
                'admin': _("Administration"),
                'on': selected and " on" or '',
                'selected': selected and "selected" or ''
                }
            # iterkeys() is the Python 2 idiom; entries are listed
            # alphabetically by activity name.
            for name in sorted(activities.iterkeys()):
                url = activities[name]
                out += '<li><a href="%(url)s">%(name)s</a></li>' % {
                    'url': url,
                    'name': name
                    }
            if usestats:
                out += """<li><a href="%(CFG_SITE_URL)s/stats/?ln=%(ln)s">%(stats)s</a></li>""" % {
                    'CFG_SITE_URL' : CFG_SITE_URL,
                    'ln' : ln,
                    'stats' : _("Statistics"),
                    }
            out += '</ul></div>'
        return out
def tmpl_warning(self, warnings, ln=CFG_SITE_LANG):
        """
        Prepare the warnings list
        @param warnings: list of warning tuples (warning_msg, arg1, arg2,
            etc); a single warning tuple may also be passed bare and is
            wrapped in a list automatically
        @param ln: language used to localise the warning texts
        @return: html string of warnings ("" when nothing is displayable)
        """
        from invenio.errorlib import get_msgs_for_code_list
        if not isinstance(warnings, list):
            # A single warning tuple was passed bare: normalise to a list.
            warnings = [warnings]
        if not warnings:
            return ""
        out = ""
        warnings_parsed = get_msgs_for_code_list(warnings, 'warning', ln)
        for (warning_code, warning_text) in warnings_parsed:
            # display only warnings that begin with WRN to user;
            # anything else is internal and must not leak out
            if not warning_code.startswith('WRN'):
                continue
            out += '''
<span class="%(span_class)s">%(warning)s</span><br />''' % \
                { 'span_class' : 'important',
                  'warning' : warning_text }
        return out
def tmpl_warnings(self, warnings, ln=CFG_SITE_LANG):
        """
        Display len(warnings) warning fields inside a single "warningbox".
        @param warnings: a string or a list/tuple of strings; embedded
            newlines are rendered with <br /> tags
        @param ln: language (currently unused; kept for API consistency)
        @return: html output ("" when warnings == [])
        """
        if not((type(warnings) is list) or (type(warnings) is tuple)):
            warnings = [warnings]
        warningbox = ""
        if warnings != []:
            warningbox = "<div class=\"warningbox\">\n  <b>Warning:</b>\n"
            for warning in warnings:
                lines = warning.split("\n")
                warningbox += "  <p>"
                for line in lines[0:-1]:
                    warningbox += line + "    <br />\n"
                warningbox += lines[-1] + "    </p>"
            warningbox += "</div><br />\n"
        return warningbox
def tmpl_display_all_groups(self,
                            infos,
                            admin_group_html,
                            member_group_html,
                            external_group_html = None,
                            warnings=None,
                            ln=CFG_SITE_LANG):
        """
        Displays the 3 tables of groups: admin, member and external
        Parameters:
        - 'ln' *string* - The language to display the interface in
        - 'admin_group_html' *string* - HTML code for displaying all the groups
        the user is the administrator of
        - 'member_group_html' *string* - HTML code for displaying all the groups
        the user is member of
        - 'external_group_html' *string* - HTML code for displaying all the
        external groups the user is member of
        - 'infos' *list* - informational messages displayed above the tables
        - 'warnings' *list* - warnings displayed above the tables; defaults
        to no warnings (None is used instead of a mutable [] default to
        avoid the shared-default-argument pitfall)
        @return: HTML string with the stacked group tables
        """
        _ = gettext_set_language(ln)
        if warnings is None:
            warnings = []
        group_text = self.tmpl_infobox(infos)
        group_text += self.tmpl_warning(warnings)
        if external_group_html:
            group_text += """
<table>
<tr>
<td>%s</td>
</tr>
<tr>
<td><br />%s</td>
</tr>
<tr>
<td><br /><a name='external_groups'></a>%s</td>
</tr>
</table>""" %(admin_group_html, member_group_html, external_group_html)
        else:
            group_text += """
<table>
<tr>
<td>%s</td>
</tr>
<tr>
<td><br />%s</td>
</tr>
</table>""" %(admin_group_html, member_group_html)
        return group_text
def tmpl_display_admin_groups(self, groups, ln=CFG_SITE_LANG):
        """
        Display the groups the user is admin of.
        Parameters:
        - 'ln' *string* - The language to display the interface in
        - 'groups' *list* - All the group the user is admin of, as
          (grpID, name, description) tuples
        - 'infos' *list* - Display infos on top of admin group table
        @return: HTML "mailbox" table with one row per group (name,
        description, edit/members icon links) and a footer row holding
        the "create new group" form.
        """
        _ = gettext_set_language(ln)
        img_link = """
        <a href="%(siteurl)s/yourgroups/%(action)s?grpID=%(grpID)s&ln=%(ln)s">
        <img src="%(siteurl)s/img/%(img)s" alt="%(text)s" style="border:0" width="25"
        height="25" /><br /><small>%(text)s</small>
        </a>"""
        out = self.tmpl_group_table_title(img="/img/group_admin.png",
                                          text=_("You are an administrator of the following groups:") )
        out += """
<table class="mailbox">
  <thead class="mailboxheader">
    <tr class="inboxheader">
      <td>%s</td>
      <td>%s</td>
      <td style="width: 20px;" > </td>
      <td style="width: 20px;"> </td>
    </tr>
  </thead>
  <tfoot>
    <tr style="height:0px;">
      <td></td>
      <td></td>
      <td></td>
      <td></td>
    </tr>
  </tfoot>
  <tbody class="mailboxbody">""" %(_("Group"), _("Description"))
        if len(groups) == 0:
            out += """
    <tr class="mailboxrecord" style="height: 100px;">
      <td colspan="4" style="text-align: center;">
        <small>%s</small>
      </td>
    </tr>""" %(_("You are not an administrator of any groups."),)
        for group_data in groups:
            (grpID, name, description) = group_data
            edit_link = img_link % {'siteurl' : CFG_SITE_URL,
                                    'grpID' : grpID,
                                    'ln': ln,
                                    'img':"webbasket_create_small.png",
                                    'text':_("Edit group"),
                                    'action':"edit"
                                    }
            members_link = img_link % {'siteurl' : CFG_SITE_URL,
                                       'grpID' : grpID,
                                       'ln': ln,
                                       'img':"webbasket_usergroup.png",
                                       # NOTE(review): the %s placeholder is
                                       # filled with '' here, so the label
                                       # reads "Edit  members" — presumably
                                       # a leftover placeholder; confirm.
                                       'text':_("Edit %s members") % '',
                                       'action':"members"
                                       }
            out += """
    <tr class="mailboxrecord">
      <td>%s</td>
      <td>%s</td>
      <td style="text-align: center;" >%s</td>
      <td style="text-align: center;" >%s</td>
    </tr>""" % (cgi.escape(name), cgi.escape(description), edit_link, members_link)
        out += """
    <tr class="mailboxfooter">
      <td colspan="2">
        <form name="newGroup" action="create?ln=%(ln)s" method="post">
          <input type="submit" name="create_group" value="%(write_label)s" class="formbutton" />
        </form>
      </td>
      <td> </td>
      <td> </td>
      <td> </td>
    </tr>
  </tbody>
</table>""" % {'ln': ln,
               'write_label': _("Create new group"),
               }
        return out
def tmpl_display_member_groups(self, groups, ln=CFG_SITE_LANG):
        """
        Display the groups the user is member of.
        Parameters:
        - 'ln' *string* - The language to display the interface in
        - 'groups' *list* - All the group the user is member of, as
          (id, name, description) tuples
        @return: HTML "mailbox" table listing the groups, with join/leave
        forms in the footer row.
        """
        _ = gettext_set_language(ln)
        group_text = self.tmpl_group_table_title(img="/img/webbasket_us.png", text=_("You are a member of the following groups:"))
        group_text += """
<table class="mailbox">
  <thead class="mailboxheader">
    <tr class="inboxheader">
      <td>%s</td>
      <td>%s</td>
    </tr>
  </thead>
  <tfoot>
    <tr style="height:0px;">
      <td></td>
      <td></td>
    </tr>
  </tfoot>
  <tbody class="mailboxbody">""" % (_("Group"), _("Description"))
        if len(groups) == 0:
            group_text += """
    <tr class="mailboxrecord" style="height: 100px;">
      <td colspan="2" style="text-align: center;">
        <small>%s</small>
      </td>
    </tr>""" %(_("You are not a member of any groups."),)
        for group_data in groups:
            # NOTE(review): 'id' shadows the builtin and is unused below.
            (id, name, description) = group_data
            group_text += """
    <tr class="mailboxrecord">
      <td>%s</td>
      <td>%s</td>
    </tr>""" % (cgi.escape(name), cgi.escape(description))
        group_text += """
    <tr class="mailboxfooter">
      <td>
        <form name="newGroup" action="join?ln=%(ln)s" method="post">
          <input type="submit" name="join_group" value="%(join_label)s" class="formbutton" />
        </form>
      </td>
      <td>
        <form name="newGroup" action="leave?ln=%(ln)s" method="post">
          <input type="submit" name="leave" value="%(leave_label)s" class="formbutton" />
        </form>
      </td>
    </tr>
  </tbody>
</table>
""" % {'ln': ln,
       'join_label': _("Join new group"),
       'leave_label':_("Leave group")
       }
        return group_text
def tmpl_display_external_groups(self, groups, ln=CFG_SITE_LANG):
        """
        Display the external groups the user is member of.
        Parameters:
        - 'ln' *string* - The language to display the interface in
        - 'groups' *list* - All the group the user is member of, as
          (id, name, description) tuples
        @return: HTML "mailbox" table listing the external groups
        (read-only: no join/leave forms, unlike the member-groups table).
        """
        _ = gettext_set_language(ln)
        group_text = self.tmpl_group_table_title(img="/img/webbasket_us.png", text=_("You are a member of the following external groups:"))
        group_text += """
<table class="mailbox">
  <thead class="mailboxheader">
    <tr class="inboxheader">
      <td>%s</td>
      <td>%s</td>
    </tr>
  </thead>
  <tfoot>
    <tr style="height:0px;">
      <td></td>
      <td></td>
    </tr>
  </tfoot>
  <tbody class="mailboxbody">""" % (_("Group"), _("Description"))
        if len(groups) == 0:
            group_text += """
    <tr class="mailboxrecord" style="height: 100px;">
      <td colspan="2" style="text-align: center;">
        <small>%s</small>
      </td>
    </tr>""" %(_("You are not a member of any external groups."),)
        for group_data in groups:
            # NOTE(review): 'id' shadows the builtin and is unused below.
            (id, name, description) = group_data
            group_text += """
    <tr class="mailboxrecord">
      <td>%s</td>
      <td>%s</td>
    </tr>""" % (cgi.escape(name), cgi.escape(description))
        group_text += """
  </tbody>
</table>
"""
        return group_text
def tmpl_display_input_group_info(self,
                                  group_name,
                                  group_description,
                                  join_policy,
                                  act_type="create",
                                  grpID=None,
                                  warnings=[],
                                  ln=CFG_SITE_LANG):
        """
        Display group data when creating or updating a group:
        Name, description, join_policy.
        Parameters:
        - 'ln' *string* - The language to display the interface in
        - 'group_name' *string* - name of the group
        - 'group_description' *string* - description of the group
        - 'join_policy' *string* - join policy
        - 'act_type' *string* - info about action : create or edit(update)
        - 'grpID' *int* - ID of the group(not None in case of group editing)
        - 'warnings' *list* - Display warning if values are not correct
        @return: HTML form for creating or editing a group
        """
        _ = gettext_set_language(ln)
        # NOTE(review): 'warnings=[]' is a mutable default argument; it is
        # only read here, but replacing it with a None sentinel would be
        # safer against accidental mutation by future code.
        #default: the "create" flavour of the form
        hidden_id =""
        form_name = "create_group"
        action = CFG_SITE_URL + '/yourgroups/create'
        button_label = _("Create new group")
        button_name = "create_button"
        label = _("Create new group")
        delete_text = ""
        if act_type == "update":
            # "update" flavour: different endpoint, an extra delete button,
            # and the group ID carried along in a hidden field.
            form_name = "update_group"
            action = CFG_SITE_URL + '/yourgroups/edit'
            button_label = _("Update group")
            button_name = "update"
            label = _('Edit group %s') % cgi.escape(group_name)
            delete_text = """<input type="submit" value="%s" class="formbutton" name="%s" />"""
            delete_text %= (_("Delete group"),"delete")
            if grpID is not None:
                hidden_id = """<input type="hidden" name="grpID" value="%s" />"""
                hidden_id %= grpID
        out = self.tmpl_warning(warnings)
        out += """
<form name="%(form_name)s" action="%(action)s" method="post">
  <input type="hidden" name="ln" value="%(ln)s" />
  <div style="padding:10px;">
  <table class="bskbasket">
    <thead class="bskbasketheader">
      <tr>
        <td class="bskactions">
          <img src="%(logo)s" alt="%(label)s" />
        </td>
        <td class="bsktitle">
          <b>%(label)s</b><br />
        </td>
      </tr>
    </thead>
    <tfoot>
      <tr><td colspan="2"></td></tr>
    </tfoot>
    <tbody>
      <tr>
        <td colspan="2">
          <table>
            <tr>
              <td><label for="group_name">%(name_label)s</label></td>
              <td>
                <input type="text" name="group_name" id="group_name" value="%(group_name)s" />
              </td>
            </tr>
            <tr>
              <td><label for="group_description">%(description_label)s</label></td>
              <td>
                <input type="text" name="group_description" id="group_description" value="%(group_description)s" />
              </td>
            </tr>
            <tr>
              <td>%(join_policy_label)s</td>
              <td>
                %(join_policy)s
              </td>
            </tr>
          </table>
        </td>
      </tr>
    </tbody>
  </table>
  %(hidden_id)s
  <table>
    <tr>
      <td>
        <input type="submit" value="%(button_label)s" class="formbutton" name="%(button_name)s" />
      </td>
      <td>
        %(delete_text)s
      </td>
      <td>
        <input type="submit" value="%(cancel_label)s" class="formbutton" name="cancel" />
      </td>
    </tr>
  </table>
  </div>
</form>
"""
        # cgi.escape(..., 1) also escapes double quotes, which is required
        # because the values are interpolated into HTML attribute values.
        out %= {'action' : action,
                'logo': CFG_SITE_URL + '/img/webbasket_create.png',
                'label': label,
                'form_name' : form_name,
                'name_label': _("Group name:"),
                'delete_text': delete_text,
                'description_label': _("Group description:"),
                'join_policy_label': _("Group join policy:"),
                'group_name': cgi.escape(group_name, 1),
                'group_description': cgi.escape(group_description, 1),
                'button_label': button_label,
                'button_name':button_name,
                'cancel_label':_("Cancel"),
                'hidden_id':hidden_id,
                'ln': ln,
                'join_policy' :self.__create_join_policy_selection_menu("join_policy",
                                                                        join_policy,
                                                                        ln)
                }
        return out
def tmpl_display_input_join_group(self,
                                  group_list,
                                  group_name,
                                  group_from_search,
                                  search,
                                  warnings=[],
                                  ln=CFG_SITE_LANG):
        """
        Display the groups the user can join.
        He can use default select list or the search box
        Parameters:
        - 'ln' *string* - The language to display the interface in
        - 'group_list' *list* - All the group the user can join
        - 'group_name' *string* - Name of the group the user is looking for
        - 'group_from search' *list* - List of the group the user can join matching group_name
        - 'search' *int* - User is looking for group using group_name
        - 'warnings' *list* - Display warning if two group are selected
        @return: HTML form with a group drop-down, a search box and the
        join/cancel buttons.
        """
        _ = gettext_set_language(ln)
        # NOTE(review): 'warnings=[]' is a mutable default argument; it is
        # only read here, but a None sentinel would be safer.
        out = self.tmpl_warning(warnings)
        # When a search was performed, an extra table row is injected with
        # either the matching groups or a "no match" message.
        search_content = ""
        if search:
            search_content = """<tr><td> </td><td>"""
            if group_from_search != []:
                search_content += self.__create_select_menu('grpID', group_from_search, _("Please select:"))
            else:
                search_content += _("No matching group")
            search_content += """</td><td> </td></tr>"""
        out += """
<form name="join_group" action="%(action)s" method="post">
  <input type="hidden" name="ln" value="%(ln)s" />
  <div style="padding:10px;">
  <table class="bskbasket">
    <thead class="bskbasketheader">
      <tr>
        <td class="bskactions">
          <img src="%(logo)s" alt="%(label)s" />
        </td>
        <td class="bsktitle">
          <b>%(label)s</b><br />
        </td>
      </tr>
    </thead>
    <tfoot>
      <tr><td colspan="2"></td></tr>
    </tfoot>
    <tbody>
      <tr>
        <td colspan="2">
          <table>
            <tr>
              <td>%(list_label)s</td>
              <td>
                %(group_list)s
              </td>
              <td>
              </td>
            </tr>
            <tr>
              <td><br /><label for="group_name">%(label2)s</label></td>
              <td><br /><input type="text" name="group_name" id="group_name" value="%(group_name)s" /></td>
              <td><br />
                <input type="submit" name="find_button" value="%(find_label)s" class="nonsubmitbutton" />
              </td>
            </tr>
            %(search_content)s
          </table>
        </td>
      </tr>
    </tbody>
  </table>
  <table>
    <tr>
      <td>
        <input type="submit" name="join_button" value="%(label)s" class="formbutton" />
      </td>
      <td>
        <input type="submit" value="%(cancel_label)s" class="formbutton" name="cancel" />
      </td>
    </tr>
  </table>
  </div>
</form>
"""
        out %= {'action' : CFG_SITE_URL + '/yourgroups/join',
                'logo': CFG_SITE_URL + '/img/webbasket_create.png',
                'label': _("Join group"),
                'group_name': cgi.escape(group_name, 1),
                'label2':_("or find it") + ': ',
                'list_label':_("Choose group:"),
                'ln': ln,
                'find_label': _("Find group"),
                'cancel_label':_("Cancel"),
                'group_list' :self.__create_select_menu("grpID",group_list, _("Please select:")),
                'search_content' : search_content
                }
        return out
def tmpl_display_manage_member(self,
                               grpID,
                               group_name,
                               members,
                               pending_members,
                               infos=[],
                               warnings=[],
                               ln=CFG_SITE_LANG):
        """Display current members and waiting members of a group.
        Parameters:
        - 'ln' *string* - The language to display the interface in
        - 'grpID *int* - ID of the group
        - 'group_name' *string* - Name of the group
        - 'members' *list* - List of the current members
        - 'pending_members' *list* - List of the waiting members
        - 'infos' *tuple of 2 lists* - Message to inform user about his last action
        - 'warnings' *list* - Display warning if two group are selected
        @return: HTML page section with three boxes (current members,
        members awaiting approval, invite-new-members) inside one form.
        """
        _ = gettext_set_language(ln)
        # NOTE(review): 'infos=[]' and 'warnings=[]' are mutable default
        # arguments; they are only read here, but None sentinels would be
        # safer against accidental mutation.
        out = self.tmpl_warning(warnings)
        out += self.tmpl_infobox(infos)
        # The template is filled in at the very end (out %= {...}); the
        # %(member_text)s / %(pending_text)s slots are computed below.
        out += """
<form name="member" action="%(action)s" method="post">
 <p>%(title)s</p>
 <input type="hidden" name="ln" value="%(ln)s" />
 <input type="hidden" name="grpID" value="%(grpID)s"/>
 <table>
  <tr>
   <td>
    <table class="bskbasket">
     <thead class="bskbasketheader">
      <tr>
       <td class="bskactions">
        <img src="%(imgurl)s/webbasket_usergroup.png" alt="%(img_alt_header1)s" />
       </td>
       <td class="bsktitle">
        %(header1)s<br />
       </td>
      </tr>
     </thead>
     <tfoot>
      <tr><td colspan="2"></td></tr>
     </tfoot>
     <tbody>
      <tr>
       <td colspan="2">
        <table>
         <tr>
          %(member_text)s
         </tr>
        </table>
       </td>
      </tr>
     </tbody>
    </table>
   </td>
  </tr>
  <tr>
   <td>
    <table class="bskbasket">
     <thead class="bskbasketheader">
      <tr>
       <td class="bskactions">
        <img src="%(imgurl)s/webbasket_usergroup_gray.png" alt="%(img_alt_header2)s" />
       </td>
       <td class="bsktitle">
        %(header2)s<br />
       </td>
      </tr>
     </thead>
     <tfoot>
      <tr><td colspan="2"></td></tr>
     </tfoot>
     <tbody>
      <tr>
       <td colspan="2">
        <table>
         <tr>
          %(pending_text)s
         </tr>
        </table>
       </td>
      </tr>
     </tbody>
    </table>
   </td>
  </tr>
  <tr>
   <td>
    <table class="bskbasket" style="width: 400px">
     <thead class="bskbasketheader">
      <tr>
       <td class="bskactions">
        <img src="%(imgurl)s/iconpen.gif" alt="%(img_alt_header3)s" />
       </td>
       <td class="bsktitle">
        <b>%(header3)s</b><br />
       </td>
      </tr>
     </thead>
     <tfoot>
      <tr><td colspan="2"></td></tr>
     </tfoot>
     <tbody>
      <tr>
       <td colspan="2">
        <table>
         <tr>
          <td colspan="2" style="padding: 0 5 10 5;">%(invite_text)s</td>
         </tr>
        </table>
       </td>
      </tr>
     </tbody>
    </table>
   </td>
  </tr>
  <tr>
   <td>
    <input type="submit" value="%(cancel_label)s" class="formbutton" name="cancel" />
   </td>
  </tr>
 </table>
</form>
"""
        if members :
            member_list = self.__create_select_menu("member_id", members, _("Please select:"))
            member_text = """
            <td style="padding: 0 5 10 5;">%s</td>
            <td style="padding: 0 5 10 5;">
            <input type="submit" name="remove_member" value="%s" class="nonsubmitbutton"/>
            </td>""" % (member_list,_("Remove member"))
        else :
            member_text = """<td style="padding: 0 5 10 5;" colspan="2">%s</td>""" % _("No members.")
        if pending_members :
            pending_list = self.__create_select_menu("pending_member_id", pending_members, _("Please select:"))
            pending_text = """
            <td style="padding: 0 5 10 5;">%s</td>
            <td style="padding: 0 5 10 5;">
            <input type="submit" name="add_member" value="%s" class="nonsubmitbutton"/>
            </td>
            <td style="padding: 0 5 10 5;">
            <input type="submit" name="reject_member" value="%s" class="nonsubmitbutton"/>
            </td>""" % (pending_list,_("Accept member"), _("Reject member"))
        else :
            pending_text = """<td style="padding: 0 5 10 5;" colspan="2">%s</td>""" % _("No members awaiting approval.")
        header1 = self.tmpl_group_table_title(text=_("Current members"))
        header2 = self.tmpl_group_table_title(text=_("Members awaiting approval"))
        header3 = _("Invite new members")
        # NOTE(review): the %-formatting below is applied *inside* _(),
        # so the translation catalogue is looked up with the already
        # formatted string and will normally miss — presumably it should
        # be _('Invitation to join "%s" group') % escape_html(group_name).
        write_a_message_url = create_url(
            "%s/yourmessages/write" % CFG_SITE_URL,
            {
                'ln' : ln,
                'msg_subject' : _('Invitation to join "%s" group' % escape_html(group_name)),
                'msg_body' : _("""\
Hello:
I think you might be interested in joining the group "%s".
You can join by clicking here: %s.
Best regards.
""") % (group_name, create_html_link("%s/yourgroups/join" % CFG_SITE_URL, {
                    'grpID' : grpID,
                    'join_button' : "1",
                    }, link_label=group_name, escape_urlargd=True, escape_linkattrd=True))})
        link_open = '<a href="%s">' % escape_html(write_a_message_url)
        invite_text = _("If you want to invite new members to join your group, please use the %(x_url_open)sweb message%(x_url_close)s system.") % \
            {'x_url_open': link_open,
             'x_url_close': '</a>'}
        action = CFG_SITE_URL + '/yourgroups/members?ln=' + ln
        out %= {'title':_('Group: %s') % escape_html(group_name),
                'member_text' : member_text,
                'pending_text' :pending_text,
                'action':action,
                'grpID':grpID,
                'header1': header1,
                'header2': header2,
                'header3': header3,
                'img_alt_header1': _("Current members"),
                'img_alt_header2': _("Members awaiting approval"),
                'img_alt_header3': _("Invite new members"),
                'invite_text': invite_text,
                'imgurl': CFG_SITE_URL + '/img',
                'cancel_label':_("Cancel"),
                'ln':ln
                }
        return out
def tmpl_display_input_leave_group(self,
                                   groups,
                                   warnings=[],
                                   ln=CFG_SITE_LANG):
        """Display groups the user can leave.
        Parameters:
        - 'ln' *string* - The language to display the interface in
        - 'groups' *list* - List of groups the user is currently member of
        - 'warnings' *list* - Display warning if no group is selected
        @return: HTML form with a group drop-down and leave/cancel buttons
        (or an explanatory message when the user belongs to no group).
        """
        _ = gettext_set_language(ln)
        # NOTE(review): 'warnings=[]' is a mutable default argument; it is
        # only read here, but a None sentinel would be safer.
        out = self.tmpl_warning(warnings)
        out += """
<form name="leave" action="%(action)s" method="post">
 <input type="hidden" name="ln" value="%(ln)s" />
 <div style="padding:10px;">
 <table class="bskbasket">
  <thead class="bskbasketheader">
   <tr>
    <td class="bskactions">
     <img src="%(logo)s" alt="%(label)s" />
    </td>
    <td class="bsktitle">
     <b>%(label)s</b><br />
    </td>
   </tr>
  </thead>
  <tfoot>
   <tr><td colspan="2"></td></tr>
  </tfoot>
  <tbody>
   <tr>
    <td colspan="2">
     <table>
      <tr>
       <td>%(list_label)s</td>
       <td>
        %(groups)s
       </td>
       <td>
       </td>
      </tr>
     </table>
    </td>
   </tr>
  </tbody>
 </table>
 <table>
  <tr>
   <td>
    %(submit)s
   </td>
   <td>
    <input type="submit" value="%(cancel_label)s" class="formbutton" name="cancel" />
   </td>
  </tr>
 </table>
 </div>
</form>
"""
        # With no groups the drop-down and the leave button are replaced
        # by a plain message and nothing to submit.
        if groups:
            groups = self.__create_select_menu("grpID", groups, _("Please select:"))
            list_label = _("Group list")
            submit = """<input type="submit" name="leave_button" value="%s" class="formbutton"/>""" % _("Leave group")
        else :
            groups = _("You are not member of any group.")
            list_label = ""
            submit = ""
        action = CFG_SITE_URL + '/yourgroups/leave?ln=%s'
        action %= (ln)
        out %= {'groups' : groups,
                'list_label' : list_label,
                'action':action,
                'logo': CFG_SITE_URL + '/img/webbasket_create.png',
                'label' : _("Leave group"),
                'cancel_label':_("Cancel"),
                'ln' :ln,
                'submit' : submit
                }
        return out
def tmpl_confirm_delete(self, grpID, ln=CFG_SITE_LANG):
        """
        display a confirm message when deleting a group
        @param grpID *int* - ID of the group
        @param ln: language
        @return: html output
        """
        _ = gettext_set_language(ln)
        action = CFG_SITE_URL + '/yourgroups/edit'
        # NOTE(review): both buttons are inside the same <form>, so the
        # hidden confirmed=1 field is posted in either case; the handler
        # presumably distinguishes Yes/No by the presence of the 'delete'
        # submit-button name — confirm against the controller.
        out = """
<form name="delete_group" action="%(action)s" method="post">
 <table class="confirmoperation">
  <tr>
   <td colspan="2" class="confirmmessage">
    %(message)s
   </td>
  </tr>
  <tr>
   <td>
    <input type="hidden" name="confirmed" value="1" />
    <input type="hidden" name="ln" value="%(ln)s" />
    <input type="hidden" name="grpID" value="%(grpID)s" />
    <input type="submit" name="delete" value="%(yes_label)s" class="formbutton" />
   </td>
   <td>
    <input type="hidden" name="ln" value="%(ln)s" />
    <input type="hidden" name="grpID" value="%(grpID)s" />
    <input type="submit" value="%(no_label)s" class="formbutton" />
   </td>
  </tr>
 </table>
</form>"""% {'message': _("Are you sure you want to delete this group?"),
             'ln':ln,
             'yes_label': _("Yes"),
             'no_label': _("No"),
             'grpID':grpID,
             'action': action
             }
        return out
def tmpl_confirm_leave(self, uid, grpID, ln=CFG_SITE_LANG):
        """
        display a confirm message when leaving a group
        @param uid: user ID (currently unused; kept for API compatibility)
        @param grpID *int* - ID of the group
        @param ln: language
        @return: html output
        """
        _ = gettext_set_language(ln)
        action = CFG_SITE_URL + '/yourgroups/leave'
        # NOTE(review): as in tmpl_confirm_delete, both buttons share one
        # <form>; the handler presumably keys on the 'leave_button' name.
        out = """
<form name="leave_group" action="%(action)s" method="post">
 <table class="confirmoperation">
  <tr>
   <td colspan="2" class="confirmmessage">
    %(message)s
   </td>
  </tr>
  <tr>
   <td>
    <input type="hidden" name="confirmed" value="1" />
    <input type="hidden" name="ln" value="%(ln)s" />
    <input type="hidden" name="grpID" value="%(grpID)s" />
    <input type="submit" name="leave_button" value="%(yes_label)s" class="formbutton" />
   </td>
   <td>
    <input type="hidden" name="ln" value="%(ln)s" />
    <input type="hidden" name="grpID" value="%(grpID)s" />
    <input type="submit" value="%(no_label)s" class="formbutton" />
   </td>
  </tr>
 </table>
</form>"""% {'message': _("Are you sure you want to leave this group?"),
             'ln':ln,
             'yes_label': _("Yes"),
             'no_label': _("No"),
             'grpID':grpID,
             'action': action
             }
        return out
def __create_join_policy_selection_menu(self, name, current_join_policy, ln=CFG_SITE_LANG):
        """Private helper: build the join-policy drop-down menu.
        @param name: name attribute of the HTML select control
        @param current_join_policy: currently selected policy, one of the
            values of CFG_WEBSESSION_GROUP_JOIN_POLICY
        @param ln: language for the option labels
        @return: HTML string of the select element
        """
        _ = gettext_set_language(ln)
        policy_labels = (('VISIBLEOPEN', _("Visible and open for new members")),
                         ('VISIBLEMAIL', _("Visible but new members need approval")))
        choices = [(CFG_WEBSESSION_GROUP_JOIN_POLICY[key], label)
                   for (key, label) in policy_labels]
        return self.__create_select_menu(name, choices, _("Please select:"),
                                         selected_key=current_join_policy)
def __create_select_menu(self, name, elements, select_text, multiple=0, selected_key=None):
        """Private helper: render an HTML select control.
        @param name: name of HTML control
        @param elements: list of (key, value) option pairs
        @param select_text: label of the leading placeholder option
            (carries the sentinel value "-1")
        @param multiple: when true, render a multi-selection list
        @param selected_key: key whose option is pre-selected, if any
        @return: HTML string of the select element
        """
        if multiple :
            parts = ["""
            <select name="%s" multiple="multiple" style="width:100%%">"""% (name)]
        else :
            parts = ["""<select name="%s" style="width:100%%">""" % name]
        parts.append('<option value="-1">%s</option>' % (select_text))
        for (key, label) in elements:
            marker = ' selected="selected"' if key == selected_key else ''
            parts.append('<option value="%s"%s>%s</option>'% (key, marker, label))
        parts.append('</select>')
        return ''.join(parts)
def tmpl_infobox(self, infos, ln=CFG_SITE_LANG):
        """Render informational messages as "info" styled div blocks.
        @param infos: a string, or a list/tuple of strings; embedded
            newlines are rendered as <br /> tags
        @param ln: language (loaded for consistency; no text here is
            actually translated)
        @return: html output
        """
        _ = gettext_set_language(ln)
        if type(infos) not in (list, tuple):
            infos = [infos]
        chunks = []
        for message in infos:
            chunks.append('<div><span class="info">')
            pieces = message.split("\n")
            for piece in pieces[:-1]:
                chunks.append(piece + "<br />\n")
            chunks.append(pieces[-1] + "</span></div>\n")
        return "".join(chunks)
def tmpl_navtrail(self, ln=CFG_SITE_LANG, title=""):
        """
        Render the navigation trail, e.g.:
        Your account > Your group > title
        @param title: the last part of the navtrail; it is not rendered
            here, it only controls whether the "Your Groups" link appears
        @param ln: language
        return html formatted navtrail
        """
        _ = gettext_set_language(ln)
        trail = '<a class="navtrail" href="%s/youraccount/display">%s</a>' \
                % (CFG_SITE_URL, _("Your Account"))
        if title != "":
            trail += ' > <a class="navtrail" href="%s/yourgroups/display">%s</a>' \
                     % (CFG_SITE_URL, _("Your Groups"))
        return trail
def tmpl_group_table_title(self, img="", text="", ln=CFG_SITE_LANG):
        """
        Render the title line of a group table.
        - 'img' *string* - icon path relative to CFG_SITE_URL (optional)
        - 'text' *string* - title text
        - 'ln' *string* - The language to display the interface in (unused)
        """
        parts = ["<div>"]
        if img:
            parts.append("""
            <img src="%s" alt="" />
            """ % (CFG_SITE_URL + img))
        parts.append("""
            <b>%s</b>
            </div>""" % text)
        return "".join(parts)
def tmpl_admin_msg(self, group_name, grpID, ln=CFG_SITE_LANG):
    """
    return message content for joining group
    - 'group_name' *string* - name of the group
    - 'grpID' *int* - ID of the group
    - 'ln' *string* - The language to display the interface in
    @return: (subject, body) tuple of the notification message
    """
    _ = gettext_set_language(ln)
    subject = _("Group %s: New membership request") % group_name
    # Link pointing the group admin at the member-management page.
    url = CFG_SITE_URL + "/yourgroups/members?grpID=%s&ln=%s"
    url %= (grpID, ln)
    # FIXME: which user? We should show his nickname.
    body = (_("A user wants to join the group %s.") % group_name) + '<br />'
    body += _("Please %(x_url_open)saccept or reject%(x_url_close)s this user's request.") % {'x_url_open': '<a href="' + url + '">',
                                                                                             'x_url_close': '</a>'}
    body += '<br />'
    return subject, body
def tmpl_member_msg(self,
                    group_name,
                    accepted=0,
                    ln=CFG_SITE_LANG):
    """
    return message content when new member is accepted/rejected
    - 'group_name' *string* - name of the group
    - 'accepted' *int* - 1 if new membership has been accepted, 0 if it has been rejected
    - 'ln' *string* - The language to display the interface in
    @return: (subject, body) tuple of the notification message
    """
    _ = gettext_set_language(ln)
    if accepted:
        subject = _("Group %s: Join request has been accepted") % (group_name)
        body = _("Your request for joining group %s has been accepted.") % (group_name)
    else:
        subject = _("Group %s: Join request has been rejected") % (group_name)
        body = _("Your request for joining group %s has been rejected.") % (group_name)
    # Append a link back to the user's groups overview page.
    url = CFG_SITE_URL + "/yourgroups/display?ln=" + ln
    body += '<br />'
    body += _("You can consult the list of %(x_url_open)syour groups%(x_url_close)s.") % {'x_url_open': '<a href="' + url + '">',
                                                                                          'x_url_close': '</a>'}
    body += '<br />'
    return subject, body
def tmpl_delete_msg(self,
                    group_name,
                    ln=CFG_SITE_LANG):
    """
    return message content notifying members that a group was deleted
    - 'group_name' *string* - name of the group
    - 'ln' *string* - The language to display the interface in
    @return: (subject, body) tuple of the notification message
    """
    _ = gettext_set_language(ln)
    subject = _("Group %s has been deleted") % group_name
    # Link back to the user's groups overview page.
    url = CFG_SITE_URL + "/yourgroups/display?ln=" + ln
    body = _("Group %s has been deleted by its administrator.") % group_name
    body += '<br />'
    body += _("You can consult the list of %(x_url_open)syour groups%(x_url_close)s.") % {'x_url_open': '<a href="' + url + '">',
                                                                                          'x_url_close': '</a>'}
    body += '<br />'
    return subject, body
def tmpl_group_info(self, nb_admin_groups=0, nb_member_groups=0, nb_total_groups=0, ln=CFG_SITE_LANG):
    """
    display infos about groups (used by myaccount.py)
    @param nb_admin_groups: number of groups the user is admin of
    @param nb_member_groups: number of groups the user is member of
    @param nb_total_groups: number of groups the user belongs to
    @param ln: language
    @return: html output.
    """
    _ = gettext_set_language(ln)
    out = _("You can consult the list of %(x_url_open)s%(x_nb_total)i groups%(x_url_close)s you are subscribed to (%(x_nb_member)i) or administering (%(x_nb_admin)i).")
    # Interpolate the link markup and the three group counters.
    out %= {'x_url_open': '<a href="' + CFG_SITE_URL + '/yourgroups/display?ln=' + ln + '">',
            'x_nb_total': nb_total_groups,
            'x_url_close': '</a>',
            'x_nb_admin': nb_admin_groups,
            'x_nb_member': nb_member_groups}
    return out
def tmpl_general_warnings(self, warning_list, ln=CFG_SITE_LANG):
    """
    display information to the admin user about possible
    security problems in the system.

    @param warning_list: list of warning/note identifiers to render
    @param ln: The language to display the interface in
    @return: html formatted message block (empty string if none apply)
    """
    message = ""
    _ = gettext_set_language(ln)
    #Try and connect to the mysql database with the default invenio password
    if "warning_mysql_password_equal_to_invenio_password" in warning_list:
        message += "<p><font color=red>"
        message += _("Warning : The password set for MySQL is the same as the default CDS-Invenio password. For security purposes, you might want to change the password.")
        message += "</font></p>"
    #Try and connect to the invenio database with the default invenio password
    if "warning_invenio_password_equal_to_default" in warning_list:
        message += "<p><font color=red>"
        message += _("Warning : The password set for the CDS Invenio database is the same as the default CDS-Invenio password. For security purposes, you might want to change the password.")
        message += "</font></p>"
    #Check if the admin password is empty
    if "warning_empty_admin_password" in warning_list:
        message += "<p><font color=red>"
        message += _("Warning : The password set for the CDS-Invenio admin user is currently empty. For security purposes, it is strongly recommended that you add a password.")
        message += "</font></p>"
    #Check if the admin email has been changed from the default
    if "warning_site_support_email_equal_to_default" in warning_list:
        message += "<p><font color=red>"
        # Fixed duplicated phrase "change this to change this to".
        message += _("Warning : The email address set for support email is currently set to cds.support@cern.ch . It is recommended that you change this to your own address.")
        message += "</font></p>"
    #Check for a new release
    if "note_new_release_available" in warning_list:
        message += "<p><font color=red>"
        message += _("A newer version of CDS-Invenio is available for download. Please visit ")
        message += "<a href=\"http://cdsware.cern.ch/invenio/download.html\">cdsware</a>"
        message += "</font></p>"
    #Error downloading release notes
    if "error_cannot_download_release_notes" in warning_list:
        message += "<p><font color=red>"
        message += _("Cannot download release notes from http://cdsware.cern.ch/, please check your internet connection")
        message += "</font></p>"
    return message
|
lbjay/cds-invenio
|
modules/websession/lib/websession_templates.py
|
Python
|
gpl-2.0
| 102,861
|
[
"VisIt"
] |
b2f817050d48d484fa3794ce5f7c26d623416cfe377dcf9f4c9da1d767419a8e
|
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from builtins import zip
from builtins import range
from builtins import object
import itertools as it
import warnings
import numpy as np
from scipy.special import gammaln
try:
from bottleneck import nansum, nanmedian
except ImportError:
from numpy import nansum
try:
from numpy import nanmedian
except ImportError:
from scipy.stats import nanmedian
from scipy.stats.mstats import mquantiles
from . import _motion as mc
import sima.motion.frame_align
import sima.misc
from sima.motion import MotionEstimationStrategy
np.seterr(invalid='ignore', divide='ignore')
def _parse_granularity(granularity):
if isinstance(granularity, int):
return (granularity, 1)
elif isinstance(granularity, str):
return {'frame': (0, 1),
'plane': (1, 1),
'row': (2, 1),
'column': (3, 1)}[granularity]
elif isinstance(granularity, tuple):
return granularity
else:
raise TypeError(
'granularity must be of type str, int, or tuple of int')
def _pixel_distribution(dataset, tolerance=0.001, min_frames=1000):
    """Estimate the distribution of pixel intensities for each channel.

    Parameters
    ----------
    dataset : iterable of sequences of frames
        Source of the planes whose pixels are accumulated.
    tolerance : float
        The maximum relative error in the estimates that must be
        achieved for termination.
    min_frames: int
        The minimum number of frames that must be evaluated before
        termination.

    Returns
    -------
    mean_est : array
        Mean intensities of each channel.
    var_est :
        Variances of the intensity of each channel.
    """
    # TODO: separate distributions for each plane
    # Per-channel running accumulators over every pixel seen so far.
    sums = np.zeros(dataset.frame_shape[-1]).astype(float)
    sum_squares = np.zeros_like(sums)
    counts = np.zeros_like(sums)
    t = 0
    for frame in it.chain.from_iterable(dataset):
        for plane in frame:
            if t > 0:
                mean_est = sums / counts
                var_est = (sum_squares / counts) - (mean_est ** 2)
                # Terminate once the standard error of the mean is small
                # relative to the mean for every channel.
                if t > min_frames and np.all(
                        np.sqrt(var_est / counts) / mean_est < tolerance):
                    break
            # NaN pixels contribute nothing to the sums and are excluded
            # from the counts.
            sums += np.nan_to_num(nansum(nansum(plane, axis=0), axis=0))
            sum_squares += np.nan_to_num(
                nansum(nansum(plane ** 2, axis=0), axis=0))
            counts += np.isfinite(plane).sum(axis=0).sum(axis=0)
            t += 1
    # NOTE(review): the break above only exits the inner (plane) loop,
    # so iteration resumes with the next frame -- confirm intended.
    # NOTE(review): mean_est/var_est are unbound if the dataset yields
    # fewer than two planes.
    assert np.all(mean_est > 0)
    assert np.all(var_est > 0)
    return mean_est, var_est
def _whole_frame_shifting(dataset, shifts):
    """Line up the data by the frame-shift estimates

    Parameters
    ----------
    dataset : iterable of sequences of frames
        The imaging data to be aligned and averaged.
    shifts : array
        DxT or DxTxP array with the estimated shifts for each frame/plane.

    Returns
    -------
    reference : array
        Time average of each channel after frame-by-frame alignment.
        Size: (num_channels, num_rows, num_columns).
    variances : array
        Variance of each channel after frame-by-frame alignment.
        Size: (num_channels, num_rows, num_columns)
    """
    # Extremes of the shifts along each displacement axis, ignoring NaNs.
    min_shifts = np.nanmin([np.nanmin(s.reshape(-1, s.shape[-1]), 0)
                            for s in shifts], 0)
    # Shifts are expected to be pre-normalized to a zero minimum.
    assert np.all(min_shifts == 0)
    max_shifts = np.nanmax([np.nanmax(s.reshape(-1, s.shape[-1]), 0)
                            for s in shifts], 0)
    # Enlarge the output volume so every shifted frame fits inside it.
    out_shape = list(dataset.frame_shape)
    if len(min_shifts) == 2:
        # 2D (y, x) shifts: grow rows and columns.
        out_shape[1] += max_shifts[0] - min_shifts[0]
        out_shape[2] += max_shifts[1] - min_shifts[1]
    elif len(min_shifts) == 3:
        # 3D (z, y, x) shifts: grow all three spatial axes.
        for i in range(3):
            out_shape[i] += max_shifts[i] - min_shifts[i]
    else:
        raise Exception
    # Accumulators: running sum, sum of squares, and per-voxel counts.
    reference = np.zeros(out_shape)
    sum_squares = np.zeros_like(reference)
    count = np.zeros_like(reference)
    for frame, shift in zip(it.chain.from_iterable(dataset),
                            it.chain.from_iterable(shifts)):
        if shift.ndim == 1:  # single shift for the whole volume
            # Skip frames whose shift estimate is masked (unavailable).
            if any(x is np.ma.masked for x in shift):
                continue
            low = shift - min_shifts
            high = shift + frame.shape[:-1]
            reference[low[0]:high[0], low[1]:high[1], low[2]:high[2]] += \
                np.nan_to_num(frame)
            sum_squares[low[0]:high[0], low[1]:high[1], low[2]:high[2]] += \
                np.nan_to_num(frame ** 2)
            count[low[0]:high[0], low[1]:high[1], low[2]:high[2]] += \
                np.isfinite(frame)
        else:  # plane-specific shifts
            for plane, p_shifts, ref, ssq, cnt in zip(
                    frame, shift, reference, sum_squares, count):
                if any(x is np.ma.masked for x in p_shifts):
                    continue
                low = p_shifts - min_shifts  # TODO: NaN considerations
                high = low + plane.shape[:-1]
                ref[low[0]:high[0], low[1]:high[1]] += np.nan_to_num(plane)
                ssq[low[0]:high[0], low[1]:high[1]] += np.nan_to_num(
                    plane ** 2)
                cnt[low[0]:high[0], low[1]:high[1]] += np.isfinite(plane)
    with warnings.catch_warnings():
        # 0/0 at never-covered voxels yields NaN; that is expected.
        warnings.simplefilter("ignore")
        reference /= count
        assert np.all(np.isnan(reference[np.equal(count, 0)]))
        variances = (sum_squares / count) - reference ** 2
        assert not np.any(variances < 0)
    return reference, variances
def _discrete_transition_prob(r, log_transition_probs, n):
"""Calculate the transition probability between two discrete position
states.
Parameters
----------
r : array
The location being transitioned to.
transition_probs : function
The continuous transition probability function.
n : int
The number of partitions along each axis.
Returns
-------
float
The discrete transition probability between the two states.
"""
def _log_add(a, b):
"""Add two log probabilities to get a new log probability.
Returns log(exp(a) + exp(b))
"""
m = min(a, b)
M = max(a, b)
if M == -np.inf:
return -np.inf
return M + np.log(1. + np.exp(m - M))
logp = - np.inf
for x in np.linspace(-1, 1, n + 2)[1:-1]:
for y in np.linspace(-1, 1, n + 2)[1:-1]:
if len(r) == 2:
logp = _log_add(log_transition_probs(r + np.array([y, x])) +
np.log(1 - abs(y)) + np.log(1 - abs(x)), logp)
else:
for z in np.linspace(-1, 1, n + 2)[1:-1]:
new_logp = _log_add(
log_transition_probs(r + np.array([z, y, x])) +
np.log(1 - abs(z)) + np.log(1 - abs(y)) +
np.log(1 - abs(x)), logp)
if not np.isnan(new_logp):
logp = new_logp
else:
raise Exception
return logp - len(r) * np.log(n)
def _threshold_gradient(im):
    """Indicate pixel locations with gradient below the bottom 10th percentile

    Parameters
    ----------
    im : array
        The mean intensity images for each channel.
        Size: (num_channels, num_rows, num_columns).

    Returns
    -------
    array
        Binary values indicating whether the magnitude of the gradient is below
        the 10th percentile. Same size as im.
    """
    if im.shape[0] > 1:
        # Calculate directional relative derivatives; the gradient of
        # log(im) is the derivative relative to the local intensity.
        # The leading (across-channel) gradient component is discarded.
        _, g_x, g_y = np.gradient(np.log(im))
    else:
        # Calculate directional relative derivatives for the single
        # channel, then restore the leading channel axis.
        g_x, g_y = np.gradient(np.log(im[0]))
        g_x = g_x.reshape([1, g_x.shape[0], g_x.shape[1]])
        g_y = g_y.reshape([1, g_y.shape[0], g_y.shape[1]])
    gradient_magnitudes = np.sqrt((g_x ** 2) + (g_y ** 2))
    below_threshold = []
    for chan in gradient_magnitudes:
        # Per-channel 10th-percentile threshold over finite values only.
        threshold = mquantiles(chan[np.isfinite(chan)].flatten(), [0.1])[0]
        below_threshold.append(chan < threshold)
    return np.array(below_threshold)
def _initial_distribution(decay, noise_cov, mean_shift):
    """Get the initial distribution of the displacements.

    Returns a callable mapping a displacement x to the Gaussian density
    with mean `mean_shift` and the stationary covariance of the AR(1)
    process defined by `decay` and `noise_cov`.
    """
    # Initial guess for the stationary covariance.
    # NOTE(review): `decay * decay.T` is an elementwise product here;
    # confirm a matrix product was not intended for non-diagonal decay.
    # The byteswap presumably normalizes byte order of loaded data --
    # TODO confirm.
    initial_cov = np.linalg.solve(np.diag([1, 1]) - decay * decay.T,
                                  noise_cov.newbyteorder('>').byteswap())
    # Refine via the AR(1) covariance fixed-point recursion.
    for _ in range(1000):
        initial_cov = decay * initial_cov * decay.T + noise_cov
    # don't let C be singular
    initial_cov[0, 0] = max(initial_cov[0, 0], 0.1)
    initial_cov[1, 1] = max(initial_cov[1, 1], 0.1)
    return lambda x: np.exp(
        -0.5 * np.dot(
            x - mean_shift, np.linalg.solve(initial_cov, x - mean_shift))
    ) / np.sqrt(2.0 * np.pi * np.linalg.det(initial_cov))
def _lookup_tables(position_bounds, log_markov_matrix):
    """Generate lookup tables to speed up the algorithm performance.

    Parameters
    ----------
    position_bounds : array of int
        The minimum and maximum (+1) allowable coordinates.
    log_markov_matrix :
        The log transition probabilities.

    Returns
    -------
    position_tbl : array
        Lookup table used to index each possible displacement.
    transition_tbl : array
        Lookup table used to find the indices of displacements to which
        transitions can occur from the position.
    log_markov_matrix_tbl : array
        Lookup table used to find the transition probability of the transitions
        from transition_tbl.
    """
    # Enumerate every displacement within the bounds, in product order.
    position_tbl = np.array(
        list(it.product(*[list(range(m, M))
                          for m, M in zip(*position_bounds)])),
        dtype=int)
    # Reverse map: displacement tuple -> row index in position_tbl.
    position_dict = {tuple(position): i
                     for i, position in enumerate(position_tbl)}
    # create transition lookup and create lookup for transition probability
    transition_tbl = []
    log_markov_matrix_tbl = []
    for step in it.product(
            *[list(range(-s + 1, s)) for s in log_markov_matrix.shape]):
        if len(step) == 2:
            # 2D steps imply no motion along the z-axis.
            step = (0,) + step
        tmp_tbl = []
        for pos in position_tbl:
            new_position = tuple(pos + np.array(step))
            try:
                tmp_tbl.append(position_dict[new_position])
            except KeyError:
                # Destination is out of bounds: mark with sentinel -1.
                tmp_tbl.append(-1)
        transition_tbl.append(tmp_tbl)
        # The transition probability depends only on |step| per axis.
        log_markov_matrix_tbl.append(
            log_markov_matrix[tuple(abs(s) for s in step)])
    transition_tbl = np.array(transition_tbl, dtype=int)
    log_markov_matrix_tbl = np.fromiter(log_markov_matrix_tbl, dtype=float)
    return position_tbl, transition_tbl, log_markov_matrix_tbl
def _backtrace(start_idx, backpointer, states, position_tbl):
"""Perform the backtracing stop of the Viterbi algorithm.
Parameters
----------
start_idx : int
...
Returns:
--------
trajectory : array
The maximum aposteriori trajectory of displacements.
Shape: (2, len(states))
"""
T = len(states)
dim = len(position_tbl[0])
i = start_idx
trajectory = np.zeros([T, dim], dtype=int)
trajectory[-1] = position_tbl[states[-1][i]]
for t in range(T - 2, -1, -1):
# NOTE: backpointer index 0 corresponds to second timestep
i = backpointer[t][i]
trajectory[t] = position_tbl[states[t][i]]
return trajectory
class _HiddenMarkov(MotionEstimationStrategy):
    """Base class for HMM-based motion estimation.

    Subclasses supply `_estimate_shifts` (a coarse aligner used to
    initialize the model) and may override `_post_process` to reshape
    the estimated displacements.
    """

    def __init__(self, granularity=2, num_states_retained=50,
                 max_displacement=None, n_processes=1, restarts=None,
                 verbose=True):
        # Normalize granularity to a (dimension, size) tuple; dimension
        # names are mapped to their axis indices.
        if isinstance(granularity, int) or isinstance(granularity, str):
            granularity = (granularity, 1)
        elif not isinstance(granularity, tuple):
            raise TypeError(
                'granularity must be of type str, int, or tuple')
        if isinstance(granularity[0], str):
            granularity = ({'frame': 0,
                            'plane': 1,
                            'row': 2,
                            'column': 3}[granularity[0]], granularity[1])
        # Record every constructor argument (minus self) as a parameter.
        self._params = dict(locals())
        del self._params['self']

    def _neighbor_viterbi(
            self, dataset, references, gains, movement_model,
            min_displacements, max_displacements, pixel_means, pixel_variances,
            max_step=1):
        """Estimate the MAP trajectory with the Viterbi Algorithm.

        Runs one beam search per sequence (cycle) of the dataset, using
        precomputed transition tables and the movement model's initial
        state distribution, and expands the chunk-level displacements
        back to per-element granularity.
        """
        assert references.ndim == 4
        granularity = self._params['granularity']
        # Convert reference intensities into expected photon counts.
        scaled_refs = references / gains
        displacement_tbl, transition_tbl, log_markov_tbl, = _lookup_tables(
            [min_displacements, max_displacements + 1],
            movement_model.log_transition_matrix(
                max_distance=max_step,
                dt=granularity[1] / np.prod(references.shape[:granularity[0]]))
        )
        assert displacement_tbl.dtype == int
        tmp_states, log_p = movement_model.initial_probs(
            displacement_tbl, min_displacements, max_displacements)
        displacements = []
        for i, sequence in enumerate(dataset):
            if self._params['verbose']:
                print('Estimating displacements for cycle ', i)
            imdata = NormalizedIterator(sequence, gains, pixel_means,
                                        pixel_variances, granularity)
            positions = PositionIterator(sequence.shape[:-1], granularity)
            restarts = self._params['restarts']
            if restarts is not None:
                # Number of granularity-sized chunks between restarts.
                restart_period = np.prod(
                    sequence.shape[(restarts + 1):(granularity[0] + 1)]
                ) // granularity[1]
            else:
                restart_period = None
            disp = _beam_search(
                imdata, positions,
                it.repeat((transition_tbl, log_markov_tbl)), scaled_refs,
                displacement_tbl, (tmp_states, log_p),
                self._params['num_states_retained'], restart_period)
            # Expand chunk-level estimates back to one per element along
            # the granularity dimension.
            new_shape = sequence.shape[:granularity[0]] + \
                (sequence.shape[granularity[0]] // granularity[1],) + \
                (disp.shape[-1],)
            displacements.append(np.repeat(disp.reshape(new_shape),
                                           repeats=granularity[1],
                                           axis=granularity[0]))
        return displacements

    def _estimate(self, dataset):
        """Estimate and save the displacements for the time series.

        Parameters
        ----------
        dataset : sima.ImagingDataset
            The dataset whose motion is estimated.

        Returns
        -------
        list of ndarray
            The estimated displacements, one array per sequence.
        """
        params = self._params
        if params['verbose']:
            print('Estimating model parameters.')
        # Coarse shift estimates from the subclass's aligner.
        shifts = self._estimate_shifts(dataset)
        references, variances = _whole_frame_shifting(dataset, shifts)
        if params['max_displacement'] is None:
            # Default: allow up to half the frame size per dimension.
            max_displacement = np.array(dataset.frame_shape[:3]) // 2
        else:
            max_displacement = np.array(params['max_displacement'])
        # Gain per channel: the variance/mean ratio of a Poisson signal.
        # NOTE(review): no axis is passed to nanmedian, and the default
        # axis differs between the possible nanmedian imports -- confirm
        # a per-channel (axis=0) median is what executes here.
        gains = nanmedian(
            (variances / references).reshape(-1, references.shape[-1]))
        if not (np.all(np.isfinite(gains)) and np.all(gains > 0)):
            raise Exception('Failed to estimate positive gains')
        pixel_means, pixel_variances = _pixel_distribution(dataset)
        movement_model = MovementModel.estimate(shifts)
        # Prepend a zero z-component so all shifts are 3D internally.
        if shifts[0].shape[-1] == 2:
            shifts = [np.concatenate([np.zeros(s.shape[:-1] + (1,), dtype=int),
                                      s], axis=-1) for s in shifts]
        min_shifts = np.nanmin([np.nanmin(s.reshape(-1, s.shape[-1]), 0)
                                for s in shifts], 0)
        max_shifts = np.nanmax([np.nanmax(s.reshape(-1, s.shape[-1]), 0)
                                for s in shifts], 0)
        # add a bit of extra room to move around
        if max_displacement.size == 2:
            max_displacement = np.hstack(([0], max_displacement))
        extra_buffer = ((max_displacement - max_shifts + min_shifts) // 2
                        ).astype(int)
        min_displacements = min_shifts - extra_buffer
        max_displacements = max_shifts + extra_buffer
        displacements = self._neighbor_viterbi(
            dataset, references, gains, movement_model, min_displacements,
            max_displacements, pixel_means, pixel_variances)
        return self._post_process(displacements)

    def _post_process(self, displacements):
        # Default: return the displacements unchanged.
        return displacements
class HiddenMarkov2D(_HiddenMarkov):
    """
    Hidden Markov model (HMM) in two dimensions.

    Parameters
    ----------
    granularity : int, str, or tuple, optional
        The granularity of the calculated displacements. A separate
        displacement can be calculated for each frame (granularity=0
        or granularity='frame'), each plane (1 or 'plane'), each
        row (2 or 'row'), or pixel (3 or 'column'). As well, a separate
        displacement can be calculated for every n consecutive elements
        (e.g. granularity=('row', 8) for every 8 rows).
        Defaults to one displacement per row.
    num_states_retained : int, optional
        Number of states to retain at each time step of the HMM.
        Defaults to 50.
    max_displacement : array of int, optional
        The maximum allowed displacement magnitudes in [y,x]. By
        default, arbitrarily large displacements are allowed.
    n_processes : int, optional
        Number of pool processes to spawn to parallelize frame alignment.
        Defaults to 1.
    restarts : int, optional
        How often to reinitialize the hidden Markov model. This can be useful
        if there are long breaks between frames or planes. Parameter values of
        0 or 1 reinitialize the hidden states every frame or plane,
        respectively. By default, the hidden distribution of positions is
        never reinitialized during the sequence.
    verbose : bool, optional
        Whether to print information about progress.

    References
    ----------
    * Dombeck et al. 2007. Neuron. 56(1): 43-57.
    * Kaifosh et al. 2013. Nature Neuroscience. 16(9): 1182-4.
    """

    def _estimate_shifts(self, dataset):
        # Coarse whole-plane 2D translations to initialize the HMM.
        return sima.motion.frame_align.PlaneTranslation2D(
            self._params['max_displacement'],
            n_processes=self._params['n_processes']).estimate(dataset)

    def _post_process(self, displacements):
        # Drop the leading (z) component: 2D output is (y, x) only.
        return [d[..., 1:] for d in displacements]
class MovementModel(object):
    """AR(1) model of frame-to-frame motion estimated from coarse shifts.

    Attributes
    ----------
    mean_shift : array of int
        The mean of the whole-frame displacement estimates
    """

    def __init__(self, cov_matrix, U, s, mean_shift):
        # cov_matrix: noise covariance of the AR(1) process.
        # U, s: SVD factors of the (symmetric) decay matrix.
        if not np.all(np.isfinite(cov_matrix)):
            raise ValueError
        assert np.linalg.det(cov_matrix) > 0
        self._cov_matrix = cov_matrix
        self._U = U
        self._s = s
        self.mean_shift = mean_shift

    @classmethod
    def estimate(cls, shifts, times=None):
        """Estimate the movement model from displacements.

        Parameters
        ----------
        shifts : list of ndarray
            The shape of the ndarray may vary depending on whether
            displacements are estimated per volume, per plane, per row, etc.
        times : unused
            NOTE(review): accepted but never referenced in the body.
        """
        # TODO: add mean value at boundaries to eliminate boundary effects
        # between cycles
        shifts = np.concatenate(shifts).reshape(-1, shifts[0].shape[-1])
        if not shifts.shape[1] in (2, 3):
            raise ValueError
        mean_shift = np.nanmean(shifts, axis=0)
        assert len(mean_shift) == shifts.shape[1]
        centered_shifts = np.nan_to_num(shifts - mean_shift)
        past = centered_shifts[:-1]
        future = centered_shifts[1:]
        past_future = np.dot(past.T, future)
        past_past = np.dot(past.T, past)
        # Solve normal equations for a *symmetric* AR(1) coefficient
        # matrix A; the D*(D+1)/2 unknowns are A's triangular entries.
        idx = 0
        D = shifts.shape[1]
        n = D * (D + 1) // 2
        y = np.zeros(n)
        M = np.zeros((n, n))
        for i in range(D):  # loop over the dimensions of motion
            for j in range(i + 1):  # loop over all pairs of dimension
                y[idx] = past_future[i, j] + past_future[j, i]
                idx_2 = 0
                for k in range(D):
                    for m in range(k + 1):
                        if k == i:
                            M[idx, idx_2] += past_past[j, m]
                        elif m == i:
                            M[idx, idx_2] += past_past[j, k]
                        if k == j:
                            M[idx, idx_2] += past_past[i, m]
                        elif m == j:
                            M[idx, idx_2] += past_past[i, k]
                        idx_2 += 1
                idx += 1
        coefficients = np.dot(np.linalg.pinv(M), y)
        # Rebuild the symmetric coefficient matrix from the solution.
        if D == 2:
            A = np.array([[coefficients[0], coefficients[1]],
                          [coefficients[1], coefficients[2]]])
        if D == 3:
            A = np.array([[coefficients[0], coefficients[1], coefficients[3]],
                          [coefficients[1], coefficients[2], coefficients[4]],
                          [coefficients[3], coefficients[4], coefficients[5]]])
        # Covariance of the AR(1) residuals.
        cov_matrix = np.cov(future.T - np.dot(A, past.T))
        # make cov_matrix non-singular
        Uc, sc, _ = np.linalg.svd(cov_matrix)  # NOTE: U == V
        sc = np.maximum(sc, 1. / len(shifts))
        # NOTE(review): the usual SVD reconstruction is
        # Uc @ diag(sc) @ Uc.T; the second factor here is untransposed --
        # confirm this is intended.
        cov_matrix = np.dot(Uc, np.dot(np.diag(sc), Uc))
        assert np.linalg.det(cov_matrix) > 0
        U, s, _ = np.linalg.svd(A)  # NOTE: U == V for positive definite A
        s = np.minimum(s, 1.)  # Don't allow negative decay, i.e. growth
        return cls(cov_matrix, U, s, mean_shift)

    def decay_matrix(self, dt=1.):
        """
        Parameters
        ---------
        dt : float

        Returns
        -------
        mov_decay : array
            The per-line decay-term in the AR(1) motion model
        """
        # NOTE(review): self._s is a 1-D singular-value vector, so this
        # evaluates U @ (s**dt @ U) (a vector), not
        # U @ diag(s**dt) @ U.T -- confirm whether np.diag was intended.
        decay_matrix = np.dot(self._U, np.dot(self._s ** dt, self._U))
        if not np.all(np.isfinite(decay_matrix)):
            raise Exception
        return decay_matrix

    def cov_matrix(self, dt=1.):
        """
        Parameters
        ---------
        dt : float

        Returns
        -------
        mov_cov : array
            The per-line covariance-term in the AR(1) motion model,
            scaled linearly with the time step dt.
        """
        return self._cov_matrix * dt

    def log_transition_matrix(self, max_distance=1, dt=1.):
        """
        Gaussian Transition Probabilities

        Parameters
        ----------
        max_distance : int
            Maximum per-axis step; the table is indexed by |step|.
        dt : float
            Time step used to scale the covariance.
        """
        cov_matrix = self.cov_matrix(dt)
        assert np.linalg.det(cov_matrix) > 0

        def log_transition_probs(x):
            # Log-density of a zero-mean Gaussian with cov_matrix.
            return -0.5 * (np.log(2 * np.pi * np.linalg.det(cov_matrix)) +
                           np.dot(x, np.linalg.solve(cov_matrix, x)))

        # Discretize the Gaussian over every |step| combination.
        log_transition_matrix = -np.inf * np.ones(
            [max_distance + 1] * len(cov_matrix))
        for disp in it.product(
                *([list(range(max_distance + 1))] * len(cov_matrix))):
            log_transition_matrix[disp] = _discrete_transition_prob(
                disp, log_transition_probs, 20)
        assert np.all(np.isfinite(log_transition_matrix))
        # Always return a 3D table; 2D models get a singleton z-axis.
        if log_transition_matrix.ndim == 2:
            log_transition_matrix = np.expand_dims(log_transition_matrix, 0)
        return log_transition_matrix

    def _initial_distribution(self):
        """Get the initial distribution of the displacements."""
        decay = self.decay_matrix()
        noise_cov = self.cov_matrix()
        # Initial guess for the stationary covariance of the AR(1)
        # process, refined below by the covariance recursion.
        initial_cov = np.linalg.solve(
            np.diag(np.ones(len(decay))) - decay * decay.T,
            noise_cov.newbyteorder('>').byteswap())
        for _ in range(1000):
            initial_cov = decay * initial_cov * decay.T + noise_cov
        # don't let C be singular
        for i in range(len(initial_cov)):
            initial_cov[i, i] = max(initial_cov[i, i], 0.1)

        def idist(x):
            # A 2D model ignores the leading z-component of 3D positions.
            if len(x) == 3 and len(initial_cov) == 2:
                x = x[1:]
            return np.exp(
                -0.5 * np.dot(x - self.mean_shift,
                              np.linalg.solve(initial_cov, x - self.mean_shift)
                              )
            ) / np.sqrt(2.0 * np.pi * np.linalg.det(initial_cov))
        assert np.isfinite(idist(self.mean_shift))
        return idist

    def initial_probs(self, displacement_tbl, min_displacements,
                      max_displacements):
        """Give the initial probabilities for a displacement table"""
        initial_dist = self._initial_distribution()
        states = []
        log_p = []
        for index, position in enumerate(displacement_tbl):  # TODO parallelize
            # check that the displacement is allowable
            if np.all(min_displacements <= position) and np.all(
                    position <= max_displacements):
                states.append(index)
                # probability of initial displacement
                log_p.append(np.log(initial_dist(position)))
        if not np.any(np.isfinite(log_p)):
            raise Exception
        return np.array(states, dtype='int'), np.array(log_p)
class PositionIterator(object):
    """Position iterator

    Yields, for each granularity-sized chunk, the array of (z, y, x)
    acquisition positions covered by that chunk.

    Parameters
    ----------
    shape : tuple of int
        (times, planes, rows, columns)
    granularity : int, str, or tuple
        Chunking specification, normalized via _parse_granularity.
    offset : tuple of int
        (z, y, x) or (y, x)

    Examples
    --------

    >>> from sima.motion.hmm import PositionIterator
    >>> pi = PositionIterator((100, 5, 128, 256), 'frame')
    >>> positions = next(iter(pi))
    >>> positions.shape == (163840, 3)
    True

    >>> pi = PositionIterator((100, 5, 128, 256), 'plane')
    >>> positions = next(iter(pi))
    >>> positions.shape == (32768, 3)
    True

    Group two rows at a time

    >>> pi = PositionIterator((100, 5, 128, 256), (2, 2), [10, 12])
    >>> positions = next(iter(pi))
    >>> positions.shape == (512, 3)
    True

    >>> pi = PositionIterator((100, 5, 128, 256), 'column', [3, 10, 12])
    >>> positions = next(iter(pi))
    """

    def __init__(self, shape, granularity, offset=None):
        self.granularity = _parse_granularity(granularity)
        self.shape = shape
        # Chunks must tile the granularity dimension exactly.
        if self.shape[self.granularity[0]] % self.granularity[1] != 0:
            raise ValueError('granularity[1] must divide the frame shape '
                             'along dimension granularity[0]')
        if offset is None:
            self.offset = [0, 0, 0, 0]
        else:
            # Right-align the given offsets into (t, z, y, x) slots.
            self.offset = ([0, 0, 0, 0] + list(offset))[-4:]

    def __iter__(self):
        shape = self.shape
        granularity = self.granularity
        offset = self.offset

        def out(group):
            """Calculate a single iteration output"""
            # For each base coordinate in the chunk, enumerate every
            # position of the dimensions finer than the granularity.
            return np.array(list(it.chain.from_iterable(
                (base + s for s in it.product(
                    *[range(o, o + x) for x, o in
                      zip(shape[(granularity[0] + 1):],
                          offset[(granularity[0] + 1):])]))
                for base in group)))

        if granularity[0] > 0 or granularity[1] == 1:
            def cycle():
                """Iterator that produces one period of the output."""
                base_iter = it.product(*[list(range(o, x + o)) for x, o in
                                         zip(shape[1:(granularity[0] + 1)],
                                             offset[1:(granularity[0] + 1)])])
                for group in zip(*[base_iter] * granularity[1]):
                    yield out(group)
            # The same positions repeat for every frame.
            for positions in it.cycle(cycle()):
                yield positions
        else:
            # Frame-level granularity with multi-frame chunks: include
            # the time axis when forming bases, then drop it from each.
            base_iter = it.product(*[list(range(o, x + o)) for x, o in
                                     zip(shape[:(granularity[0] + 1)],
                                         offset[:(granularity[0] + 1)])])
            for group in zip(*[base_iter] * granularity[1]):
                yield out([b[1:] for b in group])
def _beam_search(imdata, positions, transitions, references, state_table,
                 initial_dist, num_retained=50, restart_period=None):
    """Perform a beam search (modified Viterbi algorithm).

    Parameters
    ----------
    imdata : iterator of ndarray
        The imaging data for each time step.
    positions : iterator
        The acquisition positions (e.g. position of scan-head) corresponding
        to the imdata.
    transitions : iterator of tuple
        Pairs of (transition table, log transition probabilities).
    references : ndarray
        Reference image in expected-photon-count units.
    state_table : ndarray
        Maps state indices to (z, y, x) displacements.
    initial_dist : tuple
        (state indices, log probabilities) of the initial distribution.
    num_retained : int
        Beam width: number of most-likely states kept per time step.
    restart_period : int, optional
        If given, the state distribution is reset to initial_dist every
        restart_period time steps.

    Returns
    -------
    ndarray
        Maximum a posteriori displacement for every time step.
    """
    if state_table.shape[1] != 3:
        raise ValueError
    log_references = np.log(references)
    backpointer = []
    states = []
    states.append(initial_dist[0])
    log_p_old = initial_dist[1]
    estimates = []
    assert np.any(np.isfinite(log_p_old))
    t = 0
    for data, pos, trans in zip(imdata, positions, transitions):
        transition_table, log_transition_probs = trans
        # Expand retained states through all allowed transitions.
        tmp_states, log_p, tmp_backpointer = mc.transitions(
            states[-1], log_transition_probs, log_p_old, state_table,
            transition_table)
        obs, log_obs_fac, log_obs_p = data
        assert len(obs) == len(pos)
        # Add the observation log-likelihoods to log_p in place.
        mc.log_observation_probabilities_generalized(
            log_p, tmp_states, obs, log_obs_p, log_obs_fac,
            references, log_references, pos, state_table)
        if np.any(np.isfinite(log_p)):
            log_p[np.isnan(log_p)] = -np.Inf  # Remove NaNs to sort.
            ix = np.argsort(-log_p)[0:num_retained]  # Keep likely states.
            states.append(tmp_states[ix])
            # Renormalize so the best retained state has log-prob 0.
            log_p_old = log_p[ix] - log_p[ix[0]]
            backpointer.append(tmp_backpointer[ix])
        else:
            # If none of the observation probabilities are finite,
            # then use states from the previous timestep.
            warnings.warn('No finite observation probabilities.')
            states.append(states[-1])
            backpointer.append(np.arange(num_retained))
        # reinitialize if necessary
        t += 1
        if restart_period is not None and (t % restart_period) == 0:
            end_state_idx = np.argmax(log_p_old)
            estimates.append(_backtrace(end_state_idx, backpointer[1:],
                                        states[1:], state_table))
            # NOTE(review): backpointer is not reset here while states
            # is -- confirm the restart bookkeeping is intended.
            states = [initial_dist[0]]
            log_p_old = initial_dist[1]
    # Trace back whatever remains after the final (or only) segment.
    if len(states) > 1:
        end_state_idx = np.argmax(log_p_old)
        estimates.append(_backtrace(end_state_idx, backpointer[1:],
                                    states[1:], state_table))
    return np.concatenate(estimates, axis=0)
class HiddenMarkov3D(_HiddenMarkov):
    """
    Hidden Markov model (HMM) with displacements in three dimensions.

    Parameters
    ----------
    granularity : int, str, or tuple, optional
        The granularity of the calculated displacements. A separate
        displacement can be calculated for each frame (granularity=0
        or granularity='frame'), each plane (1 or 'plane'), each
        row (2 or 'row'), or pixel (3 or 'column'). As well, a separate
        displacement can be calculated for every n consecutive elements
        (e.g.\ granularity=('row', 8) for every 8 rows).
        Defaults to one displacement per row.
    num_states_retained : int, optional
        Number of states to retain at each time step of the HMM.
        Defaults to 50.
    max_displacement : array of int, optional
        The maximum allowed displacement magnitudes in [z, y, x]. By
        default, arbitrarily large displacements are allowed.
    n_processes : int, optional
        Number of pool processes to spawn to parallelize frame alignment.
        Defaults to 1.
    restarts : int, optional
        How often to reinitialize the hidden Markov model. This can be useful
        if there are long breaks between frames or planes. Parameter values of
        0 or 1 reinitialize the hidden states every frame or plane,
        respectively. By default, the hidden distribution of positions is
        never reinitialized during the sequence.
    verbose : bool, optional
        Whether to print information about progress.

    References
    ----------
    * Dombeck et al. 2007. Neuron. 56(1): 43-57.
    * Kaifosh et al. 2013. Nature Neuroscience. 16(9): 1182-4.
    """

    def _estimate_shifts(self, dataset):
        # Coarse per-volume 3D translations to initialize the HMM.
        shifts = sima.motion.frame_align.VolumeTranslation(
            self._params['max_displacement'], criterion=2.5).estimate(dataset)
        # Verify every shift component is non-negative. The previous
        # check, `all(np.all(s) >= 0 for s in shifts)`, compared a
        # boolean to 0 and was therefore always True.
        assert all(np.all(s >= 0) for s in shifts)
        return shifts
class NormalizedIterator(object):
    """Generator of preprocessed frames for efficient computation.

    Parameters
    ----------
    sequence : sima.Sequence
    gains : array
        The photon-to-intensity gains for each channel.
    pixel_means : array
        The mean pixel intensities for each channel.
    pixel_variances : array
        The pixel intensity variance for each channel.
    granularity : tuple of int

    Yields
    ------
    im : array
        The estimated photon counts for each channel.
    log_im_fac : array
        The logarithm of the factorial of the photon counts in im.
    log_im_p : array
        The log likelihood of observing each pixel intensity (without
        spatial information).

    Examples
    --------
    Plane-wise iteration

    >>> from sima.motion.hmm import NormalizedIterator
    >>> it = NormalizedIterator(
    ...     np.ones((100, 10, 6, 5, 2)), np.ones(2), np.ones(2),
    ...     np.ones(2), 'plane')
    >>> next(iter(it))[0].shape == (30, 2)
    True

    Row-wise iteration:

    >>> it = NormalizedIterator(
    ...     np.ones((100, 10, 6, 5, 2)), np.ones(2), np.ones(2),
    ...     np.ones(2), 'row')
    >>> next(iter(it))[0].shape == (5, 2)
    True
    """

    def __init__(self, sequence, gains, pixel_means, pixel_variances,
                 granularity):
        self.sequence = sequence
        self.gains = gains
        self.pixel_means = pixel_means
        self.pixel_variances = pixel_variances
        self.granularity = _parse_granularity(granularity)

    def __iter__(self):
        # Channel statistics converted into photon-count units.
        scaled_means = self.pixel_means / self.gains
        scaled_vars = self.pixel_variances / self.gains ** 2
        chunk_dim, chunk_size = self.granularity
        for frame in self.sequence:
            # Flatten the dimensions coarser than the granularity so the
            # frame can be consumed chunk by chunk.
            leading = int(np.prod(frame.shape[:chunk_dim]))
            flat = frame.reshape(leading, -1, frame.shape[-1])
            for pieces in zip(*[iter(flat)] * chunk_size):
                photons = np.concatenate(pieces, axis=0) / self.gains
                # Replace NaN pixels with the channel mean so downstream
                # likelihoods stay finite.
                for channel, channel_mean in enumerate(scaled_means):
                    missing = np.isnan(photons[..., channel])
                    photons[..., channel][missing] = channel_mean
                assert np.all(np.isfinite(photons))
                # log(k!) term of the Poisson likelihood.
                factorial_term = gammaln(photons + 1)
                # Gaussian log-likelihood of each pixel intensity,
                # ignoring the reference image.
                pixel_loglik = (
                    -(photons - scaled_means) ** 2 / (2 * scaled_vars) -
                    0.5 * np.log(2. * np.pi * scaled_vars))
                assert np.all(np.isfinite(factorial_term))
                assert np.all(np.isfinite(pixel_loglik))
                yield photons, factorial_term, pixel_loglik
|
jzaremba/sima
|
sima/motion/hmm.py
|
Python
|
gpl-2.0
| 35,830
|
[
"Gaussian",
"NEURON"
] |
6ddc234122c30b498a583decba4092da3b38bc9f5ecb679b45afac943959f900
|
import skimage.color
import skimage.measure
import skimage.transform
import skimage.filters
import skimage.morphology
import numpy as np
import io
from PIL import Image
class GameFrameError(Exception):
    """Raised for invalid game frame data.

    Derives from ``Exception`` rather than ``BaseException``: subclassing
    ``BaseException`` directly makes the error slip past generic
    ``except Exception`` handlers, which is reserved for system-exiting
    exceptions such as ``KeyboardInterrupt`` and ``SystemExit``.
    """
class GameFrame:
    """A captured game frame with lazily-computed, cached variants.

    Parameters
    ----------
    frame_data : bytes or numpy.ndarray
        Either raw encoded frame bytes or a decoded pixel array.
    frame_variants : dict, optional
        Pre-populated cache of derived frames keyed by variant name
        (e.g. "half", "grayscale").
    timestamp : optional
        Capture timestamp; stored as-is and never interpreted here.

    Keyword Arguments
    -----------------
    offset_x, offset_y : int
        Offsets of the frame within the capture region (default 0).
    resize_order : int
        Interpolation order forwarded to ``skimage.transform.resize``
        (default 1).

    Raises
    ------
    GameFrameError
        If ``frame_data`` is neither ``bytes`` nor a numpy array.
    """

    def __init__(self, frame_data, frame_variants=None, timestamp=None, **kwargs):
        if isinstance(frame_data, bytes):
            self.frame_bytes = frame_data
            self.frame_array = None
        elif isinstance(frame_data, np.ndarray):
            self.frame_bytes = None
            self.frame_array = frame_data
        else:
            # Fail fast: previously an unsupported type left both attributes
            # unset, deferring the failure to an AttributeError at first use.
            raise GameFrameError("frame_data must be bytes or a numpy.ndarray")
        self.frame_variants = frame_variants or dict()
        self.timestamp = timestamp
        self.offset_x = kwargs.get("offset_x") or 0
        self.offset_y = kwargs.get("offset_y") or 0
        self.resize_order = kwargs.get("resize_order") or 1

    @property
    def frame(self):
        """The decoded pixel array when available, otherwise the raw bytes."""
        return self.frame_array if self.frame_array is not None else self.frame_bytes

    @property
    def half_resolution_frame(self):
        """ A quarter-sized version of the frame (half-width, half-height)"""
        if "half" not in self.frame_variants:
            self.frame_variants["half"] = self._to_half_resolution()
        return self.frame_variants["half"]

    @property
    def quarter_resolution_frame(self):
        """ A sixteenth-sized version of the frame (quarter-width, quarter-height)"""
        if "quarter" not in self.frame_variants:
            self.frame_variants["quarter"] = self._to_quarter_resolution()
        return self.frame_variants["quarter"]

    @property
    def eighth_resolution_frame(self):
        """ A 1/32-sized version of the frame (eighth-width, eighth-height)"""
        if "eighth" not in self.frame_variants:
            self.frame_variants["eighth"] = self._to_eighth_resolution()
        return self.frame_variants["eighth"]

    @property
    def eighth_resolution_grayscale_frame(self):
        """ A 1/32-sized, grayscale version of the frame (eighth-width, eighth-height)"""
        if "eighth_grayscale" not in self.frame_variants:
            self.frame_variants["eighth_grayscale"] = self._to_eighth_grayscale_resolution()
        return self.frame_variants["eighth_grayscale"]

    @property
    def grayscale_frame(self):
        """ A full-size grayscale version of the frame"""
        if "grayscale" not in self.frame_variants:
            self.frame_variants["grayscale"] = self._to_grayscale()
        return self.frame_variants["grayscale"]

    @property
    def ssim_frame(self):
        """ A 100x100 grayscale frame to be used for SSIM"""
        if "ssim" not in self.frame_variants:
            self.frame_variants["ssim"] = self._to_ssim()
        return self.frame_variants["ssim"]

    @property
    def top_color(self):
        """The most frequent pixel color in the eighth-resolution frame."""
        height, width, channels = self.eighth_resolution_frame.shape
        values, counts = np.unique(self.eighth_resolution_frame.reshape(width * height, channels), axis=0, return_counts=True)
        return [int(i) for i in values[np.argsort(counts)[::-1][0]]]

    def compare_ssim(self, previous_game_frame):
        """Structural similarity between this frame and a previous one.

        NOTE(review): ``skimage.measure.compare_ssim`` was deprecated and
        later removed upstream in favor of
        ``skimage.metrics.structural_similarity`` — verify against the
        pinned scikit-image version before upgrading.
        """
        return skimage.measure.compare_ssim(previous_game_frame.ssim_frame, self.ssim_frame)

    def difference(self, previous_game_frame):
        """Blurred (sigma=8) grayscale difference against a previous frame."""
        current = skimage.filters.gaussian(self.grayscale_frame, 8)
        previous = skimage.filters.gaussian(previous_game_frame.grayscale_frame, 8)
        return current - previous

    def to_pil(self):
        """Return the frame as a PIL Image."""
        return Image.fromarray(self.frame)

    def to_png_bytes(self):
        """Encode the frame as PNG and return the raw bytes.

        NOTE(review): relies on ``skimage.util`` being importable as an
        attribute of ``skimage`` even though it is not explicitly imported
        at module level — confirm this holds for the pinned version.
        """
        pil_frame = Image.fromarray(skimage.util.img_as_ubyte(self.frame))
        if len(self.frame.shape) == 3:
            pil_frame = pil_frame.convert("RGB")
        png_frame = io.BytesIO()
        pil_frame.save(png_frame, format="PNG", compress_level=3)
        png_frame.seek(0)
        return png_frame.read()

    def _fraction_resolution(self, source, divisor):
        """Resize *source* to 1/divisor per side of the full frame, uint8."""
        # Target shape is always derived from the full-resolution frame so
        # the grayscale variant lands on the same dimensions.
        shape = (
            self.frame_array.shape[0] // divisor,
            self.frame_array.shape[1] // divisor
        )
        return np.array(
            skimage.transform.resize(source, shape, mode="reflect", order=self.resize_order) * 255,
            dtype="uint8"
        )

    def _to_half_resolution(self):
        return self._fraction_resolution(self.frame_array, 2)

    def _to_quarter_resolution(self):
        return self._fraction_resolution(self.frame_array, 4)

    def _to_eighth_resolution(self):
        return self._fraction_resolution(self.frame_array, 8)

    def _to_eighth_grayscale_resolution(self):
        return self._fraction_resolution(self.grayscale_frame, 8)

    def _to_grayscale(self):
        return np.array(skimage.color.rgb2gray(self.frame_array) * 255, dtype="uint8")

    def _to_ssim(self):
        # order=0 (nearest) keeps SSIM input free of interpolation artifacts.
        grayscale = self.grayscale_frame
        return skimage.transform.resize(grayscale, (100, 100), mode="reflect", order=0)
|
SerpentAI/SerpentAI
|
serpent/game_frame.py
|
Python
|
mit
| 5,346
|
[
"Gaussian"
] |
0aedb8aedac9a05261f7e6b133c572ae7fa37d4528883d2c095b71cf473df7f5
|
# Run a NeuroKernel simulation of the first SpineML experiment, driving it
# with recorded input taken from the mutant photoreceptor dataset.
import neurokernel.mpi_relaunch
import scipy.io as io
from libSpineML2NK import nk_executable
from libSpineML import smlExperiment
# Load the experiment bundle and grab the first experiment's abstract
# (time-varying) input definition.
e = nk_executable.Executable('./experiment0.xml')
exp = e.bundle.experiments[0].Experiment[0]
ai = exp.AbstractInput[0]
# Recorded stimulus: samples 4000-35999 of channel 0 from the .mat file.
mutant = io.loadmat('data/MutantBG6Data.mat')
m_input = mutant['recorded_input'][4000:36000,0]
net = e.bundle.networks[0]
pop = net.Population[0]
# NOTE(review): bare attribute access with no effect — presumably left over
# from interactive debugging; confirm before removing.
pop.Neuron.Property
# Optionally override neuron parameters Am/Bm for testing (disabled).
test_params = False
if test_params:
    for prop in pop.Neuron.Property:
        if prop.name == 'Am':
            prop.AbstractValue.value = 0.01
        if prop.name == 'Bm':
            prop.AbstractValue.value = -0.9979
# Saturate the input: clamp every value below 1e-10 up to 1e-10.
m_input[m_input < 1e-10] =1e-10
# Replace the experiment's default input with the recorded mutant data,
# one time point per sample.
ai.TimePointValue = []  # Get rid of default input
for time, inj in enumerate(m_input):
    tp = smlExperiment.TimePointValueType(time=time,value=inj)
    ai.add_TimePointValue(tp)
# Duration in seconds — assumes 1 kHz sampling of the recording (TODO confirm).
exp.Simulation.duration = len(m_input)/1000.0
e.execute()
|
AdamRTomkins/libSpineML2NK
|
libSpineML2NK/examples/Narx/Narx_Python/run.py
|
Python
|
gpl-3.0
| 1,011
|
[
"NEURON"
] |
343129243c493cdd974d8f9bdb720acf9e8de9240dcbdf9fe5699b17086603d8
|
#/*
# *
# * TuneIn Radio for XBMC.
# *
# * Copyright (C) 2013 Brian Hornsby
# *
# * This program is free software: you can redistribute it and/or modify
# * it under the terms of the GNU General Public License as published by
# * the Free Software Foundation, either version 3 of the License, or
# * (at your option) any later version.
# *
# * This program is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# * GNU General Public License for more details.
# *
# * You should have received a copy of the GNU General Public License
# * along with this program. If not, see <http://www.gnu.org/licenses/>.
# *
# */
import xbmc
import xbmcgui
import sys
import os
import urllib
import urllib2
import urlparse
import xbmcsettings as settings
import xbmcutils as utils
def cancel_progressdialog(progressdialog):
    """Cancel hook for __download: True when the user pressed Cancel."""
    return progressdialog.iscanceled()
def update_progressdialog(addonsettings, progressdialog, downloadfile, bytes_so_far, chunk_size, total_size):
    """Report hook for __download: refresh the XBMC progress dialog.

    ``chunk_size`` is accepted only for hook-signature compatibility.
    The percentage is scaled to at most 99 so the dialog never shows
    completion before the download loop finishes.
    """
    fraction = float(bytes_so_far) / total_size
    completed = int(round(fraction * 99, 0))
    status_line = addonsettings.get_string(4002) % (bytes_so_far, total_size)
    progressdialog.update(completed, os.path.basename(downloadfile), status_line)
def __download(url, path, addonsettings, progressdialog=None, chunk_size=8192, cancelhook=None, reporthook=None):
    """Download `url` into directory `path`, `chunk_size` bytes at a time.

    `cancelhook(progressdialog)` is polled between chunks and aborts the
    download when it returns True (the partial file is then removed);
    `reporthook` is invoked after each chunk with progress information.

    Returns a `(success, downloadfile)` tuple.
    """
    response = urllib2.urlopen(url)
    downloadfile = os.path.join(
        path, os.path.basename(urlparse.urlsplit(url)[2]))
    total_size = int(response.info().getheader('Content-Length').strip())
    bytes_so_far = 0
    result = True
    # Never overwrite an existing file without the user's confirmation.
    if os.path.exists(downloadfile):
        filename = os.path.basename(urlparse.urlsplit(url)[2])
        if not utils.yesno(addonsettings.get_string(4000), addonsettings.get_string(4003) % filename, addonsettings.get_string(4006)):
            xbmc.log('[XBMC Download] File already exists. Do not overwrite.',
                     xbmc.LOGINFO)
            return (False, downloadfile)
    # ``with`` guarantees the handle is closed even if a hook raises; the
    # original left it open in that case and also deleted the cancelled
    # partial file while it was still open.
    with open(downloadfile, 'wb') as outfile:
        while True:
            chunk = response.read(chunk_size)
            bytes_so_far += len(chunk)
            if not chunk:
                break
            if cancelhook and cancelhook(progressdialog):
                xbmc.log(
                    '[XBMC Download] Download has been cancelled', xbmc.LOGINFO)
                result = False
                break
            outfile.write(chunk)
            if reporthook:
                reporthook(addonsettings, progressdialog,
                           downloadfile, bytes_so_far, chunk_size, total_size)
    # Remove the partial file only after the handle is closed (unlinking an
    # open file fails on Windows).
    if not result and os.path.exists(downloadfile):
        os.remove(downloadfile)
    return (result, downloadfile)
def download(url, downloadpath, addonid, background=False, debug=False):
    """Download `url` into `downloadpath`, optionally with a progress dialog.

    When `background` is falsy a cancellable XBMC progress dialog is shown;
    otherwise the download runs silently and a completion (string 4004) or
    failure (string 4005) notification is emitted instead.

    Returns a `(success, downloadfile)` tuple.
    """
    if debug:
        xbmc.log('[XBMC Download] download', xbmc.LOGDEBUG)
        xbmc.log('[XBMC Download] url: %s' % url, xbmc.LOGDEBUG)
        xbmc.log(
            '[XBMC Download] downloadpath: %s' % downloadpath, xbmc.LOGDEBUG)
        xbmc.log('[XBMC Download] addonid: %s' % addonid, xbmc.LOGDEBUG)
        xbmc.log('[XBMC Download] background: %s' % background, xbmc.LOGDEBUG)
    result = (False, '')
    addonsettings = settings.Settings(addonid, sys.argv)
    if not background:
        progressdialog = xbmcgui.DialogProgress()
        progressdialog.create(addonsettings.get_string(4000))
        progressdialog.update(0, addonsettings.get_string(4001))
    if not os.path.exists(downloadpath):
        os.makedirs(downloadpath)
    try:
        if not background:
            result = __download(url, downloadpath, addonsettings, progressdialog, cancelhook=cancel_progressdialog, reporthook=update_progressdialog)
        else:
            result = __download(url, downloadpath, addonsettings)
    # ``except ... as`` works on Python 2.6+ and, unlike the old comma form
    # used previously, is also valid Python 3 syntax.
    except urllib2.URLError as e:
        xbmc.log('[XBMC Download] URLError: %s' % (e), xbmc.LOGERROR)
        result = (False, None)
    if not background:
        progressdialog.close()
    else:
        # Background mode has no dialog to close; notify success or failure.
        filename = os.path.basename(urlparse.urlsplit(url)[2])
        message_id = 4004 if result[0] else 4005
        command = 'Notification(%s, %s)' % (addonsettings.get_string(
            4000), (addonsettings.get_string(message_id) % (filename)))
        xbmc.executebuiltin(command)
    return result
# CLI entry point. Positional argv: url, URL-quoted download path, addon id,
# background flag, debug flag (both flags passed as the literal string 'True').
if __name__ == '__main__':
    result = download(sys.argv[1], urllib.unquote_plus(sys.argv[2]), sys.argv[
        3], sys.argv[4] == 'True', sys.argv[5] == 'True')
|
SMALLplayer/smallplayer-image-creator
|
storage/.xbmc/addons/plugin.audio.tuneinradio.smallplayer/resources/lib/xbmcdownload.py
|
Python
|
gpl-2.0
| 4,770
|
[
"Brian"
] |
a21e5b4504e9e3cd4e520cb5019d0f8e4c4ef697772909312217ff3308c44c36
|
"""
Acceptance tests for the teams feature.
"""
import json
import random
import time
from dateutil.parser import parse
import ddt
from nose.plugins.attrib import attr
from selenium.common.exceptions import TimeoutException
from uuid import uuid4
from common.test.acceptance.tests.helpers import get_modal_alert, EventsTestMixin, UniqueCourseTest
from common.test.acceptance.fixtures import LMS_BASE_URL
from common.test.acceptance.fixtures.course import CourseFixture
from common.test.acceptance.fixtures.discussion import (
Thread,
MultipleThreadFixture
)
from common.test.acceptance.pages.lms.auto_auth import AutoAuthPage
from common.test.acceptance.pages.lms.course_info import CourseInfoPage
from common.test.acceptance.pages.lms.learner_profile import LearnerProfilePage
from common.test.acceptance.pages.lms.tab_nav import TabNavPage
from common.test.acceptance.pages.lms.teams import (
TeamsPage,
MyTeamsPage,
BrowseTopicsPage,
BrowseTeamsPage,
TeamManagementPage,
EditMembershipPage,
TeamPage
)
from common.test.acceptance.pages.common.utils import confirm_prompt
# Number of topic cards rendered per page in the Browse Topics UI; the
# pagination tests below assert against this value.
TOPICS_PER_PAGE = 12
class TeamsTabBase(EventsTestMixin, UniqueCourseTest):
    """Base class for Teams Tab tests"""
    def setUp(self):
        super(TeamsTabBase, self).setUp()
        self.tab_nav = TabNavPage(self.browser)
        self.course_info_page = CourseInfoPage(self.browser, self.course_id)
        self.teams_page = TeamsPage(self.browser, self.course_id)
        # TODO: Refactor so resetting events database is not necessary
        self.reset_event_tracking()
    def create_topics(self, num_topics):
        """Create `num_topics` test topics."""
        # Topic id, name, and description are all the stringified index.
        return [{u"description": i, u"name": i, u"id": i} for i in map(str, xrange(num_topics))]
    def create_teams(self, topic, num_teams, time_between_creation=0):
        """Create `num_teams` teams belonging to `topic`."""
        teams = []
        for i in xrange(num_teams):
            team = {
                'course_id': self.course_id,
                'topic_id': topic['id'],
                'name': 'Team {}'.format(i),
                'description': 'Description {}'.format(i),
                'language': 'aa',
                'country': 'AF'
            }
            teams.append(self.post_team_data(team))
            # Sadly, this sleep is necessary in order to ensure that
            # sorting by last_activity_at works correctly when running
            # in Jenkins.
            time.sleep(time_between_creation)
        return teams
    def post_team_data(self, team_data):
        """Given a JSON representation of a team, post it to the server."""
        response = self.course_fixture.session.post(
            LMS_BASE_URL + '/api/team/v0/teams/',
            data=json.dumps(team_data),
            headers=self.course_fixture.headers
        )
        self.assertEqual(response.status_code, 200)
        return json.loads(response.text)
    def create_memberships(self, num_memberships, team_id):
        """Create `num_memberships` users and assign them to `team_id`. The
        last user created becomes the current user."""
        memberships = []
        for __ in xrange(num_memberships):
            user_info = AutoAuthPage(self.browser, course_id=self.course_id).visit().user_info
            memberships.append(user_info)
            self.create_membership(user_info['username'], team_id)
        #pylint: disable=attribute-defined-outside-init
        self.user_info = memberships[-1]
        return memberships
    def create_membership(self, username, team_id):
        """Assign `username` to `team_id`."""
        response = self.course_fixture.session.post(
            LMS_BASE_URL + '/api/team/v0/team_membership/',
            data=json.dumps({'username': username, 'team_id': team_id}),
            headers=self.course_fixture.headers
        )
        return json.loads(response.text)
    def set_team_configuration(self, configuration, enroll_in_course=True, global_staff=False):
        """
        Sets team configuration on the course and calls auto-auth on the user.
        """
        #pylint: disable=attribute-defined-outside-init
        self.course_fixture = CourseFixture(**self.course_info)
        if configuration:
            self.course_fixture.add_advanced_settings(
                {u"teams_configuration": {u"value": configuration}}
            )
        self.course_fixture.install()
        enroll_course_id = self.course_id if enroll_in_course else None
        #pylint: disable=attribute-defined-outside-init
        self.user_info = AutoAuthPage(self.browser, course_id=enroll_course_id, staff=global_staff).visit().user_info
        self.course_info_page.visit()
    def verify_teams_present(self, present):
        """
        Verifies whether or not the teams tab is present. If it should be present, also
        checks the text on the page (to ensure view is working).
        """
        if present:
            self.assertIn("Teams", self.tab_nav.tab_names)
            self.teams_page.visit()
            self.assertEqual(self.teams_page.active_tab(), 'browse')
        else:
            self.assertNotIn("Teams", self.tab_nav.tab_names)
    def verify_teams(self, page, expected_teams):
        """Verify that the list of team cards on the current page match the expected teams in order."""
        def assert_team_equal(expected_team, team_card_name, team_card_description):
            """
            Helper to assert that a single team card has the expected name and
            description.
            """
            self.assertEqual(expected_team['name'], team_card_name)
            self.assertEqual(expected_team['description'], team_card_description)
        team_card_names = page.team_names
        team_card_descriptions = page.team_descriptions
        # Python 2's map() is eager, so this runs the assertions immediately.
        map(assert_team_equal, expected_teams, team_card_names, team_card_descriptions)
    def verify_my_team_count(self, expected_number_of_teams):
        """ Verify the number of teams shown on "My Team". """
        # We are doing these operations on this top-level page object to avoid reloading the page.
        self.teams_page.verify_my_team_count(expected_number_of_teams)
    def only_team_events(self, event):
        """Filter out all non-team events."""
        return event['event_type'].startswith('edx.team.')
@ddt.ddt
@attr(shard=5)
class TeamsTabTest(TeamsTabBase):
    """
    Tests verifying when the Teams tab is present.
    """
    def test_teams_not_enabled(self):
        """
        Scenario: teams tab should not be present if no team configuration is set
        Given I am enrolled in a course without team configuration
        When I view the course info page
        Then I should not see the Teams tab
        """
        self.set_team_configuration(None)
        self.verify_teams_present(False)
    def test_teams_not_enabled_no_topics(self):
        """
        Scenario: teams tab should not be present if team configuration does not specify topics
        Given I am enrolled in a course with no topics in the team configuration
        When I view the course info page
        Then I should not see the Teams tab
        """
        self.set_team_configuration({u"max_team_size": 10, u"topics": []})
        self.verify_teams_present(False)
    def test_teams_not_enabled_not_enrolled(self):
        """
        Scenario: teams tab should not be present if student is not enrolled in the course
        Given there is a course with team configuration and topics
        And I am not enrolled in that course, and am not global staff
        When I view the course info page
        Then I should not see the Teams tab
        """
        self.set_team_configuration(
            {u"max_team_size": 10, u"topics": self.create_topics(1)},
            enroll_in_course=False
        )
        self.verify_teams_present(False)
    def test_teams_enabled(self):
        """
        Scenario: teams tab should be present if user is enrolled in the course and it has team configuration
        Given I am enrolled in a course with team configuration and topics
        When I view the course info page
        Then I should see the Teams tab
        And the correct content should be on the page
        """
        self.set_team_configuration({u"max_team_size": 10, u"topics": self.create_topics(1)})
        self.verify_teams_present(True)
    def test_teams_enabled_global_staff(self):
        """
        Scenario: teams tab should be present if user is not enrolled in the course, but is global staff
        Given there is a course with team configuration
        And I am not enrolled in that course, but am global staff
        When I view the course info page
        Then I should see the Teams tab
        And the correct content should be on the page
        """
        self.set_team_configuration(
            {u"max_team_size": 10, u"topics": self.create_topics(1)},
            enroll_in_course=False,
            global_staff=True
        )
        self.verify_teams_present(True)
    @ddt.data(
        'topics/{topic_id}',
        'topics/{topic_id}/search',
        'teams/{topic_id}/{team_id}/edit-team',
        'teams/{topic_id}/{team_id}'
    )
    def test_unauthorized_error_message(self, route):
        """Ensure that an error message is shown to the user if they attempt
        to take an action which makes an AJAX request while not signed
        in.
        """
        topics = self.create_topics(1)
        topic = topics[0]
        self.set_team_configuration(
            {u'max_team_size': 10, u'topics': topics},
            global_staff=True
        )
        team = self.create_teams(topic, 1)[0]
        self.teams_page.visit()
        # Deleting the session cookie simulates a signed-out user.
        self.browser.delete_cookie('sessionid')
        url = self.browser.current_url.split('#')[0]
        self.browser.get(
            '{url}#{route}'.format(
                url=url,
                route=route.format(
                    topic_id=topic['id'],
                    team_id=team['id']
                )
            )
        )
        self.teams_page.wait_for_ajax()
        self.assertEqual(
            self.teams_page.warning_message,
            u"Your request could not be completed. Reload the page and try again."
        )
    @ddt.data(
        ('browse', '.topics-list'),
        # TODO: find a reliable way to match the "My Teams" tab
        # ('my-teams', 'div.teams-list'),
        ('teams/{topic_id}/{team_id}', 'div.discussion-module'),
        ('topics/{topic_id}/create-team', 'div.create-team-instructions'),
        ('topics/{topic_id}', '.teams-list'),
        ('not-a-real-route', 'div.warning')
    )
    @ddt.unpack
    def test_url_routing(self, route, selector):
        """Ensure that navigating to a URL route correctly updates the page
        content.
        """
        topics = self.create_topics(1)
        topic = topics[0]
        self.set_team_configuration({
            u'max_team_size': 10,
            u'topics': topics
        })
        team = self.create_teams(topic, 1)[0]
        self.teams_page.visit()
        # Get the base URL (the URL without any trailing fragment)
        url = self.browser.current_url
        fragment_index = url.find('#')
        if fragment_index >= 0:
            url = url[0:fragment_index]
        self.browser.get(
            '{url}#{route}'.format(
                url=url,
                route=route.format(
                    topic_id=topic['id'],
                    team_id=team['id']
                ))
        )
        self.teams_page.wait_for_page()
        self.teams_page.wait_for_ajax()
        self.assertTrue(self.teams_page.q(css=selector).present)
        self.assertTrue(self.teams_page.q(css=selector).visible)
@attr(shard=5)
class MyTeamsTest(TeamsTabBase):
    """
    Tests for the "My Teams" tab of the Teams page.
    """
    def setUp(self):
        super(MyTeamsTest, self).setUp()
        self.topic = {u"name": u"Example Topic", u"id": "example_topic", u"description": "Description"}
        self.set_team_configuration({'course_id': self.course_id, 'max_team_size': 10, 'topics': [self.topic]})
        self.my_teams_page = MyTeamsPage(self.browser, self.course_id)
        # Expected analytics event fired whenever the My Teams page is viewed.
        self.page_viewed_event = {
            'event_type': 'edx.team.page_viewed',
            'event': {
                'page_name': 'my-teams',
                'topic_id': None,
                'team_id': None
            }
        }
    def test_not_member_of_any_teams(self):
        """
        Scenario: Visiting the My Teams page when user is not a member of any team should not display any teams.
        Given I am enrolled in a course with a team configuration and a topic but am not a member of a team
        When I visit the My Teams page
        And I should see no teams
        And I should see a message that I belong to no teams.
        """
        with self.assert_events_match_during(self.only_team_events, expected_events=[self.page_viewed_event]):
            self.my_teams_page.visit()
        self.assertEqual(len(self.my_teams_page.team_cards), 0, msg='Expected to see no team cards')
        self.assertEqual(
            self.my_teams_page.q(css='.page-content-main').text,
            [u'You are not currently a member of any team.']
        )
    def test_member_of_a_team(self):
        """
        Scenario: Visiting the My Teams page when user is a member of a team should display the teams.
        Given I am enrolled in a course with a team configuration and a topic and am a member of a team
        When I visit the My Teams page
        Then I should see a pagination header showing the number of teams
        And I should see all the expected team cards
        And I should not see a pagination footer
        """
        teams = self.create_teams(self.topic, 1)
        self.create_membership(self.user_info['username'], teams[0]['id'])
        with self.assert_events_match_during(self.only_team_events, expected_events=[self.page_viewed_event]):
            self.my_teams_page.visit()
        self.verify_teams(self.my_teams_page, teams)
    def test_multiple_team_members(self):
        """
        Scenario: Visiting the My Teams page when user is a member of a team should display the teams.
        Given I am a member of a team with multiple members
        When I visit the My Teams page
        Then I should see the correct number of team members on my membership
        """
        teams = self.create_teams(self.topic, 1)
        self.create_memberships(4, teams[0]['id'])
        self.my_teams_page.visit()
        self.assertEqual(self.my_teams_page.team_memberships[0], '4 / 10 Members')
@attr(shard=5)
@ddt.ddt
class BrowseTopicsTest(TeamsTabBase):
"""
Tests for the Browse tab of the Teams page.
"""
def setUp(self):
super(BrowseTopicsTest, self).setUp()
self.topics_page = BrowseTopicsPage(self.browser, self.course_id)
@ddt.data(('name', False), ('team_count', True))
@ddt.unpack
def test_sort_topics(self, sort_order, reverse):
"""
Scenario: the user should be able to sort the list of topics by name or team count
Given I am enrolled in a course with team configuration and topics
When I visit the Teams page
And I browse topics
Then I should see a list of topics for the course
When I choose a sort order
Then I should see the paginated list of topics in that order
"""
topics = self.create_topics(TOPICS_PER_PAGE + 1)
self.set_team_configuration({u"max_team_size": 100, u"topics": topics})
for i, topic in enumerate(random.sample(topics, len(topics))):
self.create_teams(topic, i)
topic['team_count'] = i
self.topics_page.visit()
self.topics_page.sort_topics_by(sort_order)
topic_names = self.topics_page.topic_names
self.assertEqual(len(topic_names), TOPICS_PER_PAGE)
self.assertEqual(
topic_names,
[t['name'] for t in sorted(topics, key=lambda t: t[sort_order], reverse=reverse)][:TOPICS_PER_PAGE]
)
def test_sort_topics_update(self):
"""
Scenario: the list of topics should remain sorted after updates
Given I am enrolled in a course with team configuration and topics
When I visit the Teams page
And I browse topics and choose a sort order
Then I should see the paginated list of topics in that order
When I create a team in one of those topics
And I return to the topics list
Then I should see the topics in the correct sorted order
"""
topics = self.create_topics(3)
self.set_team_configuration({u"max_team_size": 100, u"topics": topics})
self.topics_page.visit()
self.topics_page.sort_topics_by('team_count')
topic_name = self.topics_page.topic_names[-1]
topic = [t for t in topics if t['name'] == topic_name][0]
self.topics_page.browse_teams_for_topic(topic_name)
browse_teams_page = BrowseTeamsPage(self.browser, self.course_id, topic)
self.assertTrue(browse_teams_page.is_browser_on_page())
browse_teams_page.click_create_team_link()
create_team_page = TeamManagementPage(self.browser, self.course_id, topic)
create_team_page.value_for_text_field(field_id='name', value='Team Name', press_enter=False)
create_team_page.set_value_for_textarea_field(
field_id='description',
value='Team description.'
)
create_team_page.submit_form()
team_page = TeamPage(self.browser, self.course_id)
self.assertTrue(team_page.is_browser_on_page())
team_page.click_all_topics()
self.assertTrue(self.topics_page.is_browser_on_page())
self.topics_page.wait_for_ajax()
self.assertEqual(topic_name, self.topics_page.topic_names[0])
def test_list_topics(self):
"""
Scenario: a list of topics should be visible in the "Browse" tab
Given I am enrolled in a course with team configuration and topics
When I visit the Teams page
And I browse topics
Then I should see a list of topics for the course
"""
self.set_team_configuration({u"max_team_size": 10, u"topics": self.create_topics(2)})
self.topics_page.visit()
self.assertEqual(len(self.topics_page.topic_cards), 2)
self.assertTrue(self.topics_page.get_pagination_header_text().startswith('Showing 1-2 out of 2 total'))
self.assertFalse(self.topics_page.pagination_controls_visible())
self.assertFalse(self.topics_page.is_previous_page_button_enabled())
self.assertFalse(self.topics_page.is_next_page_button_enabled())
def test_topic_pagination(self):
"""
Scenario: a list of topics should be visible in the "Browse" tab, paginated 12 per page
Given I am enrolled in a course with team configuration and topics
When I visit the Teams page
And I browse topics
Then I should see only the first 12 topics
"""
self.set_team_configuration({u"max_team_size": 10, u"topics": self.create_topics(20)})
self.topics_page.visit()
self.assertEqual(len(self.topics_page.topic_cards), TOPICS_PER_PAGE)
self.assertTrue(self.topics_page.get_pagination_header_text().startswith('Showing 1-12 out of 20 total'))
self.assertTrue(self.topics_page.pagination_controls_visible())
self.assertFalse(self.topics_page.is_previous_page_button_enabled())
self.assertTrue(self.topics_page.is_next_page_button_enabled())
def test_go_to_numbered_page(self):
"""
Scenario: topics should be able to be navigated by page number
Given I am enrolled in a course with team configuration and topics
When I visit the Teams page
And I browse topics
And I enter a valid page number in the page number input
Then I should see that page of topics
"""
self.set_team_configuration({u"max_team_size": 10, u"topics": self.create_topics(25)})
self.topics_page.visit()
self.topics_page.go_to_page(3)
self.assertEqual(len(self.topics_page.topic_cards), 1)
self.assertTrue(self.topics_page.is_previous_page_button_enabled())
self.assertFalse(self.topics_page.is_next_page_button_enabled())
def test_go_to_invalid_page(self):
"""
Scenario: browsing topics should not respond to invalid page numbers
Given I am enrolled in a course with team configuration and topics
When I visit the Teams page
And I browse topics
And I enter an invalid page number in the page number input
Then I should stay on the current page
"""
self.set_team_configuration({u"max_team_size": 10, u"topics": self.create_topics(13)})
self.topics_page.visit()
self.topics_page.go_to_page(3)
self.assertEqual(self.topics_page.get_current_page_number(), 1)
def test_page_navigation_buttons(self):
"""
Scenario: browsing topics should not respond to invalid page numbers
Given I am enrolled in a course with team configuration and topics
When I visit the Teams page
And I browse topics
When I press the next page button
Then I should move to the next page
When I press the previous page button
Then I should move to the previous page
"""
self.set_team_configuration({u"max_team_size": 10, u"topics": self.create_topics(13)})
self.topics_page.visit()
self.topics_page.press_next_page_button()
self.assertEqual(len(self.topics_page.topic_cards), 1)
self.assertTrue(self.topics_page.get_pagination_header_text().startswith('Showing 13-13 out of 13 total'))
self.topics_page.press_previous_page_button()
self.assertEqual(len(self.topics_page.topic_cards), TOPICS_PER_PAGE)
self.assertTrue(self.topics_page.get_pagination_header_text().startswith('Showing 1-12 out of 13 total'))
def test_topic_pagination_one_page(self):
"""
Scenario: Browsing topics when there are fewer topics than the page size i.e. 12
all topics should show on one page
Given I am enrolled in a course with team configuration and topics
When I visit the Teams page
And I browse topics
And I should see corrected number of topic cards
And I should see the correct page header
And I should not see a pagination footer
"""
self.set_team_configuration({u"max_team_size": 10, u"topics": self.create_topics(10)})
self.topics_page.visit()
self.assertEqual(len(self.topics_page.topic_cards), 10)
self.assertTrue(self.topics_page.get_pagination_header_text().startswith('Showing 1-10 out of 10 total'))
self.assertFalse(self.topics_page.pagination_controls_visible())
def test_topic_description_truncation(self):
"""
Scenario: excessively long topic descriptions should be truncated so
as to fit within a topic card.
Given I am enrolled in a course with a team configuration and a topic
with a long description
When I visit the Teams page
And I browse topics
Then I should see a truncated topic description
"""
initial_description = "A" + " really" * 50 + " long description"
self.set_team_configuration(
{u"max_team_size": 1, u"topics": [{"name": "", "id": "", "description": initial_description}]}
)
self.topics_page.visit()
truncated_description = self.topics_page.topic_descriptions[0]
self.assertLess(len(truncated_description), len(initial_description))
self.assertTrue(truncated_description.endswith('...'))
self.assertIn(truncated_description.split('...')[0], initial_description)
def test_go_to_teams_list(self):
"""
Scenario: Clicking on a Topic Card should take you to the
teams list for that Topic.
Given I am enrolled in a course with a team configuration and a topic
When I visit the Teams page
And I browse topics
And I click on the arrow link to view teams for the first topic
Then I should be on the browse teams page
"""
topic = {u"name": u"Example Topic", u"id": u"example_topic", u"description": "Description"}
self.set_team_configuration(
{u"max_team_size": 1, u"topics": [topic]}
)
self.topics_page.visit()
self.topics_page.browse_teams_for_topic('Example Topic')
browse_teams_page = BrowseTeamsPage(self.browser, self.course_id, topic)
self.assertTrue(browse_teams_page.is_browser_on_page())
self.assertEqual(browse_teams_page.header_name, 'Example Topic')
self.assertEqual(browse_teams_page.header_description, 'Description')
def test_page_viewed_event(self):
    """
    Scenario: Visiting the browse topics page should fire a page viewed event.
    Given I am enrolled in a course with a team configuration and a topic
    When I visit the browse topics page
    Then my browser should post a page viewed event
    """
    topic = {u"name": u"Example Topic", u"id": u"example_topic", u"description": "Description"}
    self.set_team_configuration({u"max_team_size": 1, u"topics": [topic]})
    # The 'browse' page view carries no topic/team context.
    expected_events = [{
        'event_type': 'edx.team.page_viewed',
        'event': {
            'page_name': 'browse',
            'topic_id': None,
            'team_id': None
        }
    }]
    with self.assert_events_match_during(self.only_team_events, expected_events=expected_events):
        self.topics_page.visit()
@attr(shard=5)
@ddt.ddt
class BrowseTeamsWithinTopicTest(TeamsTabBase):
    """
    Tests for browsing Teams within a Topic on the Teams page.
    """
    TEAMS_PAGE_SIZE = 10

    def setUp(self):
        super(BrowseTeamsWithinTopicTest, self).setUp()
        self.topic = {u"name": u"Example Topic", u"id": "example_topic", u"description": "Description"}
        self.max_team_size = 10
        self.set_team_configuration({
            'course_id': self.course_id,
            'max_team_size': self.max_team_size,
            'topics': [self.topic]
        })
        self.browse_teams_page = BrowseTeamsPage(self.browser, self.course_id, self.topic)
        self.topics_page = BrowseTopicsPage(self.browser, self.course_id)

    def teams_with_default_sort_order(self, teams):
        """Return a list of teams sorted according to the default ordering
        (last_activity_at, with a secondary sort by open slots).
        """
        # Python's sort is stable, so sorting on the secondary key first and
        # then on the primary key produces the combined ordering.
        return sorted(
            sorted(teams, key=lambda t: len(t['membership']), reverse=True),
            key=lambda t: parse(t['last_activity_at']).replace(microsecond=0),
            reverse=True
        )

    def verify_page_header(self):
        """Verify that the page header correctly reflects the current topic's name and description."""
        self.assertEqual(self.browse_teams_page.header_name, self.topic['name'])
        self.assertEqual(self.browse_teams_page.header_description, self.topic['description'])

    def verify_search_header(self, search_results_page, search_query):
        """Verify that the search results header reflects the given search query."""
        self.assertEqual(search_results_page.header_name, 'Team Search')
        self.assertEqual(
            search_results_page.header_description,
            'Showing results for "{search_query}"'.format(search_query=search_query)
        )

    def verify_on_page(self, teams_page, page_num, total_teams, pagination_header_text, footer_visible):
        """
        Verify that we are on the correct team list page.
        Arguments:
            teams_page (BaseTeamsPage): The teams page object that should be the current page.
            page_num (int): The one-indexed page number that we expect to be on
            total_teams (list): An unsorted list of all the teams for the
                current topic
            pagination_header_text (str): Text we expect to see in the
                pagination header.
            footer_visible (bool): Whether we expect to see the pagination
                footer controls.
        """
        sorted_teams = self.teams_with_default_sort_order(total_teams)
        self.assertTrue(teams_page.get_pagination_header_text().startswith(pagination_header_text))
        self.verify_teams(
            teams_page,
            sorted_teams[(page_num - 1) * self.TEAMS_PAGE_SIZE:page_num * self.TEAMS_PAGE_SIZE]
        )
        self.assertEqual(
            teams_page.pagination_controls_visible(),
            footer_visible,
            # NOTE: the conditional expression must be parenthesized; without
            # parentheses, '+' binds tighter than 'if/else' and the message
            # degenerates to just 'invisible' when footer_visible is False.
            msg='Expected paging footer to be ' + ('visible' if footer_visible else 'invisible')
        )

    @ddt.data(
        ('open_slots', 'last_activity_at', True),
        ('last_activity_at', 'open_slots', True)
    )
    @ddt.unpack
    def test_sort_teams(self, sort_order, secondary_sort_order, reverse):
        """
        Scenario: the user should be able to sort the list of teams by open slots or last activity
        Given I am enrolled in a course with team configuration and topics
        When I visit the Teams page
        And I browse teams within a topic
        Then I should see a list of teams for that topic
        When I choose a sort order
        Then I should see the paginated list of teams in that order
        """
        teams = self.create_teams(self.topic, self.TEAMS_PAGE_SIZE + 1)
        # Give each team a distinct number of members (and therefore a
        # distinct open_slots value) in a random team order, so the sort
        # under test cannot accidentally match creation order.
        for i, team in enumerate(random.sample(teams, len(teams))):
            for _ in range(i):
                user_info = AutoAuthPage(self.browser, course_id=self.course_id).visit().user_info
                self.create_membership(user_info['username'], team['id'])
            team['open_slots'] = self.max_team_size - i
        # Re-authenticate as staff after creating users
        AutoAuthPage(
            self.browser,
            course_id=self.course_id,
            staff=True
        ).visit()
        self.browse_teams_page.visit()
        self.browse_teams_page.sort_teams_by(sort_order)
        team_names = self.browse_teams_page.team_names
        self.assertEqual(len(team_names), self.TEAMS_PAGE_SIZE)
        # Stable double-sort: secondary key first, then primary key.
        sorted_teams = [
            team['name']
            for team in sorted(
                sorted(teams, key=lambda t: t[secondary_sort_order], reverse=reverse),
                key=lambda t: t[sort_order],
                reverse=reverse
            )
        ][:self.TEAMS_PAGE_SIZE]
        self.assertEqual(team_names, sorted_teams)

    def test_default_sort_order(self):
        """
        Scenario: the list of teams should be sorted by last activity by default
        Given I am enrolled in a course with team configuration and topics
        When I visit the Teams page
        And I browse teams within a topic
        Then I should see a list of teams for that topic, sorted by last activity
        """
        self.create_teams(self.topic, self.TEAMS_PAGE_SIZE + 1)
        self.browse_teams_page.visit()
        self.assertEqual(self.browse_teams_page.sort_order, 'last activity')

    def test_no_teams(self):
        """
        Scenario: Visiting a topic with no teams should not display any teams.
        Given I am enrolled in a course with a team configuration and a topic
        When I visit the Teams page for that topic
        Then I should see the correct page header
        And I should see a pagination header showing no teams
        And I should see no teams
        And I should see a button to add a team
        And I should not see a pagination footer
        """
        self.browse_teams_page.visit()
        self.verify_page_header()
        self.assertTrue(self.browse_teams_page.get_pagination_header_text().startswith('Showing 0 out of 0 total'))
        self.assertEqual(len(self.browse_teams_page.team_cards), 0, msg='Expected to see no team cards')
        self.assertFalse(
            self.browse_teams_page.pagination_controls_visible(),
            msg='Expected paging footer to be invisible'
        )

    def test_teams_one_page(self):
        """
        Scenario: Visiting a topic with fewer teams than the page size should
        all those teams on one page.
        Given I am enrolled in a course with a team configuration and a topic
        When I visit the Teams page for that topic
        Then I should see the correct page header
        And I should see a pagination header showing the number of teams
        And I should see all the expected team cards
        And I should see a button to add a team
        And I should not see a pagination footer
        """
        teams = self.teams_with_default_sort_order(
            self.create_teams(self.topic, self.TEAMS_PAGE_SIZE, time_between_creation=1)
        )
        self.browse_teams_page.visit()
        self.verify_page_header()
        self.assertTrue(self.browse_teams_page.get_pagination_header_text().startswith('Showing 1-10 out of 10 total'))
        self.verify_teams(self.browse_teams_page, teams)
        self.assertFalse(
            self.browse_teams_page.pagination_controls_visible(),
            msg='Expected paging footer to be invisible'
        )

    def test_teams_navigation_buttons(self):
        """
        Scenario: The user should be able to page through a topic's team list
        using navigation buttons when it is longer than the page size.
        Given I am enrolled in a course with a team configuration and a topic
        When I visit the Teams page for that topic
        Then I should see the correct page header
        And I should see that I am on the first page of results
        When I click on the next page button
        Then I should see that I am on the second page of results
        And when I click on the previous page button
        Then I should see that I am on the first page of results
        """
        teams = self.create_teams(self.topic, self.TEAMS_PAGE_SIZE + 1, time_between_creation=1)
        self.browse_teams_page.visit()
        self.verify_page_header()
        self.verify_on_page(self.browse_teams_page, 1, teams, 'Showing 1-10 out of 11 total', True)
        self.browse_teams_page.press_next_page_button()
        self.verify_on_page(self.browse_teams_page, 2, teams, 'Showing 11-11 out of 11 total', True)
        self.browse_teams_page.press_previous_page_button()
        self.verify_on_page(self.browse_teams_page, 1, teams, 'Showing 1-10 out of 11 total', True)

    def test_teams_page_input(self):
        """
        Scenario: The user should be able to page through a topic's team list
        using the page input when it is longer than the page size.
        Given I am enrolled in a course with a team configuration and a topic
        When I visit the Teams page for that topic
        Then I should see the correct page header
        And I should see that I am on the first page of results
        When I input the second page
        Then I should see that I am on the second page of results
        When I input the first page
        Then I should see that I am on the first page of results
        """
        teams = self.create_teams(self.topic, self.TEAMS_PAGE_SIZE + 10, time_between_creation=1)
        self.browse_teams_page.visit()
        self.verify_page_header()
        self.verify_on_page(self.browse_teams_page, 1, teams, 'Showing 1-10 out of 20 total', True)
        self.browse_teams_page.go_to_page(2)
        self.verify_on_page(self.browse_teams_page, 2, teams, 'Showing 11-20 out of 20 total', True)
        self.browse_teams_page.go_to_page(1)
        self.verify_on_page(self.browse_teams_page, 1, teams, 'Showing 1-10 out of 20 total', True)

    def test_browse_team_topics(self):
        """
        Scenario: User should be able to navigate to "browse all teams" and "search team description" links.
        Given I am enrolled in a course with teams enabled
        When I visit the Teams page for a topic
        Then I should see the correct page header
        And I should see the link to "browse teams in other topics"
        When I should navigate to that link
        Then I should see the topic browse page
        """
        self.browse_teams_page.visit()
        self.verify_page_header()
        self.browse_teams_page.click_browse_all_teams_link()
        self.assertTrue(self.topics_page.is_browser_on_page())

    def test_search(self):
        """
        Scenario: User should be able to search for a team
        Given I am enrolled in a course with teams enabled
        When I visit the Teams page for that topic
        And I search for 'banana'
        Then I should see the search result page
        And the search header should be shown
        And 0 results should be shown
        And my browser should fire a page viewed event for the search page
        And a searched event should have been fired
        """
        # Note: all searches will return 0 results with the mock search server
        # used by Bok Choy.
        search_text = 'banana'
        self.create_teams(self.topic, 5)
        self.browse_teams_page.visit()
        events = [{
            'event_type': 'edx.team.page_viewed',
            'event': {
                'page_name': 'search-teams',
                'topic_id': self.topic['id'],
                'team_id': None
            }
        }, {
            'event_type': 'edx.team.searched',
            'event': {
                'search_text': search_text,
                'topic_id': self.topic['id'],
                'number_of_results': 0
            }
        }]
        # in_order=False: the page-viewed and searched events may arrive in
        # either order.
        with self.assert_events_match_during(self.only_team_events, expected_events=events, in_order=False):
            search_results_page = self.browse_teams_page.search(search_text)
        self.verify_search_header(search_results_page, search_text)
        self.assertTrue(search_results_page.get_pagination_header_text().startswith('Showing 0 out of 0 total'))

    def test_page_viewed_event(self):
        """
        Scenario: Visiting the browse page should fire a page viewed event.
        Given I am enrolled in a course with a team configuration and a topic
        When I visit the Teams page
        Then my browser should post a page viewed event for the teams page
        """
        self.create_teams(self.topic, 5)
        events = [{
            'event_type': 'edx.team.page_viewed',
            'event': {
                'page_name': 'single-topic',
                'topic_id': self.topic['id'],
                'team_id': None
            }
        }]
        with self.assert_events_match_during(self.only_team_events, expected_events=events):
            self.browse_teams_page.visit()

    def test_team_name_xss(self):
        """
        Scenario: Team names should be HTML-escaped on the teams page
        Given I am enrolled in a course with teams enabled
        When I visit the Teams page for a topic, with a team name containing JS code
        Then I should not see any alerts
        """
        self.post_team_data({
            'course_id': self.course_id,
            'topic_id': self.topic['id'],
            'name': '<script>alert("XSS")</script>',
            'description': 'Description',
            'language': 'aa',
            'country': 'AF'
        })
        # If the name is properly escaped, no alert ever appears, so waiting
        # for a modal alert must time out.
        with self.assertRaises(TimeoutException):
            self.browser.get(self.browse_teams_page.url)
            alert = get_modal_alert(self.browser)
            alert.accept()
@attr(shard=5)
class TeamFormActions(TeamsTabBase):
    """
    Base class for create, edit, and delete team.

    Provides shared fixtures (a topic, canonical team name/description) and
    helpers for navigating to and validating the team create/edit form.
    Subclasses are expected to define ``self.browse_teams_page`` before
    calling ``verify_and_navigate_to_create_team_page``, and ``self.team`` /
    ``self.team_page`` before calling ``verify_and_navigate_to_edit_team_page``.
    """
    # Canonical values entered by fill_create_or_edit_form; tests assert
    # against these after submitting.
    TEAM_DESCRIPTION = 'The Avengers are a fictional team of superheroes.'
    topic = {'name': 'Example Topic', 'id': 'example_topic', 'description': 'Description'}
    TEAMS_NAME = 'Avengers'

    def setUp(self):
        super(TeamFormActions, self).setUp()
        self.team_management_page = TeamManagementPage(self.browser, self.course_id, self.topic)

    def verify_page_header(self, title, description, breadcrumbs):
        """
        Verify that the page header correctly reflects the
        create team header, description and breadcrumb.
        """
        self.assertEqual(self.team_management_page.header_page_name, title)
        self.assertEqual(self.team_management_page.header_page_description, description)
        self.assertEqual(self.team_management_page.header_page_breadcrumbs, breadcrumbs)

    def verify_and_navigate_to_create_team_page(self):
        """Navigates to the create team page and verifies."""
        self.browse_teams_page.click_create_team_link()
        self.verify_page_header(
            title='Create a New Team',
            description='Create a new team if you can\'t find an existing team to join, '
            'or if you would like to learn with friends you know.',
            breadcrumbs='All Topics {topic_name}'.format(topic_name=self.topic['name'])
        )

    def verify_and_navigate_to_edit_team_page(self):
        """Navigates to the edit team page and verifies."""
        # pylint: disable=no-member
        self.assertEqual(self.team_page.team_name, self.team['name'])
        self.assertTrue(self.team_page.edit_team_button_present)
        self.team_page.click_edit_team_button()
        self.team_management_page.wait_for_page()
        # Edit page header.
        self.verify_page_header(
            title='Edit Team',
            description='If you make significant changes, make sure you notify '
            'members of the team before making these changes.',
            breadcrumbs='All Topics {topic_name} {team_name}'.format(
                topic_name=self.topic['name'],
                team_name=self.team['name']
            )
        )

    def verify_team_info(self, name, description, location, language):
        """Verify the team information on team page."""
        # pylint: disable=no-member
        self.assertEqual(self.team_page.team_name, name)
        self.assertEqual(self.team_page.team_description, description)
        self.assertEqual(self.team_page.team_location, location)
        self.assertEqual(self.team_page.team_language, language)

    def fill_create_or_edit_form(self):
        """Fill the create/edit team form fields with appropriate values."""
        self.team_management_page.value_for_text_field(
            field_id='name',
            value=self.TEAMS_NAME,
            press_enter=False
        )
        self.team_management_page.set_value_for_textarea_field(
            field_id='description',
            value=self.TEAM_DESCRIPTION
        )
        # Dropdowns are set by display name; these correspond to
        # language code 'en' and country code 'PK' in emitted events.
        self.team_management_page.value_for_dropdown_field(field_id='language', value='English')
        self.team_management_page.value_for_dropdown_field(field_id='country', value='Pakistan')

    def verify_all_fields_exist(self):
        """
        Verify the fields for create/edit page.
        """
        self.assertEqual(
            self.team_management_page.message_for_field('name'),
            'A name that identifies your team (maximum 255 characters).'
        )
        self.assertEqual(
            self.team_management_page.message_for_textarea_field('description'),
            'A short description of the team to help other learners understand '
            'the goals or direction of the team (maximum 300 characters).'
        )
        self.assertEqual(
            self.team_management_page.message_for_field('country'),
            'The country that team members primarily identify with.'
        )
        self.assertEqual(
            self.team_management_page.message_for_field('language'),
            'The language that team members primarily use to communicate with each other.'
        )
@ddt.ddt
class CreateTeamTest(TeamFormActions):
    """
    Tests for creating a new Team within a Topic on the Teams page.
    """

    def setUp(self):
        super(CreateTeamTest, self).setUp()
        self.set_team_configuration({'course_id': self.course_id, 'max_team_size': 10, 'topics': [self.topic]})
        self.browse_teams_page = BrowseTeamsPage(self.browser, self.course_id, self.topic)
        self.browse_teams_page.visit()

    def test_user_can_see_create_team_page(self):
        """
        Scenario: The user should be able to see the create team page via teams list page.
        Given I am enrolled in a course with a team configuration and a topic
        When I visit the Teams page for that topic
        Then I should see the Create Team page link on bottom
        And When I click create team link
        Then I should see the create team page.
        And I should see the create team header
        And I should also see the help messages for fields.
        """
        self.verify_and_navigate_to_create_team_page()
        self.verify_all_fields_exist()

    def test_user_can_see_error_message_for_missing_data(self):
        """
        Scenario: The user should be able to see error message in case of missing required field.
        Given I am enrolled in a course with a team configuration and a topic
        When I visit the Create Team page for that topic
        Then I should see the Create Team header and form
        And When I click create team button without filling required fields
        Then I should see the error message and highlighted fields.
        """
        self.verify_and_navigate_to_create_team_page()
        # Submit with every field empty; name and description are required.
        self.team_management_page.submit_form()
        self.assertEqual(
            self.team_management_page.validation_message_text,
            'Check the highlighted fields below and try again.'
        )
        self.assertTrue(self.team_management_page.error_for_field(field_id='name'))
        self.assertTrue(self.team_management_page.error_for_field(field_id='description'))

    def test_user_can_see_error_message_for_incorrect_data(self):
        """
        Scenario: The user should be able to see error message in case of increasing length for required fields.
        Given I am enrolled in a course with a team configuration and a topic
        When I visit the Create Team page for that topic
        Then I should see the Create Team header and form
        When I add text > than 255 characters for name field
        And I click Create button
        Then I should see the error message for exceeding length.
        """
        self.verify_and_navigate_to_create_team_page()
        # Fill the name field with >255 characters to see validation message.
        self.team_management_page.value_for_text_field(
            field_id='name',
            value='EdX is a massive open online course (MOOC) provider and online learning platform. '
            'It hosts online university-level courses in a wide range of disciplines to a worldwide '
            'audience, some at no charge. It also conducts research into learning based on how '
            'people use its platform. EdX was created for students and institutions that seek to'
            'transform themselves through cutting-edge technologies, innovative pedagogy, and '
            'rigorous courses. More than 70 schools, nonprofits, corporations, and international'
            'organizations offer or plan to offer courses on the edX website. As of 22 October 2014,'
            'edX has more than 4 million users taking more than 500 courses online.',
            press_enter=False
        )
        self.team_management_page.submit_form()
        self.assertEqual(
            self.team_management_page.validation_message_text,
            'Check the highlighted fields below and try again.'
        )
        self.assertTrue(self.team_management_page.error_for_field(field_id='name'))

    def test_user_can_create_new_team_successfully(self):
        """
        Scenario: The user should be able to create new team.
        Given I am enrolled in a course with a team configuration and a topic
        When I visit the Create Team page for that topic
        Then I should see the Create Team header and form
        When I fill all the fields present with appropriate data
        And I click Create button
        Then I expect analytics events to be emitted
        And I should see the page for my team
        And I should see the message that says "You are member of this team"
        And the new team should be added to the list of teams within the topic
        And the number of teams should be updated on the topic card
        And if I switch to "My Team", the newly created team is displayed
        """
        # Log in as a regular (non-staff) learner so the creator is
        # automatically added as a member.
        AutoAuthPage(self.browser, course_id=self.course_id).visit()
        self.browse_teams_page.visit()
        self.verify_and_navigate_to_create_team_page()
        self.fill_create_or_edit_form()
        # Creating a team emits both a 'created' event and a 'learner_added'
        # event for the creator's automatic membership.
        expected_events = [
            {
                'event_type': 'edx.team.created'
            },
            {
                'event_type': 'edx.team.learner_added',
                'event': {
                    'add_method': 'added_on_create',
                }
            }
        ]
        with self.assert_events_match_during(event_filter=self.only_team_events, expected_events=expected_events):
            self.team_management_page.submit_form()
        # Verify that the page is shown for the new team
        team_page = TeamPage(self.browser, self.course_id)
        team_page.wait_for_page()
        self.assertEqual(team_page.team_name, self.TEAMS_NAME)
        self.assertEqual(team_page.team_description, self.TEAM_DESCRIPTION)
        self.assertEqual(team_page.team_user_membership_text, 'You are a member of this team.')
        # Verify the new team was added to the topic list
        self.teams_page.click_specific_topic("Example Topic")
        self.teams_page.verify_topic_team_count(1)
        self.teams_page.click_all_topics()
        self.teams_page.verify_team_count_in_first_topic(1)
        # Verify that if one switches to "My Team" without reloading the page, the newly created team is shown.
        self.verify_my_team_count(1)

    def test_user_can_cancel_the_team_creation(self):
        """
        Scenario: The user should be able to cancel the creation of new team.
        Given I am enrolled in a course with a team configuration and a topic
        When I visit the Create Team page for that topic
        Then I should see the Create Team header and form
        When I click Cancel button
        Then I should see teams list page without any new team.
        And if I switch to "My Team", it shows no teams
        """
        self.assertTrue(self.browse_teams_page.get_pagination_header_text().startswith('Showing 0 out of 0 total'))
        self.verify_and_navigate_to_create_team_page()
        self.team_management_page.cancel_team()
        self.assertTrue(self.browse_teams_page.is_browser_on_page())
        # Cancelling must leave the team count unchanged everywhere.
        self.assertTrue(self.browse_teams_page.get_pagination_header_text().startswith('Showing 0 out of 0 total'))
        self.teams_page.click_all_topics()
        self.teams_page.verify_team_count_in_first_topic(0)
        self.verify_my_team_count(0)

    def test_page_viewed_event(self):
        """
        Scenario: Visiting the create team page should fire a page viewed event.
        Given I am enrolled in a course with a team configuration and a topic
        When I visit the create team page
        Then my browser should post a page viewed event
        """
        events = [{
            'event_type': 'edx.team.page_viewed',
            'event': {
                'page_name': 'new-team',
                'topic_id': self.topic['id'],
                'team_id': None
            }
        }]
        with self.assert_events_match_during(self.only_team_events, expected_events=events):
            self.verify_and_navigate_to_create_team_page()
@ddt.ddt
class DeleteTeamTest(TeamFormActions):
    """
    Tests for deleting teams.
    """

    def setUp(self):
        super(DeleteTeamTest, self).setUp()
        self.set_team_configuration(
            {'course_id': self.course_id, 'max_team_size': 10, 'topics': [self.topic]},
            global_staff=True
        )
        self.team = self.create_teams(self.topic, num_teams=1)[0]
        self.team_page = TeamPage(self.browser, self.course_id, team=self.team)
        # Need to have a membership to confirm it gets deleted as well.
        self.create_membership(self.user_info['username'], self.team['id'])
        self.team_page.visit()

    def test_cancel_delete(self):
        """
        Scenario: The user should be able to cancel the Delete Team dialog
        Given I am staff user for a course with a team
        When I visit the Team profile page
        Then I should see the Edit Team button
        And When I click edit team button
        Then I should see the Delete Team button
        When I click the delete team button
        And I cancel the prompt
        And I refresh the page
        Then I should still see the team
        """
        self.delete_team(cancel=True)
        self.assertTrue(self.team_management_page.is_browser_on_page())
        # Reload to prove the team survived the cancelled delete.
        self.browser.refresh()
        self.team_management_page.wait_for_page()
        self.assertEqual(
            ' '.join(('All Topics', self.topic['name'], self.team['name'])),
            self.team_management_page.header_page_breadcrumbs
        )

    @ddt.data('Moderator', 'Community TA', 'Administrator', None)
    def test_delete_team(self, role):
        """
        Scenario: The user should be able to see and navigate to the delete team page.
        Given I am staff user for a course with a team
        When I visit the Team profile page
        Then I should see the Edit Team button
        And When I click edit team button
        Then I should see the Delete Team button
        When I click the delete team button
        And I confirm the prompt
        Then I should see the browse teams page
        And the team should not be present
        """
        # If role is None, remain logged in as global staff
        if role is not None:
            AutoAuthPage(
                self.browser,
                course_id=self.course_id,
                staff=False,
                roles=role
            ).visit()
            self.team_page.visit()
        self.delete_team(require_notification=False)
        browse_teams_page = BrowseTeamsPage(self.browser, self.course_id, self.topic)
        self.assertTrue(browse_teams_page.is_browser_on_page())
        self.assertNotIn(self.team['name'], browse_teams_page.team_names)

    def delete_team(self, **kwargs):
        """
        Delete a team. Passes `kwargs` to `confirm_prompt`.
        Expects edx.team.deleted event to be emitted, with correct course_id.
        Also expects edx.team.learner_removed event to be emitted for the
        membership that is removed as a part of the delete operation.
        """
        self.team_page.click_edit_team_button()
        self.team_management_page.wait_for_page()
        self.team_management_page.delete_team_button.click()
        if 'cancel' in kwargs and kwargs['cancel'] is True:
            # Cancelled deletes emit no events, so skip event assertions.
            confirm_prompt(self.team_management_page, **kwargs)
        else:
            expected_events = [
                {
                    'event_type': 'edx.team.deleted',
                    'event': {
                        'team_id': self.team['id']
                    }
                },
                {
                    'event_type': 'edx.team.learner_removed',
                    'event': {
                        'team_id': self.team['id'],
                        'remove_method': 'team_deleted',
                        'user_id': self.user_info['user_id']
                    }
                }
            ]
            with self.assert_events_match_during(
                event_filter=self.only_team_events, expected_events=expected_events
            ):
                confirm_prompt(self.team_management_page, **kwargs)

    def test_delete_team_updates_topics(self):
        """
        Scenario: Deleting a team should update the team count on the topics page
        Given I am staff user for a course with a team
        And I delete a team
        When I navigate to the browse topics page
        Then the team count for the deleted team's topic should be updated
        """
        self.delete_team(require_notification=False)
        BrowseTeamsPage(self.browser, self.course_id, self.topic).click_all_topics()
        topics_page = BrowseTopicsPage(self.browser, self.course_id)
        self.assertTrue(topics_page.is_browser_on_page())
        self.teams_page.verify_topic_team_count(0)
@ddt.ddt
class EditTeamTest(TeamFormActions):
    """
    Tests for editing the team.
    """

    def setUp(self):
        super(EditTeamTest, self).setUp()
        self.set_team_configuration(
            {'course_id': self.course_id, 'max_team_size': 10, 'topics': [self.topic]},
            global_staff=True
        )
        self.team = self.create_teams(self.topic, num_teams=1)[0]
        self.team_page = TeamPage(self.browser, self.course_id, team=self.team)
        self.team_page.visit()

    def test_staff_can_navigate_to_edit_team_page(self):
        """
        Scenario: The user should be able to see and navigate to the edit team page.
        Given I am staff user for a course with a team
        When I visit the Team profile page
        Then I should see the Edit Team button
        And When I click edit team button
        Then I should see the edit team page
        And I should see the edit team header
        And I should also see the help messages for fields
        """
        self.verify_and_navigate_to_edit_team_page()
        self.verify_all_fields_exist()

    def test_staff_can_edit_team_successfully(self):
        """
        Scenario: The staff should be able to edit team successfully.
        Given I am staff user for a course with a team
        When I visit the Team profile page
        Then I should see the Edit Team button
        And When I click edit team button
        Then I should see the edit team page
        And an analytics event should be fired
        When I edit all the fields with appropriate data
        And I click Update button
        Then I should see the page for my team with updated data
        """
        # create_teams produces a team with country 'AF' (Afghanistan) and
        # language 'aa' (Afar); verify the baseline before editing.
        self.verify_team_info(
            name=self.team['name'],
            description=self.team['description'],
            location='Afghanistan',
            language='Afar'
        )
        self.verify_and_navigate_to_edit_team_page()
        self.fill_create_or_edit_form()
        # One edx.team.changed event is expected per modified field, carrying
        # the old and new values.
        expected_events = [
            {
                'event_type': 'edx.team.changed',
                'event': {
                    'team_id': self.team['id'],
                    'field': 'country',
                    'old': 'AF',
                    'new': 'PK',
                    'truncated': [],
                }
            },
            {
                'event_type': 'edx.team.changed',
                'event': {
                    'team_id': self.team['id'],
                    'field': 'name',
                    'old': self.team['name'],
                    'new': self.TEAMS_NAME,
                    'truncated': [],
                }
            },
            {
                'event_type': 'edx.team.changed',
                'event': {
                    'team_id': self.team['id'],
                    'field': 'language',
                    'old': 'aa',
                    'new': 'en',
                    'truncated': [],
                }
            },
            {
                'event_type': 'edx.team.changed',
                'event': {
                    'team_id': self.team['id'],
                    'field': 'description',
                    'old': self.team['description'],
                    'new': self.TEAM_DESCRIPTION,
                    'truncated': [],
                }
            },
        ]
        with self.assert_events_match_during(
            event_filter=self.only_team_events,
            expected_events=expected_events,
        ):
            self.team_management_page.submit_form()
        self.team_page.wait_for_page()
        self.verify_team_info(
            name=self.TEAMS_NAME,
            description=self.TEAM_DESCRIPTION,
            location='Pakistan',
            language='English'
        )

    def test_staff_can_cancel_the_team_edit(self):
        """
        Scenario: The user should be able to cancel the editing of team.
        Given I am staff user for a course with a team
        When I visit the Team profile page
        Then I should see the Edit Team button
        And When I click edit team button
        Then I should see the edit team page
        Then I should see the Edit Team header
        When I click Cancel button
        Then I should see team page page without changes.
        """
        self.verify_team_info(
            name=self.team['name'],
            description=self.team['description'],
            location='Afghanistan',
            language='Afar'
        )
        self.verify_and_navigate_to_edit_team_page()
        # Fill the form, then cancel; no field may change.
        self.fill_create_or_edit_form()
        self.team_management_page.cancel_team()
        self.team_page.wait_for_page()
        self.verify_team_info(
            name=self.team['name'],
            description=self.team['description'],
            location='Afghanistan',
            language='Afar'
        )

    def test_student_cannot_see_edit_button(self):
        """
        Scenario: The student should not see the edit team button.
        Given I am student for a course with a team
        When I visit the Team profile page
        Then I should not see the Edit Team button
        """
        AutoAuthPage(self.browser, course_id=self.course_id).visit()
        self.team_page.visit()
        self.assertFalse(self.team_page.edit_team_button_present)

    @ddt.data('Moderator', 'Community TA', 'Administrator')
    def test_discussion_privileged_user_can_edit_team(self, role):
        """
        Scenario: The user with specified role should see the edit team button.
        Given I am user with privileged role for a course with a team
        When I visit the Team profile page
        Then I should see the Edit Team button
        """
        kwargs = {
            'course_id': self.course_id,
            'staff': False
        }
        if role is not None:
            kwargs['roles'] = role
        AutoAuthPage(self.browser, **kwargs).visit()
        self.team_page.visit()
        self.teams_page.wait_for_page()
        self.assertTrue(self.team_page.edit_team_button_present)
        self.verify_team_info(
            name=self.team['name'],
            description=self.team['description'],
            location='Afghanistan',
            language='Afar'
        )
        self.verify_and_navigate_to_edit_team_page()
        self.fill_create_or_edit_form()
        self.team_management_page.submit_form()
        self.team_page.wait_for_page()
        self.verify_team_info(
            name=self.TEAMS_NAME,
            description=self.TEAM_DESCRIPTION,
            location='Pakistan',
            language='English'
        )

    def test_page_viewed_event(self):
        """
        Scenario: Visiting the edit team page should fire a page viewed event.
        Given I am enrolled in a course with a team configuration and a topic
        When I visit the edit team page
        Then my browser should post a page viewed event
        """
        events = [{
            'event_type': 'edx.team.page_viewed',
            'event': {
                'page_name': 'edit-team',
                'topic_id': self.topic['id'],
                'team_id': self.team['id']
            }
        }]
        with self.assert_events_match_during(self.only_team_events, expected_events=events):
            self.verify_and_navigate_to_edit_team_page()
@ddt.ddt
class EditMembershipTest(TeamFormActions):
"""
Tests for administrating from the team membership page
"""
def setUp(self):
    """Create a team with one member and the pages needed to manage it."""
    super(EditMembershipTest, self).setUp()
    self.set_team_configuration(
        {'course_id': self.course_id, 'max_team_size': 10, 'topics': [self.topic]},
        global_staff=True
    )
    self.team_management_page = TeamManagementPage(self.browser, self.course_id, self.topic)
    self.team = self.create_teams(self.topic, num_teams=1)[0]
    # Make sure a user exists on this team so we can edit the membership.
    self.create_membership(self.user_info['username'], self.team['id'])
    self.edit_membership_page = EditMembershipPage(self.browser, self.course_id, self.team)
    self.team_page = TeamPage(self.browser, self.course_id, team=self.team)
def edit_membership_helper(self, role, cancel=False):
"""
Helper for common functionality in edit membership tests.
Checks for all relevant assertions about membership being removed,
including verify edx.team.learner_removed events are emitted.
"""
if role is not None:
AutoAuthPage(
self.browser,
course_id=self.course_id,
staff=False,
roles=role
).visit()
self.team_page.visit()
self.team_page.click_edit_team_button()
self.team_management_page.wait_for_page()
self.assertTrue(
self.team_management_page.membership_button_present
)
self.team_management_page.click_membership_button()
self.edit_membership_page.wait_for_page()
self.edit_membership_page.click_first_remove()
if cancel:
self.edit_membership_page.cancel_delete_membership_dialog()
self.assertEqual(self.edit_membership_page.team_members, 1)
else:
expected_events = [
{
'event_type': 'edx.team.learner_removed',
'event': {
'team_id': self.team['id'],
'remove_method': 'removed_by_admin',
'user_id': self.user_info['user_id']
}
}
]
with self.assert_events_match_during(
event_filter=self.only_team_events, expected_events=expected_events
):
self.edit_membership_page.confirm_delete_membership_dialog()
self.assertEqual(self.edit_membership_page.team_members, 0)
self.assertTrue(self.edit_membership_page.is_browser_on_page)
@ddt.data('Moderator', 'Community TA', 'Administrator', None)
def test_remove_membership(self, role):
"""
Scenario: The user should be able to remove a membership
Given I am staff user for a course with a team
When I visit the Team profile page
Then I should see the Edit Team button
And When I click edit team button
Then I should see the Edit Membership button
And When I click the edit membership button
Then I should see the edit membership page
And When I click the remove button and confirm the dialog
Then my membership should be removed, and I should remain on the page
"""
self.edit_membership_helper(role, cancel=False)
@ddt.data('Moderator', 'Community TA', 'Administrator', None)
def test_cancel_remove_membership(self, role):
"""
Scenario: The user should be able to remove a membership
Given I am staff user for a course with a team
When I visit the Team profile page
Then I should see the Edit Team button
And When I click edit team button
Then I should see the Edit Membership button
And When I click the edit membership button
Then I should see the edit membership page
And When I click the remove button and cancel the dialog
Then my membership should not be removed, and I should remain on the page
"""
self.edit_membership_helper(role, cancel=True)
@attr(shard=5)
@ddt.ddt
class TeamPageTest(TeamsTabBase):
    """Tests for viewing a specific team"""
    # Invite text shown on the team page once the user is a member.
    SEND_INVITE_TEXT = 'Send this link to friends so that they can join too.'

    def setUp(self):
        super(TeamPageTest, self).setUp()
        # Single topic shared by every test; teams are created per-test.
        self.topic = {u"name": u"Example Topic", u"id": "example_topic", u"description": "Description"}

    def _set_team_configuration_and_membership(
            self,
            max_team_size=10,
            membership_team_index=0,
            visit_team_index=0,
            create_membership=True,
            another_user=False):
        """
        Set team configuration.
        Arguments:
            max_team_size (int): number of users a team can have
            membership_team_index (int): index of team user will join
            visit_team_index (int): index of team user will visit
            create_membership (bool): whether to create membership or not
            another_user (bool): another user to visit a team
        """
        #pylint: disable=attribute-defined-outside-init
        self.set_team_configuration(
            {'course_id': self.course_id, 'max_team_size': max_team_size, 'topics': [self.topic]}
        )
        # Two teams so membership and visit targets can differ.
        self.teams = self.create_teams(self.topic, 2)
        if create_membership:
            self.create_membership(self.user_info['username'], self.teams[membership_team_index]['id'])
        if another_user:
            # Log in as a fresh, team-less user before visiting the page.
            AutoAuthPage(self.browser, course_id=self.course_id).visit()
        self.team_page = TeamPage(self.browser, self.course_id, self.teams[visit_team_index])

    def setup_thread(self):
        """
        Create and return a thread for this test's discussion topic.
        """
        thread = Thread(
            id="test_thread_{}".format(uuid4().hex),
            commentable_id=self.teams[0]['discussion_topic_id'],
            body="Dummy text body."
        )
        thread_fixture = MultipleThreadFixture([thread])
        thread_fixture.push()
        return thread

    def setup_discussion_user(self, role=None, staff=False):
        """Set this test's user to have the given role in its
        discussions. Role is one of 'Community TA', 'Moderator',
        'Administrator', or 'Student'.
        """
        kwargs = {
            'course_id': self.course_id,
            'staff': staff
        }
        if role is not None:
            kwargs['roles'] = role
        #pylint: disable=attribute-defined-outside-init
        self.user_info = AutoAuthPage(self.browser, **kwargs).visit().user_info

    def verify_teams_discussion_permissions(self, should_have_permission):
        """Verify that the teams discussion component is in the correct state
        for the test user. If `should_have_permission` is True, assert that
        the user can see controls for posting replies, voting, editing, and
        deleting. Otherwise, assert that those controls are hidden.
        """
        thread = self.setup_thread()
        self.team_page.visit()
        self.assertEqual(self.team_page.discussion_id, self.teams[0]['discussion_topic_id'])
        discussion = self.team_page.discussion_page
        self.assertTrue(discussion.is_browser_on_page())
        self.assertTrue(discussion.is_discussion_expanded())
        self.assertEqual(discussion.get_num_displayed_threads(), 1)
        self.assertTrue(discussion.has_thread(thread['id']))
        # Pick assertTrue/assertFalse once and apply it to every control.
        assertion = self.assertTrue if should_have_permission else self.assertFalse
        assertion(discussion.q(css='.post-header-actions').present)
        assertion(discussion.q(css='.add-response').present)
        assertion(discussion.q(css='.new-post-btn').present)

    def test_discussion_on_my_team_page(self):
        """
        Scenario: Team Page renders a discussion for a team to which I belong.
        Given I am enrolled in a course with a team configuration, a topic,
            and a team belonging to that topic of which I am a member
        When the team has a discussion with a thread
        And I visit the Team page for that team
        Then I should see a discussion with the correct discussion_id
        And I should see the existing thread
        And I should see controls to change the state of the discussion
        """
        self._set_team_configuration_and_membership()
        self.verify_teams_discussion_permissions(True)

    @ddt.data(True, False)
    def test_discussion_on_other_team_page(self, is_staff):
        """
        Scenario: Team Page renders a team discussion for a team to which I do
            not belong.
        Given I am enrolled in a course with a team configuration, a topic,
            and a team belonging to that topic of which I am not a member
        When the team has a discussion with a thread
        And I visit the Team page for that team
        Then I should see a discussion with the correct discussion_id
        And I should see the team's thread
        And I should not see controls to change the state of the discussion
        """
        self._set_team_configuration_and_membership(create_membership=False)
        self.setup_discussion_user(staff=is_staff)
        self.verify_teams_discussion_permissions(False)

    @ddt.data('Moderator', 'Community TA', 'Administrator')
    def test_discussion_privileged(self, role):
        """
        Users with a privileged discussion role can moderate another
        team's discussion even without being a member of that team.
        """
        self._set_team_configuration_and_membership(create_membership=False)
        self.setup_discussion_user(role=role)
        self.verify_teams_discussion_permissions(True)

    def assert_team_details(self, num_members, is_member=True, max_size=10):
        """
        Verifies that user can see all the information, present on detail page according to their membership status.
        Arguments:
            num_members (int): number of users in a team
            is_member (bool) default True: True if request user is member else False
            max_size (int): number of users a team can have
        """
        self.assertEqual(
            self.team_page.team_capacity_text,
            self.team_page.format_capacity_text(num_members, max_size)
        )
        # Country/language values come from fill_create_or_edit_form defaults.
        self.assertEqual(self.team_page.team_location, 'Afghanistan')
        self.assertEqual(self.team_page.team_language, 'Afar')
        self.assertEqual(self.team_page.team_members, num_members)
        if num_members > 0:
            self.assertTrue(self.team_page.team_members_present)
        else:
            self.assertFalse(self.team_page.team_members_present)
        if is_member:
            self.assertEqual(self.team_page.team_user_membership_text, 'You are a member of this team.')
            self.assertTrue(self.team_page.team_leave_link_present)
            self.assertTrue(self.team_page.new_post_button_present)
        else:
            self.assertEqual(self.team_page.team_user_membership_text, '')
            self.assertFalse(self.team_page.team_leave_link_present)
            self.assertFalse(self.team_page.new_post_button_present)

    def test_team_member_can_see_full_team_details(self):
        """
        Scenario: Team member can see full info for team.
        Given I am enrolled in a course with a team configuration, a topic,
            and a team belonging to that topic of which I am a member
        When I visit the Team page for that team
        Then I should see the full team detail
        And I should see the team members
        And I should see my team membership text
        And I should see the language & country
        And I should see the Leave Team and Invite Team
        """
        self._set_team_configuration_and_membership()
        self.team_page.visit()
        self.assert_team_details(
            num_members=1,
        )

    def test_other_users_can_see_limited_team_details(self):
        """
        Scenario: Users who are not member of this team can only see limited info for this team.
        Given I am enrolled in a course with a team configuration, a topic,
            and a team belonging to that topic of which I am not a member
        When I visit the Team page for that team
        Then I should not see full team detail
        And I should see the team members
        And I should not see my team membership text
        And I should not see the Leave Team and Invite Team links
        """
        self._set_team_configuration_and_membership(create_membership=False)
        self.team_page.visit()
        self.assert_team_details(is_member=False, num_members=0)

    def test_user_can_navigate_to_members_profile_page(self):
        """
        Scenario: User can navigate to profile page via team member profile image.
        Given I am enrolled in a course with a team configuration, a topic,
            and a team belonging to that topic of which I am a member
        When I visit the Team page for that team
        Then I should see profile images for the team members
        When I click on the first profile image
        Then I should be taken to the user's profile page
        And I should see the username on profile page
        """
        self._set_team_configuration_and_membership()
        self.team_page.visit()
        # Capture the username before navigating away from the team page.
        learner_name = self.team_page.first_member_username
        self.team_page.click_first_profile_image()
        learner_profile_page = LearnerProfilePage(self.browser, learner_name)
        learner_profile_page.wait_for_page()
        learner_profile_page.wait_for_field('username')
        self.assertTrue(learner_profile_page.field_is_visible('username'))

    def test_join_team(self):
        """
        Scenario: User can join a Team if not a member already..
        Given I am enrolled in a course with a team configuration, a topic,
            and a team belonging to that topic
        And I visit the Team page for that team
        Then I should see Join Team button
        And I should not see New Post button
        When I click on Join Team button
        Then there should be no Join Team button and no message
        And an analytics event should be emitted
        And I should see the updated information under Team Details
        And I should see New Post button
        And if I switch to "My Team", the team I have joined is displayed
        """
        self._set_team_configuration_and_membership(create_membership=False)
        teams_page = BrowseTeamsPage(self.browser, self.course_id, self.topic)
        teams_page.visit()
        teams_page.view_first_team()
        self.assertTrue(self.team_page.join_team_button_present)
        expected_events = [
            {
                'event_type': 'edx.team.learner_added',
                'event': {
                    'add_method': 'joined_from_team_view'
                }
            }
        ]
        # Joining should emit the learner_added analytics event.
        with self.assert_events_match_during(event_filter=self.only_team_events, expected_events=expected_events):
            self.team_page.click_join_team_button()
        self.assertFalse(self.team_page.join_team_button_present)
        self.assertFalse(self.team_page.join_team_message_present)
        self.assert_team_details(num_members=1, is_member=True)
        # Verify that if one switches to "My Team" without reloading the page, the newly joined team is shown.
        self.teams_page.click_all_topics()
        self.verify_my_team_count(1)

    def test_already_member_message(self):
        """
        Scenario: User should see `You are already in a team` if user is a
            member of other team.
        Given I am enrolled in a course with a team configuration, a topic,
            and a team belonging to that topic
        And I am already a member of a team
        And I visit a team other than mine
        Then I should see `You are already in a team` message
        """
        # Join team 0 but visit team 1.
        self._set_team_configuration_and_membership(membership_team_index=0, visit_team_index=1)
        self.team_page.visit()
        self.assertEqual(self.team_page.join_team_message, 'You already belong to another team.')
        self.assert_team_details(num_members=0, is_member=False)

    def test_team_full_message(self):
        """
        Scenario: User should see `Team is full` message when team is full.
        Given I am enrolled in a course with a team configuration, a topic,
            and a team belonging to that topic
        And team has no space left
        And I am not a member of any team
        And I visit the team
        Then I should see `Team is full` message
        """
        # max_team_size=1 with one existing member fills the team; the page
        # is then visited by a second, team-less user.
        self._set_team_configuration_and_membership(
            create_membership=True,
            max_team_size=1,
            membership_team_index=0,
            visit_team_index=0,
            another_user=True
        )
        self.team_page.visit()
        self.assertEqual(self.team_page.join_team_message, 'This team is full.')
        self.assert_team_details(num_members=1, is_member=False, max_size=1)

    def test_leave_team(self):
        """
        Scenario: User can leave a team.
        Given I am enrolled in a course with a team configuration, a topic,
            and a team belonging to that topic
        And I am a member of team
        And I visit the team
        And I should not see Join Team button
        And I should see New Post button
        Then I should see Leave Team link
        When I click on Leave Team link
        Then user should be removed from team
        And an analytics event should be emitted
        And I should see Join Team button
        And I should not see New Post button
        And if I switch to "My Team", the team I have left is not displayed
        """
        self._set_team_configuration_and_membership()
        self.team_page.visit()
        self.assertFalse(self.team_page.join_team_button_present)
        self.assert_team_details(num_members=1)
        expected_events = [
            {
                'event_type': 'edx.team.learner_removed',
                'event': {
                    'remove_method': 'self_removal'
                }
            }
        ]
        # Leaving should emit the learner_removed analytics event.
        with self.assert_events_match_during(event_filter=self.only_team_events, expected_events=expected_events):
            self.team_page.click_leave_team_link()
        self.assert_team_details(num_members=0, is_member=False)
        self.assertTrue(self.team_page.join_team_button_present)
        # Verify that if one switches to "My Team" without reloading the page, the old team no longer shows.
        self.teams_page.click_all_topics()
        self.verify_my_team_count(0)

    def test_page_viewed_event(self):
        """
        Scenario: Visiting the team profile page should fire a page viewed event.
        Given I am enrolled in a course with a team configuration and a topic
        When I visit the team profile page
        Then my browser should post a page viewed event
        """
        self._set_team_configuration_and_membership()
        events = [{
            'event_type': 'edx.team.page_viewed',
            'event': {
                'page_name': 'single-team',
                'topic_id': self.topic['id'],
                'team_id': self.teams[0]['id']
            }
        }]
        with self.assert_events_match_during(self.only_team_events, expected_events=events):
            self.team_page.visit()
|
louyihua/edx-platform
|
common/test/acceptance/tests/lms/test_teams.py
|
Python
|
agpl-3.0
| 83,640
|
[
"VisIt"
] |
313098dc9a621ae042214c3b9a0c1554651a6b6ba9650f737ffd35d3d0daab78
|
from setuptools import setup

# Read the full license text up front; a context manager closes the file
# handle promptly (the original `open(...).read()` leaked it until
# interpreter exit). The value passed to setup() is unchanged.
with open('LICENSE', 'r') as license_file:
    license_text = license_file.read()

setup(
    name='sulley',
    packages=['sulley'],
    version='0.1.0b0',
    description=('With Sulley, you can build SMS bots in just'
                 ' a few lines of code. Powered by Twilio & Plivo,'
                 ' Sulley requires very minimal configuration and '
                 'code to bring your bot to life!'),
    long_description=('For more information, visit:'
                      ' https://github.com/sandeepraju/sulley'),
    author='Sandeep Raju Prabhakar',
    author_email='me@sandeepraju.in',
    license=license_text,
    url='https://github.com/sandeepraju/sulley',
    download_url='https://github.com/sandeepraju/sulley/archive/master.zip',
    install_requires=[
        'plivo==0.11.1',
        'twilio==5.4.0',
        'Flask==0.11.1'
    ],
    tests_require=[
        'mock==2.0.0',
        'pylint==1.6.4'
    ],
    keywords=[
        'sms', 'message', 'twilio', 'plivo',
        'bot', 'text', 'communication'
    ],
    classifiers=[
        'Programming Language :: Python',
        'Intended Audience :: Developers',
        'Operating System :: OS Independent',
        'License :: OSI Approved :: BSD License',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Development Status :: 4 - Beta',
    ]
)
|
sandeepraju/sulley
|
setup.py
|
Python
|
bsd-3-clause
| 1,321
|
[
"VisIt"
] |
66354266f5cbfdc798ed9f5b7e4ef2714106ae62390d3ad3e21c3feb62265a38
|
# -*- coding: utf-8 -*-
# Copyright (c) 2013 Kura
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import unicode_literals
from docutils import nodes
from docutils.parsers.rst import directives, Directive
class YouTube(Directive):
    """Embed a YouTube video in posts.

    Based on the YouTube directive by Brian Hsu:
    https://gist.github.com/1422773

    VIDEO_ID is required; width / height are optional integers,
    and align may be left / center / right.

    Usage:
        .. youtube:: VIDEO_ID
           :width: 640
           :height: 480
           :align: center
    """
    def align(argument):
        """Conversion function for the "align" option."""
        return directives.choice(argument, ('left', 'center', 'right'))

    required_arguments = 1
    optional_arguments = 2
    option_spec = {
        'width': directives.positive_int,
        'height': directives.positive_int,
        'align': align
    }
    final_argument_whitespace = False
    has_content = False

    def run(self):
        # Options fall back to a 420x315 left-aligned embed.
        video_id = self.arguments[0].strip()
        width = self.options.get('width', 420)
        height = self.options.get('height', 315)
        align = self.options.get('align', 'left')

        src = 'https://www.youtube.com/embed/{}'.format(video_id)
        opening = '<div class="youtube" align="{}">'.format(align)
        iframe = ('<iframe width="{}" height="{}" src="{}" '
                  'frameborder="0"></iframe>').format(width, height, src)

        # Emit raw HTML nodes: wrapper div, the iframe, then the closing tag.
        return [
            nodes.raw('', opening, format='html'),
            nodes.raw('', iframe, format='html'),
            nodes.raw('', '</div>', format='html')]
def register():
    """Register the ``youtube`` directive with docutils (Pelican plugin hook)."""
    directives.register_directive('youtube', YouTube)
|
getpelican-plugins/pelican_youtube
|
pelican_youtube/youtube.py
|
Python
|
mit
| 2,902
|
[
"Brian"
] |
38e051c3abe29cf20d12136f7524fcc21f3f146611361126311890f40370f10b
|
# snippet to be run in the introspection window of a slice3dVWR to
# export the whole scene as a RIB file. Before you can do this,
# both BACKGROUND_RENDERER and GRADIENT_BACKGROUND in slice3dVWR.py
# should be set to False.

# hide the orientation widget so it is not included in the export
obj._orientation_widget.Off()
render_window = obj._threedRenderer.GetRenderWindow()

# renamed from `re`/`rw`: the original `re` shadowed the stdlib re module
exporter = vtk.vtkRIBExporter()
exporter.SetRenderWindow(render_window)
exporter.SetFilePrefix('/tmp/slice3dVWR')
exporter.Write()
|
nagyistoce/devide
|
snippets/ribexport.py
|
Python
|
bsd-3-clause
| 398
|
[
"VTK"
] |
8c21f53afe198881ed49790ddd61c8a7615199daae848b562dc51c311bf63f09
|
#!/usr/bin/env python
##############################################################################################
#
#
# regrid_emissions_N96e.py
#
#
# Requirements:
# Iris 1.10, time, cf_units, numpy
#
#
# This Python script has been written by N.L. Abraham as part of the UKCA Tutorials:
# http://www.ukca.ac.uk/wiki/index.php/UKCA_Chemistry_and_Aerosol_Tutorials_at_vn10.4
#
# Copyright (C) 2015 University of Cambridge
#
# This is free software: you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# It is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
#
# You find a copy of the GNU Lesser General Public License at <http://www.gnu.org/licenses/>.
#
# Written by N. Luke Abraham 2016-10-20 <nla27@cam.ac.uk>
# Modified by Marcus Koehler 2017-10-12 <mok21@cam.ac.uk>
#
#
##############################################################################################
# preamble
import time
import iris
import cf_units
import numpy
# --- CHANGE THINGS BELOW THIS LINE TO WORK WITH YOUR FILES ETC. ---

# name of file containing an ENDGame grid, e.g. your model output
# NOTE: all the fields in the file should be on the same horizontal
#       grid, as the field used MAY NOT be the first in order of STASH
# (site-specific JASMIN path - adjust for your own system)
grid_file='/group_workspaces/jasmin2/ukca/vol1/mkoehler/um/archer/ag542/apm.pp/ag542a.pm1988dec'
#
# name of emissions file
# NOTE: We use the fluxes from the Gregorian calendar file also for the 360_day emission files
emissions_file='/group_workspaces/jasmin2/ukca/vol1/mkoehler/emissions/combined_1950-2020/0.5x0.5/combined_sources_NVOC_1950-2020.nc'
#
# STASH code emissions are associated with
#   301-320: surface
#   m01s00i315: NVOC surface emissions
#
#   321-340: full atmosphere
#
stash='m01s00i315'
# --- BELOW THIS LINE, NOTHING SHOULD NEED TO BE CHANGED ---

species_name='NVOC'

# this is the grid we want to regrid to, e.g. N96 ENDGame
grd=iris.load(grid_file)[0]
# bounds are needed for area-weighted regridding below
grd.coord(axis='x').guess_bounds()
grd.coord(axis='y').guess_bounds()

# This is the original data
ems=iris.load_cube(emissions_file)

# make intersection between 0 and 360 longitude to ensure that
# the data is regridded correctly
nems = ems.intersection(longitude=(0, 360))

# make sure that we use the same coordinate system, otherwise regrid won't work
nems.coord(axis='x').coord_system=grd.coord_system()
nems.coord(axis='y').coord_system=grd.coord_system()

# now guess the bounds of the new grid prior to regridding
nems.coord(axis='x').guess_bounds()
nems.coord(axis='y').guess_bounds()

# now regrid (area-weighted scheme conserves the flux totals)
ocube=nems.regrid(grd,iris.analysis.AreaWeighted())

# now add correct attributes and names to netCDF file
ocube.var_name='emissions_'+str.strip(species_name)
ocube.long_name='NVOC surf emissions expressed as carbon'
ocube.units=cf_units.Unit('kg m-2 s-1')
ocube.attributes['vertical_scaling']='surface'
ocube.attributes['um_stash_source']=stash
ocube.attributes['tracer_name']=str.strip(species_name)
# global attributes, so don't set in local_keys
# NOTE: all these should be strings, including the numbers!
# basic emissions type
ocube.attributes['emission_type']='1' # time series
ocube.attributes['update_type']='1' # same as above
ocube.attributes['update_freq_in_hours']='120' # i.e. 5 days
ocube.attributes['um_version']='10.6' # UM version
# FIX: 'source' and 'title' previously still named the CH3OH/methanol dataset
# this script was adapted from; they now match the NVOC emissions file read
# above (see emissions_file and the cube long_name).
ocube.attributes['source']='combined_sources_NVOC_1950-2020.nc'
ocube.attributes['title']='Time-varying monthly surface emissions of NVOC from 1950 to 2020, expressed as carbon'
ocube.attributes['File_version']='v3'
ocube.attributes['File_creation_date']=time.ctime(time.time())
ocube.attributes['grid']='regular 1.875 x 1.25 degree longitude-latitude grid (N96e)'
ocube.attributes['history']=time.ctime(time.time())+': '+__file__+' \n'+ocube.attributes['history']
ocube.attributes['institution']='Centre for Atmospheric Science, Department of Chemistry, University of Cambridge, U.K.'
ocube.attributes['reference']='Granier et al., Clim. Change, 2011; Lamarque et al., Atmos. Chem. Phys., 2010'
# drop stale lowercase attributes inherited from the input netCDF file
del ocube.attributes['file_creation_date']
del ocube.attributes['description']
# rename and set time coord - mid-month from 1950-Jan to 2020-Dec
# this bit is annoyingly fiddly
# The units are switched to a 360_day calendar so the 30-day-spaced
# points assigned below line up with UM month boundaries.
ocube.coord(axis='t').var_name='time'
ocube.coord(axis='t').standard_name='time'
ocube.coords(axis='t')[0].units=cf_units.Unit('days since 1950-01-01 00:00:00', calendar='360_day')
# Mid-month time points on the 360_day calendar: one value per month from
# 1950-Jan to 2020-Dec (71 years x 12 = 852 points), i.e. day 15, 45, ...,
# 25545. Generated arithmetically instead of the original hand-written
# 852-element literal (identical values: start 15, constant step 30).
ocube.coord(axis='t').points=numpy.arange(15, 25546, 30)
# make z-direction.
# Surface emissions only, so a single dummy model level (0) is added;
# 'positive: up' records the axis orientation for CF compliance.
zdims = iris.coords.DimCoord(numpy.array([0]), standard_name='model_level_number',
                             units='1', attributes={'positive': 'up'})
ocube.add_aux_coord(zdims)
# promote the scalar level coordinate to a real cube dimension
ocube = iris.util.new_axis(ocube, zdims)
# now transpose cube to put Z 2nd (expected UM ancillary ordering: T, Z, Y, X)
ocube.transpose([1, 0, 2, 3])
# make coordinates 64-bit so the output matches the reference files exactly
ocube.coord(axis='x').points = ocube.coord(axis='x').points.astype(dtype='float64')
ocube.coord(axis='y').points = ocube.coord(axis='y').points.astype(dtype='float64')
#ocube.coord(axis='z').points = ocube.coord(axis='z').points.astype(dtype='float64') # integer
ocube.coord(axis='t').points = ocube.coord(axis='t').points.astype(dtype='float64')
# for some reason, longitude_bounds are double, but latitude_bounds are float
ocube.coord('latitude').bounds = ocube.coord('latitude').bounds.astype(dtype='float64')
# add forecast_period & forecast_reference_time
# forecast_reference_time:
# mid-month time points on the 360-day calendar, in days since 1950-01-01.
# The original file spelled out all 852 monthly values (15, 45, ..., 25545)
# as a literal list; they form an exact arithmetic sequence (30-day months
# over 71 years, 1950-2020), so generate it instead of hand-maintaining it.
frt = numpy.arange(15.0, 25546.0, 30.0, dtype='float64')
# Attach the reference times as an auxiliary coordinate on the time dimension.
frt_dims = iris.coords.AuxCoord(frt, standard_name='forecast_reference_time',
                                units=cf_units.Unit('days since 1950-01-01 00:00:00', calendar='360_day'))
ocube.add_aux_coord(frt_dims, data_dims=0)
ocube.coord('forecast_reference_time').guess_bounds()
# forecast_period
# -360 hours = 15 days before the validity time, i.e. the start of the
# 30-day month; bounds [-720, 0] hours span the whole month.
fp = numpy.array([-360], dtype='float64')
fp_dims = iris.coords.AuxCoord(fp, standard_name='forecast_period',
                               units=cf_units.Unit('hours'), bounds=numpy.array([-720, 0], dtype='float64'))
ocube.add_aux_coord(fp_dims, data_dims=None)
# add-in cell_methods (data are monthly means over time)
ocube.cell_methods = [iris.coords.CellMethod('mean', 'time')]
# set _FillValue
fillval = 1e+20
ocube.data = numpy.ma.array(data=ocube.data, fill_value=fillval, dtype='float32')
# output file name, based on species
outpath = 'ukca_emiss_' + species_name + '.nc'
# don't want time to be cattable, as is a periodic emissions file
iris.FUTURE.netcdf_no_unlimited = True
# annoying hack to set a missing_value attribute as well as a _FillValue attribute
# (direct assignment via ocube.attributes would be rejected by iris)
dict.__setitem__(ocube.attributes, 'missing_value', fillval)
# now write-out to netCDF (classic format for maximum tool compatibility)
saver = iris.fileformats.netcdf.Saver(filename=outpath, netcdf_format='NETCDF3_CLASSIC')
saver.update_global_attributes(Conventions=iris.fileformats.netcdf.CF_CONVENTIONS_VERSION)
saver.write(ocube, local_keys=['vertical_scaling', 'missing_value', 'um_stash_source', 'tracer_name'])
# end of script
|
acsis-project/emissions
|
emissions/python/timeseries_1950-2020/regrid_NVOC_emissions_n96e_360d.py
|
Python
|
gpl-3.0
| 19,052
|
[
"NetCDF"
] |
0a33f49161092e048732a319154b2153ee9a8a8af54970ec8b50e7382fbad9e9
|
import os
from collections import OrderedDict, deque
from six.moves.cPickle import loads, dumps
import numpy as np
import pandas as pd
import pytest
from numpy.testing import assert_array_equal, assert_array_almost_equal
import oddt
from oddt.spatial import rmsd
from oddt.toolkits.common import canonize_ring_path
# Directory containing this test module; test fixtures live beneath it.
test_data_dir = os.path.dirname(os.path.abspath(__file__))
# Receptor structure (PDB) and docked actives (SDF) for the DUD-E XIAP target.
xiap_receptor = os.path.join(test_data_dir, 'data', 'dude', 'xiap',
                             'receptor_rdkit.pdb')
xiap_actives = os.path.join(test_data_dir, 'data', 'dude', 'xiap',
                            'actives_docked.sdf')
def test_mol():
    """Test common molecule operations.

    Covers hydrogen add/remove round-trips on a small molecule (phenol)
    and on the XIAP receptor protein, checking total atom counts and
    per-residue atom counts against stored reference lists.
    """
    # Hydrogen manipulation in small molecules: phenol has 7 heavy atoms,
    # 13 atoms with all hydrogens, 8 with only the polar (O-H) hydrogen.
    mol = oddt.toolkit.readstring('smi', 'c1ccccc1O')
    assert len(mol.atoms) == 7
    mol.addh()
    assert len(mol.atoms) == 13
    mol.removeh()
    mol.addh(only_polar=True)
    assert len(mol.atoms) == 8
    mol.removeh()
    assert len(mol.atoms) == 7
    # Hydrogen manipulation in proteins
    protein = next(oddt.toolkit.readfile('pdb', xiap_receptor))
    protein.protein = True
    # Reference per-residue atom counts: heavy atoms only ...
    res_atoms_n = [6, 10, 8, 8, 7, 11, 8, 7, 6, 8, 5, 8, 12, 9, 5, 11, 8,
                   11, 7, 11, 4, 7, 14, 8, 12, 6, 7, 8, 9, 9, 9, 8, 5, 11,
                   5, 4, 11, 12, 5, 8, 4, 9, 4, 8, 9, 7, 9, 6, 11, 10, 6,
                   4, 4, 4, 8, 7, 8, 14, 9, 7, 6, 9, 8, 7, 14, 9, 9, 10, 5,
                   9, 14, 12, 7, 4, 6, 9, 12, 8, 8, 9, 9, 9, 4, 9, 9, 12,
                   8, 8, 8, 8, 10, 8, 7, 10, 11, 12, 6, 7, 8, 11, 8, 9, 4,
                   8, 9, 7, 9, 6, 6, 4, 4, 4, 8, 7, 8, 14, 9, 7, 6, 9, 8,
                   7, 14, 9, 9, 10, 5, 9, 14, 12, 7, 4, 8, 10, 8, 7, 1, 1]
    # ... with all hydrogens added ...
    res_atoms_n_addh = [12, 17, 17, 19, 14, 23, 14, 14, 11, 17, 10, 13, 21,
                        16, 10, 23, 19, 20, 14, 20, 7, 14, 24, 19, 21, 11,
                        16, 14, 21, 16, 17, 19, 10, 23, 10, 7, 20, 21, 10,
                        19, 7, 16, 7, 13, 21, 16, 21, 10, 20, 17, 10, 7, 7,
                        7, 19, 14, 13, 24, 21, 14, 11, 16, 13, 14, 24, 16,
                        17, 16, 10, 21, 24, 21, 14, 7, 10, 21, 21, 19, 19,
                        16, 17, 21, 7, 17, 16, 21, 19, 14, 14, 19, 17, 19,
                        14, 18, 25, 22, 11, 17, 21, 22, 21, 17, 7, 13, 21,
                        16, 21, 11, 11, 7, 7, 7, 19, 14, 13, 24, 21, 14,
                        11, 16, 13, 14, 24, 16, 17, 16, 10, 21, 24, 21, 14,
                        8, 20, 17, 19, 15, 1, 1]
    # ... and with only polar hydrogens added.
    res_atoms_n_polarh = [9, 12, 9, 9, 7, 16, 11, 7, 8, 9, 6, 10, 14, 11,
                          6, 16, 9, 12, 9, 12, 5, 9, 16, 9, 14, 8, 8, 11,
                          12, 11, 12, 9, 6, 16, 6, 5, 12, 14, 6, 9, 5, 11,
                          5, 10, 12, 8, 12, 7, 12, 12, 7, 5, 5, 5, 9, 9,
                          10, 16, 12, 7, 8, 11, 10, 7, 16, 11, 12, 11, 6,
                          12, 16, 14, 7, 5, 7, 12, 14, 9, 9, 11, 12, 12, 5,
                          12, 11, 14, 9, 11, 11, 9, 12, 9, 9, 12, 17, 15,
                          8, 8, 10, 13, 10, 12, 5, 10, 12, 8, 12, 7, 8, 5,
                          5, 5, 9, 9, 10, 16, 12, 7, 8, 11, 10, 7, 16, 11,
                          12, 11, 6, 12, 16, 14, 7, 5, 10, 12, 9, 9, 1, 1]
    # Bare receptor (heavy atoms only)
    assert len(protein.atoms) == 1114
    assert len(protein.residues) == 138
    assert_array_equal([len(res.atoms) for res in protein.residues],
                       res_atoms_n)
    # All hydrogens: residue count must be unchanged
    protein.addh()
    assert len(protein.atoms) == 2170
    assert len(protein.residues) == 138
    assert_array_equal([len(res.atoms) for res in protein.residues],
                       res_atoms_n_addh)
    # Polar hydrogens only
    protein.removeh()
    protein.addh(only_polar=True)
    assert len(protein.atoms) == 1356
    assert len(protein.residues) == 138
    assert_array_equal([len(res.atoms) for res in protein.residues],
                       res_atoms_n_polarh)
    # Removing hydrogens must restore the heavy-atom-only state
    protein.removeh()
    assert len(protein.atoms) == 1114
    assert len(protein.residues) == 138
    assert_array_equal([len(res.atoms) for res in protein.residues],
                       res_atoms_n)
def test_mol_calccharges():
    """Partial-charge calculation: valid models yield non-zero charges,
    invalid model names raise, and mmff94 fails on this receptor."""
    mol = oddt.toolkit.readstring('smi', 'c1ccccc1O')
    mol.addh()
    # an unknown charge model name is rejected
    with pytest.raises(ValueError):
        mol.calccharges('mmff94aaaaaa')
    # both supported models must produce at least one non-zero charge
    for model in ('gasteiger', 'mmff94'):
        mol.calccharges(model)
        charges = np.array(mol.charges)
        assert (charges != 0.).any()
    protein = next(oddt.toolkit.readfile('pdb', xiap_receptor))
    protein.protein = True
    # for that protein mmff94 charges could not be generated
    with pytest.raises(Exception):
        protein.calccharges('mmff94')
def test_toolkit_hoh():
    """HOH residues splitting.

    Each water oxygen must land in its own residue (one GLY residue plus
    three waters = 4 residues), and adding hydrogens (polar-only or all)
    must not merge or split residues.
    """
    # Minimal synthetic PDB: one GLY residue followed by three HOH waters.
    # NOTE(review): PDB is a fixed-column format, so the exact spacing of
    # this literal is significant and must not be reflowed.
    pdb_block = """ATOM 1 C1 GLY 1 0.000 0.000 0.000 1.00 0.00 C
ATOM 2 C2 GLY 1 0.000 0.000 0.000 1.00 0.00 C
ATOM 3 O1 GLY 1 0.000 0.000 0.000 1.00 0.00 O
ATOM 4 O2 GLY 1 0.000 0.000 0.000 1.00 0.00 O
ATOM 5 N1 GLY 1 0.000 0.000 0.000 1.00 0.00 N
ATOM 6 O3 HOH 2 0.000 0.000 0.000 1.00 0.00 O
ATOM 7 O4 HOH 3 0.000 0.000 0.000 1.00 0.00 O
ATOM 8 O5 HOH 4 0.000 0.000 0.000 1.00 0.00 O
"""
    protein = oddt.toolkit.readstring('pdb', pdb_block)
    protein.protein = True
    assert len(protein.residues) == 4
    protein.addh(only_polar=True)
    assert len(protein.residues) == 4
    protein.addh()
    assert len(protein.residues) == 4
def test_pickle():
    """Molecules survive a pickle round-trip: titles, SMILES, data fields
    and cached atom_dicts are preserved, for both eager and lazy readers."""
    mols = list(oddt.toolkit.readfile('sdf', xiap_actives))
    pickled_mols = [loads(dumps(mol)) for mol in mols]
    assert_array_equal([mol.title for mol in mols],
                       [mol.title for mol in pickled_mols])
    assert_array_equal([mol.smiles for mol in mols],
                       [mol.smiles for mol in pickled_mols])
    for original, restored in zip(mols, pickled_mols):
        assert dict(original.data) == dict(restored.data)
    # Pickling of atom_dicts: the cache starts empty ...
    assert_array_equal([mol._atom_dict is None for mol in mols],
                       [True] * len(mols))
    # ... touching .atom_dict populates it ...
    mols_atom_dict = np.hstack([mol.atom_dict for mol in mols])
    assert_array_equal([mol._atom_dict is not None for mol in mols],
                       [True] * len(mols))
    # ... and the populated cache survives pickling.
    pickled_mols = [loads(dumps(mol)) for mol in mols]
    assert_array_equal([mol._atom_dict is not None for mol in pickled_mols],
                       [True] * len(mols))
    pickled_mols_atom_dict = np.hstack(
        [mol._atom_dict for mol in pickled_mols])
    for name in mols[0].atom_dict.dtype.names:
        if issubclass(np.dtype(mols_atom_dict[name].dtype).type, np.number):
            assert_array_almost_equal(mols_atom_dict[name],
                                      pickled_mols_atom_dict[name])
        else:
            assert_array_equal(mols_atom_dict[name],
                               pickled_mols_atom_dict[name])
    # Lazy Mols: pickling must keep the lazy source intact
    mols = list(oddt.toolkit.readfile('sdf', xiap_actives, lazy=True))
    pickled_mols = [loads(dumps(mol)) for mol in mols]
    assert_array_equal([mol._source is not None for mol in pickled_mols],
                       [True] * len(mols))
    assert_array_equal([mol.title for mol in mols],
                       [mol.title for mol in pickled_mols])
    assert_array_equal([mol.smiles for mol in mols],
                       [mol.smiles for mol in pickled_mols])
    for original, restored in zip(mols, pickled_mols):
        assert dict(original.data) == dict(restored.data)
def test_diverse_conformers():
    """Diverse conformer generation: RMSD values against the input pose are
    deterministic for a fixed seed, and every method honours n_conf."""
    # FIXME: make toolkit a module so we can import from it
    diverse_conformers_generator = oddt.toolkit.diverse_conformers_generator
    mol = oddt.toolkit.readstring(
        'smi',
        'CN1CCN(S(=O)(C2=CC=C(OCC)C(C3=NC4=C(N(C)N=C4CCC)C(N3)=O)=C2)=O)CC1'
    )
    mol.make3D()
    # RMSD of each generated conformer vs. the original 3D pose
    res = []
    for conf in diverse_conformers_generator(mol, seed=123456):
        res.append(rmsd(mol, conf))
    assert len(res) == 10
    # Reference RMSDs are backend- and version-specific
    if oddt.toolkit.backend == 'ob':
        if oddt.toolkit.__version__ < '0.3':
            assert_array_almost_equal(res, [0., 3.043712, 3.897143, 3.289482,
                                            3.066374, 2.909683, 2.913927,
                                            3.488244, 3.70603, 3.597467])
        else:
            assert_array_almost_equal(res, [0.0, 1.372770, 2.489789, 2.759941,
                                            2.968366, 3.228773, 3.392191,
                                            3.921166, 3.185065, 3.283915])
    # RDKit reference values kept (disabled) for future re-enablement:
    # else:
    #     if oddt.toolkit.__version__ > '2016.03.9':
    #         assert_array_almost_equal(res, [1.237538, 2.346984, 0.900624,
    #                                         3.469511, 1.886213, 2.128909,
    #                                         2.852608, 1.312513, 1.291595,
    #                                         1.326843])
    #     else:
    #         assert_array_almost_equal(res, [3.08995, 2.846358, 3.021795,
    #                                         1.720319, 2.741972, 2.965332,
    #                                         2.925344, 2.930157, 2.934049,
    #                                         3.009545])
    # check all implemented methods
    if oddt.toolkit.backend == 'ob':
        methods = ['ga', 'confab']
    else:
        methods = ['dg', 'etkdg', 'kdg', 'etdg']
    # each method must return exactly the requested number of conformers
    for method in methods:
        assert len(diverse_conformers_generator(mol,
                                                seed=123456,
                                                n_conf=5,
                                                method=method)) == 5
        assert len(diverse_conformers_generator(mol,
                                                seed=123456,
                                                n_conf=10,
                                                method=method)) == 10
        assert len(diverse_conformers_generator(mol,
                                                seed=123456,
                                                n_conf=20,
                                                method=method)) == 20
def test_indices():
    """0- and 1-based atom indices agree; the bare .idx alias warns."""
    mol = oddt.toolkit.readstring('smi', 'CCc1cc(C)c(C)cc1-c1ccc(-c2cccc(C)c2)cc1')
    first_atom = mol.atoms[0]
    assert first_atom.idx0 == 0
    assert first_atom.idx1 == 1
    # the unmarked index is deprecated in ODDT
    with pytest.warns((DeprecationWarning, FutureWarning)):
        assert first_atom.idx == 1
def test_pickle_protein():
    """Pickling preserves the protein flag and the cached atom_dict, and
    flipping the protein flag invalidates the cache."""
    receptor = next(oddt.toolkit.readfile('pdb', xiap_receptor))
    # touching .atom_dict populates the private cache
    assert receptor.atom_dict is not None
    assert receptor._atom_dict is not None
    roundtripped = loads(dumps(receptor))
    assert roundtripped.protein is False
    assert roundtripped._atom_dict is not None
    # setting protein property should clean atom_dict cache
    receptor.protein = True
    assert receptor._atom_dict is None
    # regenerate the cache and pickle again, now flagged as a protein
    assert receptor.atom_dict is not None
    roundtripped = loads(dumps(receptor))
    assert roundtripped.protein is True
    assert roundtripped._atom_dict is not None
# RDKit returns None for unparsable input instead of raising, so this
# check only applies to the RDKit backend.
if oddt.toolkit.backend == 'rdk':
    def test_badmol():
        """Propagate None's for bad molecules"""
        # 'c1cc2' is an invalid SMILES (unclosed rings)
        mol = oddt.toolkit.readstring('smi', 'c1cc2')
        assert mol is None
def test_dicts():
    """Test ODDT numpy structures, aka. dicts.

    Compares atom_dict columns of the XIAP actives (small molecules) and
    the XIAP receptor (protein) against stored reference CSV files.  On a
    mismatch, the offending rows are printed before the assertion fires.
    """
    mols = list(oddt.toolkit.readfile('sdf', xiap_actives))
    for mol in mols:
        mol.addh(only_polar=True)
    # Columns excluded from the comparison (toolkit-dependent or unstable)
    skip_cols = ['radius', 'charge', 'id',
                 # following fields need to be standarized
                 'hybridization',
                 'numhs',
                 'formalcharge',
                 ]
    all_cols = [name for name in mols[0].atom_dict.dtype.names
                if name not in ['coords', 'neighbors', 'neighbors_id']]
    common_cols = [name for name in all_cols if name not in skip_cols]
    # Small molecules: stack all atom_dicts and drop hydrogens
    all_dicts = np.hstack([mol.atom_dict for mol in mols])
    all_dicts = all_dicts[all_dicts['atomicnum'] != 1]
    data = pd.DataFrame({name: all_dicts[name] for name in all_cols})
    # map each heavy atom row back to its source molecule for diagnostics
    data['mol_idx'] = [i
                       for i, mol in enumerate(mols)
                       for atom in mol
                       if atom.atomicnum != 1]
    # Save correct results
    # data[common_cols].to_csv(
    #     os.path.join(test_data_dir, 'data/results/xiap/mols_atom_dict.csv'),
    #     index=False)
    corr_data = pd.read_csv(os.path.join(test_data_dir, 'data', 'results',
                                         'xiap', 'mols_atom_dict.csv')
                            ).fillna('')
    for name in common_cols:
        if issubclass(np.dtype(data[name].dtype).type, np.number):
            # BUGFIX: compare the *absolute* difference; the previous
            # one-sided (data - corr > 1e-6) mask never printed rows where
            # the computed value was smaller than the reference.
            mask = (data[name] - corr_data[name]).abs() > 1e-6
            for i in np.argwhere(mask.values):
                print(i, data[name][i].values, corr_data[name][i].values,
                      mols[data['mol_idx'][int(i)]].write('smi'))
            assert_array_almost_equal(
                data[name],
                corr_data[name],
                err_msg='Mols atom_dict\'s collumn: "%s" is not equal' % name)
        else:
            mask = data[name] != corr_data[name]
            for i in np.argwhere(mask.values):
                print(i, data[name][i].values, corr_data[name][i].values,
                      mols[data['mol_idx'][int(i)]].write('smi'))
            assert_array_equal(
                data[name],
                corr_data[name],
                err_msg='Mols atom_dict\'s collumn: "%s" is not equal' % name)
    # Protein: same comparison against the receptor reference CSV
    rec = next(oddt.toolkit.readfile('pdb', xiap_receptor))
    rec.protein = True
    rec.addh(only_polar=True)
    skip_cols = ['radius', 'charge', 'resid', 'id',
                 # following fields need to be standarized
                 'hybridization',
                 'numhs',
                 'formalcharge',
                 ]
    common_cols = [name for name in all_cols if name not in skip_cols]
    all_dicts = rec.atom_dict[rec.atom_dict['atomicnum'] != 1]
    data = pd.DataFrame({name: all_dicts[name] for name in all_cols})
    # Save correct results
    # data[common_cols].to_csv(
    #     os.path.join(test_data_dir, 'data/results/xiap/prot_atom_dict.csv'),
    #     index=False)
    corr_data = pd.read_csv(os.path.join(test_data_dir, 'data', 'results',
                                         'xiap', 'prot_atom_dict.csv')
                            ).fillna('')
    for name in common_cols:
        if issubclass(np.dtype(data[name].dtype).type, np.number):
            # BUGFIX: absolute difference here as well (see above).
            mask = (data[name] - corr_data[name]).abs() > 1e-6
            for i in np.argwhere(mask.values):
                print(i,
                      data['atomtype'][i].values,
                      data['resname'][i].values,
                      data[name][i].values,
                      corr_data[name][i].values)
            assert_array_almost_equal(
                data[name],
                corr_data[name],
                err_msg='Protein atom_dict\'s collumn: "%s" is not equal' % name)
        else:
            mask = data[name] != corr_data[name]
            for i in np.argwhere(mask.values):
                print(i,
                      data['atomtype'][i].values,
                      data['resname'][i].values,
                      data[name][i].values,
                      corr_data[name][i].values)
            assert_array_equal(
                data[name],
                corr_data[name],
                err_msg='Protein atom_dict\'s collumn: "%s" is not equal' % name)
def test_ss():
    """Secondary structure assignment.

    Checks isalpha/isbeta flags in res_dict on a pure-helix structure
    (1COS), a pure-sheet structure (1ICL) and the XIAP receptor, against
    hand-verified residue index lists.
    """
    # Alpha Helix: every assigned residue must be helical, none sheet
    prot_file = os.path.join(test_data_dir, 'data', 'pdb', '1cos_helix.pdb')
    protein = next(oddt.toolkit.readfile('pdb', prot_file))
    protein.protein = True
    # print(protein.res_dict['resname'])
    # print(protein.res_dict['isalpha'])
    # print(protein.res_dict['isbeta'])
    isalpha = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
               18, 19, 20, 21, 22, 23, 24, 25, 26]
    assert len(protein.res_dict) == 29
    assert_array_equal(np.where(protein.res_dict['isalpha'])[0], isalpha)
    assert protein.res_dict['isalpha'].sum() == 27
    assert protein.res_dict['isbeta'].sum() == 0
    # Beta Sheet: only sheet residues, no helix
    prot_file = os.path.join(test_data_dir, 'data', 'pdb', '1icl_sheet.pdb')
    protein = next(oddt.toolkit.readfile('pdb', prot_file))
    protein.protein = True
    # print(protein.res_dict['resname'])
    # print(protein.res_dict['isalpha'])
    # print(protein.res_dict['isbeta'])
    # print(protein.res_dict['isbeta'])
    # for mask_group in np.split(np.argwhere(protein.res_dict['isbeta']).flatten(),
    #                            np.argwhere(np.diff(np.argwhere(protein.res_dict['isbeta']).flatten()) != 1).flatten() + 1):
    #     print(mask_group + 1, protein.res_dict[mask_group]['resname'])
    isbeta = [2, 3, 4, 5, 10, 11, 12, 13]
    assert len(protein.res_dict) == 29
    assert_array_equal(np.where(protein.res_dict['isbeta'])[0], isbeta)
    assert protein.res_dict['isbeta'].sum() == 8
    assert protein.res_dict['isalpha'].sum() == 0
    # Protein test: mixed structure with both helices and sheets
    protein = next(oddt.toolkit.readfile('pdb', xiap_receptor))
    protein.protein = True
    # print(protein.res_dict['resname'])
    # print(protein.res_dict['isalpha'])
    # for mask_group in np.split(np.argwhere(protein.res_dict['isalpha']).flatten(),
    #                            np.argwhere(np.diff(np.argwhere(protein.res_dict['isalpha']).flatten()) != 1).flatten() + 1):
    #     print(mask_group + 1, protein.res_dict[mask_group]['resname'])
    # print(protein.res_dict['isbeta'])
    # for mask_group in np.split(np.argwhere(protein.res_dict['isbeta']).flatten(),
    #                            np.argwhere(np.diff(np.argwhere(protein.res_dict['isbeta']).flatten()) != 1).flatten() + 1):
    #     print(mask_group + 1, protein.res_dict[mask_group]['resname'])
    isalpha = [15, 16, 17, 18, 19, 20, 28, 29, 30, 31, 32, 33, 63, 64, 65, 66,
               67, 68, 69, 70, 75, 76, 77, 78, 79, 80, 83, 84, 85, 86, 87, 88,
               89, 90, 91, 121, 122, 123, 124, 125, 126, 127, 128]
    isbeta = [36, 37, 38, 45, 46, 47, 52, 53, 54]
    assert_array_equal(np.where(protein.res_dict['isalpha'])[0], isalpha)
    assert_array_equal(np.where(protein.res_dict['isbeta'])[0], isbeta)
    assert len(protein.res_dict) == 136
    assert protein.res_dict['isalpha'].sum() == 43
    assert protein.res_dict['isbeta'].sum() == 9
    # a residue can never be both helix and sheet at once
    assert (protein.res_dict['isalpha'] &
            protein.res_dict['isbeta']).sum() == 0  # Must be zero!
    assert (~protein.res_dict['isalpha'] &
            ~protein.res_dict['isbeta']).sum() == 84
def test_pdbqt():
    """RDKit PDBQT writer and reader.

    Round-trips molecules through the PDBQT format and checks the
    ROOT/BRANCH tree structure (order and per-node atom counts) emitted
    by the writer, including disconnected fragments.
    """
    mol = next(oddt.toolkit.readfile('sdf', xiap_actives))
    mol2 = oddt.toolkit.readstring('pdbqt', mol.write('pdbqt'))
    assert mol.title == mol2.title
    # test loop breaks in DFS algorithm
    mol = oddt.toolkit.readstring('smi', 'CCc1cc(C)c(C)cc1-c1ccc(-c2cccc(C)c2)cc1')
    mol.make3D()
    # roundtrip molecule with template
    mol2 = oddt.toolkit.readstring('pdbqt', mol.write('pdbqt'))
    mol.removeh()
    assert len(mol.atoms) == len(mol2.atoms)

    def nodes_size(block):
        # Count ATOM records under each ROOT/BRANCH node of a PDBQT block,
        # preserving node order of appearance.
        out = OrderedDict()
        current_key = None
        for line in block.split('\n'):
            if line[:4] == 'ROOT' or line[:6] == 'BRANCH':
                current_key = line.strip()
                out[current_key] = 0
            elif line[:4] == 'ATOM':
                out[current_key] += 1
        return list(out.values())
    # check the branch order and size (backend-dependent traversal order)
    if oddt.toolkit.backend == 'ob':
        assert_array_equal(nodes_size(mol.write('pdbqt')),
                           [6, 8, 2, 7])
    else:
        assert_array_equal(nodes_size(mol.write('pdbqt')),
                           [8, 6, 7, 2])
    ligand_file = os.path.join(test_data_dir, 'data', 'dude', 'xiap',
                               'crystal_ligand.sdf')
    mol = next(oddt.toolkit.readfile('sdf', ligand_file))
    assert_array_equal(nodes_size(mol.write('pdbqt')),
                       [8, 3, 6, 6, 1, 6, 3, 2, 2])
    # roundtrip a disconnected fragments
    mol = oddt.toolkit.readstring('smi', 'c1ccccc1.c1ccccc1C')
    # backend-specific flag to write a rigid (non-flexible) PDBQT
    # NOTE(review): 'r' presumably means rigid for openbabel — confirm
    if oddt.toolkit.backend == 'ob':
        kwargs = {'opt': {'r': None}}
    else:
        kwargs = {'flexible': False}
    mol2 = oddt.toolkit.readstring('pdbqt', mol.write('pdbqt', **kwargs))
    assert len(mol.atoms) == len(mol2.atoms)
    mol2 = oddt.toolkit.readstring('pdbqt', mol.write('pdbqt'))
    assert len(mol.atoms) == len(mol2.atoms)
def test_residue_info():
    """Residue count and first-residue properties of a PDB pocket file."""
    mol_file = os.path.join(test_data_dir, 'data', 'pdb', '3kwa_5Apocket.pdb')
    mol = next(oddt.toolkit.readfile('pdb', mol_file))
    assert len(mol.residues) == 19
    first = mol.residues[0]
    # index, PDB residue number, chain id and residue name
    assert (first.idx0, first.number, first.chain, first.name) == \
        (0, 92, 'A', 'GLN')
def test_canonize_ring_path():
    """Canonicalization of ring paths is invariant to rotation and
    reversal, and rejects tuple input."""
    canonical = list(range(6))
    ring = deque(canonical)
    # a rotated ring canonicalizes back to the reference ordering
    ring.rotate(3)
    assert canonize_ring_path(ring) == canonical
    # ... and so does the reversed traversal direction
    ring.reverse()
    assert canonize_ring_path(ring) == canonical
    # a tuple is not an accepted path container
    with pytest.raises(ValueError):
        canonize_ring_path(tuple(range(6)))
|
oddt/oddt
|
tests/test_toolkit.py
|
Python
|
bsd-3-clause
| 21,611
|
[
"RDKit"
] |
d96b3677b5f1751762e0564556af1b4142a6990728ac1863e65d01c7b0e21f2c
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import mock
import grpc
from grpc.experimental import aio
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.oslogin_v1 import common # type: ignore
from google.cloud.oslogin_v1.services.os_login_service import OsLoginServiceAsyncClient
from google.cloud.oslogin_v1.services.os_login_service import OsLoginServiceClient
from google.cloud.oslogin_v1.services.os_login_service import transports
from google.cloud.oslogin_v1.types import oslogin
from google.oauth2 import service_account
from google.protobuf import field_mask_pb2 # type: ignore
import google.auth
def client_cert_source_callback():
    """Dummy mTLS client-cert callback returning placeholder cert/key bytes."""
    cert_bytes = b"cert bytes"
    key_bytes = b"key bytes"
    return cert_bytes, key_bytes
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
    """Return the client's default endpoint, substituting a non-localhost
    host so the derived mTLS endpoint differs during endpoint tests."""
    endpoint = client.DEFAULT_ENDPOINT
    if "localhost" in endpoint:
        return "foo.googleapis.com"
    return endpoint
def test__get_default_mtls_endpoint():
    """_get_default_mtls_endpoint maps *.googleapis.com hosts onto their
    '.mtls.' variants and passes everything else (including None) through."""
    api_endpoint = "example.googleapis.com"
    api_mtls_endpoint = "example.mtls.googleapis.com"
    sandbox_endpoint = "example.sandbox.googleapis.com"
    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
    non_googleapi = "api.example.com"
    # None is passed through unchanged
    assert OsLoginServiceClient._get_default_mtls_endpoint(None) is None
    # plain endpoint -> mtls endpoint
    assert (
        OsLoginServiceClient._get_default_mtls_endpoint(api_endpoint)
        == api_mtls_endpoint
    )
    # already-mtls endpoint is idempotent
    assert (
        OsLoginServiceClient._get_default_mtls_endpoint(api_mtls_endpoint)
        == api_mtls_endpoint
    )
    # sandbox endpoints get the mtls infix inserted before 'sandbox'
    assert (
        OsLoginServiceClient._get_default_mtls_endpoint(sandbox_endpoint)
        == sandbox_mtls_endpoint
    )
    assert (
        OsLoginServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
        == sandbox_mtls_endpoint
    )
    # non-Google hosts are left untouched
    assert (
        OsLoginServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
    )
@pytest.mark.parametrize(
    "client_class", [OsLoginServiceClient, OsLoginServiceAsyncClient,]
)
def test_os_login_service_client_from_service_account_info(client_class):
    """from_service_account_info() forwards the info dict to the credentials
    factory and yields a client bound to the default API host."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(
        service_account.Credentials, "from_service_account_info"
    ) as factory:
        factory.return_value = creds
        info = {"valid": True}
        client = client_class.from_service_account_info(info)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)
        assert client.transport._host == "oslogin.googleapis.com:443"
@pytest.mark.parametrize(
    "transport_class,transport_name",
    [
        (transports.OsLoginServiceGrpcTransport, "grpc"),
        (transports.OsLoginServiceGrpcAsyncIOTransport, "grpc_asyncio"),
    ],
)
def test_os_login_service_client_service_account_always_use_jwt(
    transport_class, transport_name
):
    """always_use_jwt_access=True must enable self-signed JWTs on the
    credentials; False must leave them untouched."""
    with mock.patch.object(
        service_account.Credentials, "with_always_use_jwt_access", create=True
    ) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=True)
        use_jwt.assert_called_once_with(True)
    with mock.patch.object(
        service_account.Credentials, "with_always_use_jwt_access", create=True
    ) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=False)
        use_jwt.assert_not_called()
@pytest.mark.parametrize(
    "client_class", [OsLoginServiceClient, OsLoginServiceAsyncClient,]
)
def test_os_login_service_client_from_service_account_file(client_class):
    """from_service_account_file() (and its _json alias) use the mocked
    credentials factory and bind the client to the default API host."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(
        service_account.Credentials, "from_service_account_file"
    ) as factory:
        factory.return_value = creds
        client = client_class.from_service_account_file("dummy/file/path.json")
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)
        # from_service_account_json is an alias of from_service_account_file
        client = client_class.from_service_account_json("dummy/file/path.json")
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)
        assert client.transport._host == "oslogin.googleapis.com:443"
def test_os_login_service_client_get_transport_class():
    """The default transport is gRPC, and the 'grpc' name resolves to the
    same transport class."""
    default_transport = OsLoginServiceClient.get_transport_class()
    known_transports = [
        transports.OsLoginServiceGrpcTransport,
    ]
    assert default_transport in known_transports
    named_transport = OsLoginServiceClient.get_transport_class("grpc")
    assert named_transport == transports.OsLoginServiceGrpcTransport
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (OsLoginServiceClient, transports.OsLoginServiceGrpcTransport, "grpc"),
        (
            OsLoginServiceAsyncClient,
            transports.OsLoginServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
        ),
    ],
)
@mock.patch.object(
    OsLoginServiceClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(OsLoginServiceClient),
)
@mock.patch.object(
    OsLoginServiceAsyncClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(OsLoginServiceAsyncClient),
)
def test_os_login_service_client_client_options(
    client_class, transport_class, transport_name
):
    """Exercise client_options handling: pre-built transports, explicit
    api_endpoint, GOOGLE_API_USE_MTLS_ENDPOINT values, invalid env-var
    settings, and quota_project_id forwarding."""
    # Check that if channel is provided we won't create a new one.
    with mock.patch.object(OsLoginServiceClient, "get_transport_class") as gtc:
        transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
        client = client_class(transport=transport)
        gtc.assert_not_called()
    # Check that if channel is provided via str we will create a new one.
    with mock.patch.object(OsLoginServiceClient, "get_transport_class") as gtc:
        client = client_class(transport=transport_name)
        gtc.assert_called()
    # Check the case api_endpoint is provided.
    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "never": the plain default endpoint must be used.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )
    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "always": the mTLS endpoint must be used even without a client cert.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_MTLS_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )
    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
    # unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError):
            client = client_class(transport=transport_name)
    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
    ):
        with pytest.raises(ValueError):
            client = client_class(transport=transport_name)
    # Check the case quota_project_id is provided: it must be forwarded verbatim.
    options = client_options.ClientOptions(quota_project_id="octopus")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id="octopus",
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name,use_client_cert_env",
    [
        (OsLoginServiceClient, transports.OsLoginServiceGrpcTransport, "grpc", "true"),
        (
            OsLoginServiceAsyncClient,
            transports.OsLoginServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
            "true",
        ),
        (OsLoginServiceClient, transports.OsLoginServiceGrpcTransport, "grpc", "false"),
        (
            OsLoginServiceAsyncClient,
            transports.OsLoginServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
            "false",
        ),
    ],
)
@mock.patch.object(
    OsLoginServiceClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(OsLoginServiceClient),
)
@mock.patch.object(
    OsLoginServiceAsyncClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(OsLoginServiceAsyncClient),
)
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_os_login_service_client_mtls_env_auto(
    client_class, transport_class, transport_name, use_client_cert_env
):
    """Verify mTLS endpoint autoswitching with GOOGLE_API_USE_MTLS_ENDPOINT=auto.

    Covers three scenarios for both sync and async clients: an explicit
    client_cert_source, an ADC-provided client cert, and no cert at all.
    In each case the transport must be constructed with the expected host
    (mTLS endpoint only when a cert is both available and enabled via
    GOOGLE_API_USE_CLIENT_CERTIFICATE) and the matching cert source.
    """
    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
    # Check the case client_cert_source is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        options = client_options.ClientOptions(
            client_cert_source=client_cert_source_callback
        )
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(client_options=options, transport=transport_name)
            # The cert (and hence the mTLS endpoint) is only used when the
            # env var opts in; "false" must fall back to the plain endpoint.
            if use_client_cert_env == "false":
                expected_client_cert_source = None
                expected_host = client.DEFAULT_ENDPOINT
            else:
                expected_client_cert_source = client_cert_source_callback
                expected_host = client.DEFAULT_MTLS_ENDPOINT
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=expected_host,
                scopes=None,
                client_cert_source_for_mtls=expected_client_cert_source,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )
    # Check the case ADC client cert is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.mtls.has_default_client_cert_source",
                return_value=True,
            ):
                with mock.patch(
                    "google.auth.transport.mtls.default_client_cert_source",
                    return_value=client_cert_source_callback,
                ):
                    if use_client_cert_env == "false":
                        expected_host = client.DEFAULT_ENDPOINT
                        expected_client_cert_source = None
                    else:
                        expected_host = client.DEFAULT_MTLS_ENDPOINT
                        expected_client_cert_source = client_cert_source_callback
                    patched.return_value = None
                    client = client_class(transport=transport_name)
                    patched.assert_called_once_with(
                        credentials=None,
                        credentials_file=None,
                        host=expected_host,
                        scopes=None,
                        client_cert_source_for_mtls=expected_client_cert_source,
                        quota_project_id=None,
                        client_info=transports.base.DEFAULT_CLIENT_INFO,
                        always_use_jwt_access=True,
                    )
    # Check the case client_cert_source and ADC client cert are not provided.
    # With no cert available anywhere, the plain endpoint must be used
    # regardless of GOOGLE_API_USE_CLIENT_CERTIFICATE.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.mtls.has_default_client_cert_source",
                return_value=False,
            ):
                patched.return_value = None
                client = client_class(transport=transport_name)
                patched.assert_called_once_with(
                    credentials=None,
                    credentials_file=None,
                    host=client.DEFAULT_ENDPOINT,
                    scopes=None,
                    client_cert_source_for_mtls=None,
                    quota_project_id=None,
                    client_info=transports.base.DEFAULT_CLIENT_INFO,
                    always_use_jwt_access=True,
                )
@pytest.mark.parametrize(
    "client_class", [OsLoginServiceClient, OsLoginServiceAsyncClient]
)
@mock.patch.object(
    OsLoginServiceClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(OsLoginServiceClient),
)
@mock.patch.object(
    OsLoginServiceAsyncClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(OsLoginServiceAsyncClient),
)
def test_os_login_service_client_get_mtls_endpoint_and_cert_source(client_class):
    """Exercise get_mtls_endpoint_and_cert_source under all env-var settings.

    Checks that the classmethod honors an explicit api_endpoint, the
    GOOGLE_API_USE_CLIENT_CERTIFICATE toggle, and the three
    GOOGLE_API_USE_MTLS_ENDPOINT modes ("never", "always", "auto").
    """
    mock_client_cert_source = mock.Mock()
    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(
            client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint
        )
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
            options
        )
        assert api_endpoint == mock_api_endpoint
        assert cert_source == mock_client_cert_source
    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false".
    # Even with a cert source in the options, "false" must suppress it.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
        mock_client_cert_source = mock.Mock()
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(
            client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint
        )
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
            options
        )
        assert api_endpoint == mock_api_endpoint
        assert cert_source is None
    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_ENDPOINT
        assert cert_source is None
    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
        assert cert_source is None
    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch(
            "google.auth.transport.mtls.has_default_client_cert_source",
            return_value=False,
        ):
            api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
            assert api_endpoint == client_class.DEFAULT_ENDPOINT
            assert cert_source is None
    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch(
            "google.auth.transport.mtls.has_default_client_cert_source",
            return_value=True,
        ):
            with mock.patch(
                "google.auth.transport.mtls.default_client_cert_source",
                return_value=mock_client_cert_source,
            ):
                (
                    api_endpoint,
                    cert_source,
                ) = client_class.get_mtls_endpoint_and_cert_source()
                assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
                assert cert_source == mock_client_cert_source
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (OsLoginServiceClient, transports.OsLoginServiceGrpcTransport, "grpc"),
        (
            OsLoginServiceAsyncClient,
            transports.OsLoginServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
        ),
    ],
)
def test_os_login_service_client_client_options_scopes(
    client_class, transport_class, transport_name
):
    """Scopes supplied via client options must be handed to the transport."""
    opts = client_options.ClientOptions(scopes=["1", "2"],)
    with mock.patch.object(transport_class, "__init__") as transport_init:
        transport_init.return_value = None
        created = client_class(client_options=opts, transport=transport_name)
        # The transport is built with exactly the scopes we supplied.
        transport_init.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=created.DEFAULT_ENDPOINT,
            scopes=["1", "2"],
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name,grpc_helpers",
    [
        (
            OsLoginServiceClient,
            transports.OsLoginServiceGrpcTransport,
            "grpc",
            grpc_helpers,
        ),
        (
            OsLoginServiceAsyncClient,
            transports.OsLoginServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
            grpc_helpers_async,
        ),
    ],
)
def test_os_login_service_client_client_options_credentials_file(
    client_class, transport_class, transport_name, grpc_helpers
):
    """A credentials file named in client options must reach the transport."""
    opts = client_options.ClientOptions(credentials_file="credentials.json")
    with mock.patch.object(transport_class, "__init__") as transport_init:
        transport_init.return_value = None
        created = client_class(client_options=opts, transport=transport_name)
        # The file path is forwarded verbatim; no eager credentials object.
        transport_init.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=created.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
def test_os_login_service_client_client_options_from_dict():
    """A plain dict is accepted wherever ClientOptions is expected."""
    with mock.patch(
        "google.cloud.oslogin_v1.services.os_login_service.transports.OsLoginServiceGrpcTransport.__init__"
    ) as transport_init:
        transport_init.return_value = None
        OsLoginServiceClient(
            client_options={"api_endpoint": "squid.clam.whelk"}
        )
        # The api_endpoint key from the dict must become the transport host.
        transport_init.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name,grpc_helpers",
    [
        (
            OsLoginServiceClient,
            transports.OsLoginServiceGrpcTransport,
            "grpc",
            grpc_helpers,
        ),
        (
            OsLoginServiceAsyncClient,
            transports.OsLoginServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
            grpc_helpers_async,
        ),
    ],
)
def test_os_login_service_client_create_channel_credentials_file(
    client_class, transport_class, transport_name, grpc_helpers
):
    """Credentials loaded from a file must be used when creating the channel.

    First confirms the transport constructor receives the file path, then
    (with the transport un-mocked) confirms that ``create_channel`` is
    called with the credentials loaded from that file rather than ADC.
    """
    # Check the case credentials file is provided.
    options = client_options.ClientOptions(credentials_file="credentials.json")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
    # test that the credentials from file are saved and used as the credentials.
    with mock.patch.object(
        google.auth, "load_credentials_from_file", autospec=True
    ) as load_creds, mock.patch.object(
        google.auth, "default", autospec=True
    ) as adc, mock.patch.object(
        grpc_helpers, "create_channel"
    ) as create_channel:
        creds = ga_credentials.AnonymousCredentials()
        file_creds = ga_credentials.AnonymousCredentials()
        load_creds.return_value = (file_creds, None)
        adc.return_value = (creds, None)
        client = client_class(client_options=options, transport=transport_name)
        # The channel must be built with the file-loaded credentials
        # (file_creds), not the ADC ones (creds).
        create_channel.assert_called_with(
            "oslogin.googleapis.com:443",
            credentials=file_creds,
            credentials_file=None,
            quota_project_id=None,
            default_scopes=(
                "https://www.googleapis.com/auth/cloud-platform",
                "https://www.googleapis.com/auth/compute",
            ),
            scopes=None,
            default_host="oslogin.googleapis.com",
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
@pytest.mark.parametrize("request_type", [oslogin.DeletePosixAccountRequest, dict,])
def test_delete_posix_account(request_type, transport: str = "grpc"):
    """The delete_posix_account RPC should forward the request to the stub."""
    svc = OsLoginServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # proto3 fields are all optional, so an empty request is valid here.
    req = request_type()
    with mock.patch.object(
        type(svc.transport.delete_posix_account), "__call__"
    ) as rpc:
        rpc.return_value = None
        result = svc.delete_posix_account(req)
        # Exactly one stub invocation, carrying the canonical request type.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == oslogin.DeletePosixAccountRequest()
    # Deleting a POSIX account produces no payload.
    assert result is None
def test_delete_posix_account_empty_call():
    """Coverage failsafe: a bare call with no request and no flattened fields."""
    svc = OsLoginServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    with mock.patch.object(
        type(svc.transport.delete_posix_account), "__call__"
    ) as rpc:
        svc.delete_posix_account()
        rpc.assert_called()
        _, call_args, _ = rpc.mock_calls[0]
        # An omitted request defaults to an empty request message.
        assert call_args[0] == oslogin.DeletePosixAccountRequest()
@pytest.mark.asyncio
async def test_delete_posix_account_async(
    transport: str = "grpc_asyncio", request_type=oslogin.DeletePosixAccountRequest
):
    """Async variant: the request reaches the stub and the result is None."""
    svc = OsLoginServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # proto3 fields are all optional, so an empty request is valid here.
    req = request_type()
    with mock.patch.object(
        type(svc.transport.delete_posix_account), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        result = await svc.delete_posix_account(req)
        # The stub saw a call carrying the canonical request type.
        assert len(rpc.mock_calls)
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == oslogin.DeletePosixAccountRequest()
    # Deleting a POSIX account produces no payload.
    assert result is None
@pytest.mark.asyncio
async def test_delete_posix_account_async_from_dict():
    # Re-run the async test with a plain dict standing in for the request proto.
    await test_delete_posix_account_async(request_type=dict)
def test_delete_posix_account_field_headers():
    """Routing fields in the request must be echoed as x-goog-request-params."""
    svc = OsLoginServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Populate a field that forms part of the HTTP/1.1 URI.
    req = oslogin.DeletePosixAccountRequest()
    req.name = "name/value"
    with mock.patch.object(
        type(svc.transport.delete_posix_account), "__call__"
    ) as rpc:
        rpc.return_value = None
        svc.delete_posix_account(req)
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == req
    # The routing header carrying the name must appear in the metadata.
    _, _, call_kwargs = rpc.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in call_kwargs["metadata"]
@pytest.mark.asyncio
async def test_delete_posix_account_field_headers_async():
    """Routing fields must be echoed as x-goog-request-params (async path)."""
    svc = OsLoginServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Populate a field that forms part of the HTTP/1.1 URI.
    req = oslogin.DeletePosixAccountRequest()
    req.name = "name/value"
    with mock.patch.object(
        type(svc.transport.delete_posix_account), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        await svc.delete_posix_account(req)
        assert len(rpc.mock_calls)
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == req
    # The routing header carrying the name must appear in the metadata.
    _, _, call_kwargs = rpc.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in call_kwargs["metadata"]
def test_delete_posix_account_flattened():
    """Flattened keyword arguments must be copied into the request message."""
    svc = OsLoginServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
    with mock.patch.object(
        type(svc.transport.delete_posix_account), "__call__"
    ) as rpc:
        rpc.return_value = None
        # Invoke with a truthy value for each flattened field.
        svc.delete_posix_account(name="name_value",)
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        # The flattened `name` must land on the request object.
        assert call_args[0].name == "name_value"
def test_delete_posix_account_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    svc = OsLoginServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
    with pytest.raises(ValueError):
        svc.delete_posix_account(
            oslogin.DeletePosixAccountRequest(), name="name_value",
        )
@pytest.mark.asyncio
async def test_delete_posix_account_flattened_async():
    """Flattened keyword arguments must be copied into the async request."""
    client = OsLoginServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_posix_account), "__call__"
    ) as call:
        # Designate the awaited return value for the call. (A redundant
        # preceding `call.return_value = None` was immediately overwritten
        # here and has been removed as dead code.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.delete_posix_account(name="name_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val
@pytest.mark.asyncio
async def test_delete_posix_account_flattened_error_async():
    """Mixing a request object with flattened fields must raise (async)."""
    svc = OsLoginServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        await svc.delete_posix_account(
            oslogin.DeletePosixAccountRequest(), name="name_value",
        )
@pytest.mark.parametrize("request_type", [oslogin.DeleteSshPublicKeyRequest, dict,])
def test_delete_ssh_public_key(request_type, transport: str = "grpc"):
    """The delete_ssh_public_key RPC should forward the request to the stub."""
    svc = OsLoginServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # proto3 fields are all optional, so an empty request is valid here.
    req = request_type()
    with mock.patch.object(
        type(svc.transport.delete_ssh_public_key), "__call__"
    ) as rpc:
        rpc.return_value = None
        result = svc.delete_ssh_public_key(req)
        # Exactly one stub invocation, carrying the canonical request type.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == oslogin.DeleteSshPublicKeyRequest()
    # Deleting an SSH public key produces no payload.
    assert result is None
def test_delete_ssh_public_key_empty_call():
    """Coverage failsafe: a bare call with no request and no flattened fields."""
    svc = OsLoginServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    with mock.patch.object(
        type(svc.transport.delete_ssh_public_key), "__call__"
    ) as rpc:
        svc.delete_ssh_public_key()
        rpc.assert_called()
        _, call_args, _ = rpc.mock_calls[0]
        # An omitted request defaults to an empty request message.
        assert call_args[0] == oslogin.DeleteSshPublicKeyRequest()
@pytest.mark.asyncio
async def test_delete_ssh_public_key_async(
    transport: str = "grpc_asyncio", request_type=oslogin.DeleteSshPublicKeyRequest
):
    """Async variant: the request reaches the stub and the result is None."""
    svc = OsLoginServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # proto3 fields are all optional, so an empty request is valid here.
    req = request_type()
    with mock.patch.object(
        type(svc.transport.delete_ssh_public_key), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        result = await svc.delete_ssh_public_key(req)
        # The stub saw a call carrying the canonical request type.
        assert len(rpc.mock_calls)
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == oslogin.DeleteSshPublicKeyRequest()
    # Deleting an SSH public key produces no payload.
    assert result is None
@pytest.mark.asyncio
async def test_delete_ssh_public_key_async_from_dict():
    # Re-run the async test with a plain dict standing in for the request proto.
    await test_delete_ssh_public_key_async(request_type=dict)
def test_delete_ssh_public_key_field_headers():
    """Routing fields in the request must be echoed as x-goog-request-params."""
    svc = OsLoginServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Populate a field that forms part of the HTTP/1.1 URI.
    req = oslogin.DeleteSshPublicKeyRequest()
    req.name = "name/value"
    with mock.patch.object(
        type(svc.transport.delete_ssh_public_key), "__call__"
    ) as rpc:
        rpc.return_value = None
        svc.delete_ssh_public_key(req)
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == req
    # The routing header carrying the name must appear in the metadata.
    _, _, call_kwargs = rpc.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in call_kwargs["metadata"]
@pytest.mark.asyncio
async def test_delete_ssh_public_key_field_headers_async():
    """Routing fields must be echoed as x-goog-request-params (async path)."""
    svc = OsLoginServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Populate a field that forms part of the HTTP/1.1 URI.
    req = oslogin.DeleteSshPublicKeyRequest()
    req.name = "name/value"
    with mock.patch.object(
        type(svc.transport.delete_ssh_public_key), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        await svc.delete_ssh_public_key(req)
        assert len(rpc.mock_calls)
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == req
    # The routing header carrying the name must appear in the metadata.
    _, _, call_kwargs = rpc.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in call_kwargs["metadata"]
def test_delete_ssh_public_key_flattened():
    """Flattened keyword arguments must be copied into the request message."""
    svc = OsLoginServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
    with mock.patch.object(
        type(svc.transport.delete_ssh_public_key), "__call__"
    ) as rpc:
        rpc.return_value = None
        # Invoke with a truthy value for each flattened field.
        svc.delete_ssh_public_key(name="name_value",)
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        # The flattened `name` must land on the request object.
        assert call_args[0].name == "name_value"
def test_delete_ssh_public_key_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    svc = OsLoginServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
    with pytest.raises(ValueError):
        svc.delete_ssh_public_key(
            oslogin.DeleteSshPublicKeyRequest(), name="name_value",
        )
@pytest.mark.asyncio
async def test_delete_ssh_public_key_flattened_async():
    """Flattened keyword arguments must be copied into the async request."""
    client = OsLoginServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_ssh_public_key), "__call__"
    ) as call:
        # Designate the awaited return value for the call. (A redundant
        # preceding `call.return_value = None` was immediately overwritten
        # here and has been removed as dead code.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.delete_ssh_public_key(name="name_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val
@pytest.mark.asyncio
async def test_delete_ssh_public_key_flattened_error_async():
    """Mixing a request object with flattened fields must raise (async)."""
    svc = OsLoginServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        await svc.delete_ssh_public_key(
            oslogin.DeleteSshPublicKeyRequest(), name="name_value",
        )
@pytest.mark.parametrize("request_type", [oslogin.GetLoginProfileRequest, dict,])
def test_get_login_profile(request_type, transport: str = "grpc"):
    """get_login_profile should forward the request and surface the profile."""
    svc = OsLoginServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # proto3 fields are all optional, so an empty request is valid here.
    req = request_type()
    with mock.patch.object(
        type(svc.transport.get_login_profile), "__call__"
    ) as rpc:
        rpc.return_value = oslogin.LoginProfile(name="name_value",)
        result = svc.get_login_profile(req)
        # Exactly one stub invocation, carrying the canonical request type.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == oslogin.GetLoginProfileRequest()
    # The stubbed profile comes back to the caller unchanged.
    assert isinstance(result, oslogin.LoginProfile)
    assert result.name == "name_value"
def test_get_login_profile_empty_call():
    """Coverage failsafe: a bare call with no request and no flattened fields."""
    svc = OsLoginServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    with mock.patch.object(
        type(svc.transport.get_login_profile), "__call__"
    ) as rpc:
        svc.get_login_profile()
        rpc.assert_called()
        _, call_args, _ = rpc.mock_calls[0]
        # An omitted request defaults to an empty request message.
        assert call_args[0] == oslogin.GetLoginProfileRequest()
@pytest.mark.asyncio
async def test_get_login_profile_async(
    transport: str = "grpc_asyncio", request_type=oslogin.GetLoginProfileRequest
):
    """Async variant: the request reaches the stub and the profile is returned."""
    svc = OsLoginServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # proto3 fields are all optional, so an empty request is valid here.
    req = request_type()
    with mock.patch.object(
        type(svc.transport.get_login_profile), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            oslogin.LoginProfile(name="name_value",)
        )
        result = await svc.get_login_profile(req)
        # The stub saw a call carrying the canonical request type.
        assert len(rpc.mock_calls)
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == oslogin.GetLoginProfileRequest()
    # The stubbed profile comes back to the caller unchanged.
    assert isinstance(result, oslogin.LoginProfile)
    assert result.name == "name_value"
@pytest.mark.asyncio
async def test_get_login_profile_async_from_dict():
    # Re-run the async test with a plain dict standing in for the request proto.
    await test_get_login_profile_async(request_type=dict)
def test_get_login_profile_field_headers():
    """Routing fields in the request must be echoed as x-goog-request-params."""
    svc = OsLoginServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Populate a field that forms part of the HTTP/1.1 URI.
    req = oslogin.GetLoginProfileRequest()
    req.name = "name/value"
    with mock.patch.object(
        type(svc.transport.get_login_profile), "__call__"
    ) as rpc:
        rpc.return_value = oslogin.LoginProfile()
        svc.get_login_profile(req)
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == req
    # The routing header carrying the name must appear in the metadata.
    _, _, call_kwargs = rpc.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in call_kwargs["metadata"]
@pytest.mark.asyncio
async def test_get_login_profile_field_headers_async():
    """Routing fields must be echoed as x-goog-request-params (async path)."""
    svc = OsLoginServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Populate a field that forms part of the HTTP/1.1 URI.
    req = oslogin.GetLoginProfileRequest()
    req.name = "name/value"
    with mock.patch.object(
        type(svc.transport.get_login_profile), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            oslogin.LoginProfile()
        )
        await svc.get_login_profile(req)
        assert len(rpc.mock_calls)
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == req
    # The routing header carrying the name must appear in the metadata.
    _, _, call_kwargs = rpc.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in call_kwargs["metadata"]
def test_get_login_profile_flattened():
    """Flattened keyword arguments must be copied into the request message."""
    svc = OsLoginServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
    with mock.patch.object(
        type(svc.transport.get_login_profile), "__call__"
    ) as rpc:
        rpc.return_value = oslogin.LoginProfile()
        # Invoke with a truthy value for each flattened field.
        svc.get_login_profile(name="name_value",)
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        # The flattened `name` must land on the request object.
        assert call_args[0].name == "name_value"
def test_get_login_profile_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    svc = OsLoginServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
    with pytest.raises(ValueError):
        svc.get_login_profile(
            oslogin.GetLoginProfileRequest(), name="name_value",
        )
@pytest.mark.asyncio
async def test_get_login_profile_flattened_async():
    """Flattened keyword arguments must be copied into the async request."""
    client = OsLoginServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_login_profile), "__call__"
    ) as call:
        # Designate the awaited return value for the call. (A redundant
        # preceding `call.return_value = oslogin.LoginProfile()` was
        # immediately overwritten here and has been removed as dead code.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            oslogin.LoginProfile()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.get_login_profile(name="name_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val
@pytest.mark.asyncio
async def test_get_login_profile_flattened_error_async():
    """Mixing a request object with flattened fields must raise (async)."""
    svc = OsLoginServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        await svc.get_login_profile(
            oslogin.GetLoginProfileRequest(), name="name_value",
        )
@pytest.mark.parametrize("request_type", [oslogin.GetSshPublicKeyRequest, dict,])
def test_get_ssh_public_key(request_type, transport: str = "grpc"):
    """get_ssh_public_key should forward the request and surface the key."""
    svc = OsLoginServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # proto3 fields are all optional, so an empty request is valid here.
    req = request_type()
    with mock.patch.object(
        type(svc.transport.get_ssh_public_key), "__call__"
    ) as rpc:
        rpc.return_value = common.SshPublicKey(
            key="key_value",
            expiration_time_usec=2144,
            fingerprint="fingerprint_value",
            name="name_value",
        )
        result = svc.get_ssh_public_key(req)
        # Exactly one stub invocation, carrying the canonical request type.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == oslogin.GetSshPublicKeyRequest()
    # Every stubbed field round-trips to the caller.
    assert isinstance(result, common.SshPublicKey)
    assert result.key == "key_value"
    assert result.expiration_time_usec == 2144
    assert result.fingerprint == "fingerprint_value"
    assert result.name == "name_value"
def test_get_ssh_public_key_empty_call():
    """Calling with no request and no flattened fields still reaches the stub."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = OsLoginServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_ssh_public_key), "__call__"
    ) as call:
        client.get_ssh_public_key()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == oslogin.GetSshPublicKeyRequest()
@pytest.mark.asyncio
async def test_get_ssh_public_key_async(
    transport: str = "grpc_asyncio", request_type=oslogin.GetSshPublicKeyRequest
):
    """Async get_ssh_public_key forwards the request and unpacks the response."""
    client = OsLoginServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_ssh_public_key), "__call__"
    ) as call:
        # Designate an appropriate return value for the call; the async
        # transport awaits it, hence the fake unary-unary wrapper.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            common.SshPublicKey(
                key="key_value",
                expiration_time_usec=2144,
                fingerprint="fingerprint_value",
                name="name_value",
            )
        )
        response = await client.get_ssh_public_key(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == oslogin.GetSshPublicKeyRequest()
        # Establish that the response is the type that we expect.
        assert isinstance(response, common.SshPublicKey)
        assert response.key == "key_value"
        assert response.expiration_time_usec == 2144
        assert response.fingerprint == "fingerprint_value"
        assert response.name == "name_value"
@pytest.mark.asyncio
async def test_get_ssh_public_key_async_from_dict():
    """The async path also accepts a plain dict as the request."""
    await test_get_ssh_public_key_async(request_type=dict)
def test_get_ssh_public_key_field_headers():
    """The request's ``name`` is routed as an x-goog-request-params header."""
    client = OsLoginServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = oslogin.GetSshPublicKeyRequest()
    request.name = "name/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_ssh_public_key), "__call__"
    ) as call:
        call.return_value = common.SshPublicKey()
        client.get_ssh_public_key(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
        # Establish that the field header was sent.
        _, _, kw = call.mock_calls[0]
        assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_get_ssh_public_key_field_headers_async():
    """Async variant: ``name`` is routed as an x-goog-request-params header."""
    client = OsLoginServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = oslogin.GetSshPublicKeyRequest()
    request.name = "name/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_ssh_public_key), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.SshPublicKey())
        await client.get_ssh_public_key(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
        # Establish that the field header was sent.
        _, _, kw = call.mock_calls[0]
        assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_get_ssh_public_key_flattened():
    """Flattened kwargs are copied into the generated request object."""
    client = OsLoginServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_ssh_public_key), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = common.SshPublicKey()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.get_ssh_public_key(name="name_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val
def test_get_ssh_public_key_flattened_error():
    """Mixing a request object with flattened kwargs must raise ValueError."""
    client = OsLoginServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.get_ssh_public_key(
            oslogin.GetSshPublicKeyRequest(), name="name_value",
        )
@pytest.mark.asyncio
async def test_get_ssh_public_key_flattened_async():
    """Async flattened call builds the request from keyword arguments.

    Mocks the gRPC stub, invokes ``get_ssh_public_key(name=...)`` and checks
    that exactly one underlying call was made with the flattened value
    copied into the request object.
    """
    client = OsLoginServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_ssh_public_key), "__call__"
    ) as call:
        # The async transport awaits the stub call, so the return value must
        # be a fake unary-unary call wrapping the response message.
        # (The previous bare ``common.SshPublicKey()`` assignment was dead
        # code — immediately overwritten — and was removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.SshPublicKey())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.get_ssh_public_key(name="name_value",)
        # Establish that the underlying call was made exactly once with the
        # expected request object values (``== 1`` matches the sync variant).
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val
@pytest.mark.asyncio
async def test_get_ssh_public_key_flattened_error_async():
    """Async variant: request object plus flattened kwargs raises ValueError."""
    client = OsLoginServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.get_ssh_public_key(
            oslogin.GetSshPublicKeyRequest(), name="name_value",
        )
@pytest.mark.parametrize("request_type", [oslogin.ImportSshPublicKeyRequest, dict,])
def test_import_ssh_public_key(request_type, transport: str = "grpc"):
client = OsLoginServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.import_ssh_public_key), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = oslogin.ImportSshPublicKeyResponse()
response = client.import_ssh_public_key(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == oslogin.ImportSshPublicKeyRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, oslogin.ImportSshPublicKeyResponse)
def test_import_ssh_public_key_empty_call():
    """Calling with no request and no flattened fields still reaches the stub."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = OsLoginServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.import_ssh_public_key), "__call__"
    ) as call:
        client.import_ssh_public_key()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == oslogin.ImportSshPublicKeyRequest()
@pytest.mark.asyncio
async def test_import_ssh_public_key_async(
    transport: str = "grpc_asyncio", request_type=oslogin.ImportSshPublicKeyRequest
):
    """Async import_ssh_public_key forwards the request and returns the response."""
    client = OsLoginServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.import_ssh_public_key), "__call__"
    ) as call:
        # Designate an appropriate return value for the call; the async
        # transport awaits it, hence the fake unary-unary wrapper.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            oslogin.ImportSshPublicKeyResponse()
        )
        response = await client.import_ssh_public_key(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == oslogin.ImportSshPublicKeyRequest()
        # Establish that the response is the type that we expect.
        assert isinstance(response, oslogin.ImportSshPublicKeyResponse)
@pytest.mark.asyncio
async def test_import_ssh_public_key_async_from_dict():
    """The async path also accepts a plain dict as the request."""
    await test_import_ssh_public_key_async(request_type=dict)
def test_import_ssh_public_key_field_headers():
    """The request's ``parent`` is routed as an x-goog-request-params header."""
    client = OsLoginServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = oslogin.ImportSshPublicKeyRequest()
    request.parent = "parent/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.import_ssh_public_key), "__call__"
    ) as call:
        call.return_value = oslogin.ImportSshPublicKeyResponse()
        client.import_ssh_public_key(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
        # Establish that the field header was sent.
        _, _, kw = call.mock_calls[0]
        assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_import_ssh_public_key_field_headers_async():
    """Async variant: ``parent`` is routed as an x-goog-request-params header."""
    client = OsLoginServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = oslogin.ImportSshPublicKeyRequest()
    request.parent = "parent/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.import_ssh_public_key), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            oslogin.ImportSshPublicKeyResponse()
        )
        await client.import_ssh_public_key(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
        # Establish that the field header was sent.
        _, _, kw = call.mock_calls[0]
        assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_import_ssh_public_key_flattened():
    """All three flattened kwargs are copied into the generated request."""
    client = OsLoginServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.import_ssh_public_key), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = oslogin.ImportSshPublicKeyResponse()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.import_ssh_public_key(
            parent="parent_value",
            ssh_public_key=common.SshPublicKey(key="key_value"),
            project_id="project_id_value",
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val
        arg = args[0].ssh_public_key
        mock_val = common.SshPublicKey(key="key_value")
        assert arg == mock_val
        arg = args[0].project_id
        mock_val = "project_id_value"
        assert arg == mock_val
def test_import_ssh_public_key_flattened_error():
    """Mixing a request object with flattened kwargs must raise ValueError."""
    client = OsLoginServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.import_ssh_public_key(
            oslogin.ImportSshPublicKeyRequest(),
            parent="parent_value",
            ssh_public_key=common.SshPublicKey(key="key_value"),
            project_id="project_id_value",
        )
@pytest.mark.asyncio
async def test_import_ssh_public_key_flattened_async():
    """Async flattened call copies all three kwargs into the request.

    Mocks the gRPC stub, invokes ``import_ssh_public_key`` with flattened
    arguments, and checks that exactly one underlying call carries them.
    """
    client = OsLoginServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.import_ssh_public_key), "__call__"
    ) as call:
        # The async transport awaits the stub call, so the return value must
        # be a fake unary-unary call wrapping the response message.
        # (The previous bare ``ImportSshPublicKeyResponse()`` assignment was
        # dead code — immediately overwritten — and was removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            oslogin.ImportSshPublicKeyResponse()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.import_ssh_public_key(
            parent="parent_value",
            ssh_public_key=common.SshPublicKey(key="key_value"),
            project_id="project_id_value",
        )
        # Establish that the underlying call was made exactly once with the
        # expected request object values (``== 1`` matches the sync variant).
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val
        arg = args[0].ssh_public_key
        mock_val = common.SshPublicKey(key="key_value")
        assert arg == mock_val
        arg = args[0].project_id
        mock_val = "project_id_value"
        assert arg == mock_val
@pytest.mark.asyncio
async def test_import_ssh_public_key_flattened_error_async():
    """Async variant: request object plus flattened kwargs raises ValueError."""
    client = OsLoginServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.import_ssh_public_key(
            oslogin.ImportSshPublicKeyRequest(),
            parent="parent_value",
            ssh_public_key=common.SshPublicKey(key="key_value"),
            project_id="project_id_value",
        )
@pytest.mark.parametrize("request_type", [oslogin.UpdateSshPublicKeyRequest, dict,])
def test_update_ssh_public_key(request_type, transport: str = "grpc"):
client = OsLoginServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_ssh_public_key), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = common.SshPublicKey(
key="key_value",
expiration_time_usec=2144,
fingerprint="fingerprint_value",
name="name_value",
)
response = client.update_ssh_public_key(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == oslogin.UpdateSshPublicKeyRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, common.SshPublicKey)
assert response.key == "key_value"
assert response.expiration_time_usec == 2144
assert response.fingerprint == "fingerprint_value"
assert response.name == "name_value"
def test_update_ssh_public_key_empty_call():
    """Calling with no request and no flattened fields still reaches the stub."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = OsLoginServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_ssh_public_key), "__call__"
    ) as call:
        client.update_ssh_public_key()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == oslogin.UpdateSshPublicKeyRequest()
@pytest.mark.asyncio
async def test_update_ssh_public_key_async(
    transport: str = "grpc_asyncio", request_type=oslogin.UpdateSshPublicKeyRequest
):
    """Async update_ssh_public_key forwards the request and unpacks the response."""
    client = OsLoginServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_ssh_public_key), "__call__"
    ) as call:
        # Designate an appropriate return value for the call; the async
        # transport awaits it, hence the fake unary-unary wrapper.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            common.SshPublicKey(
                key="key_value",
                expiration_time_usec=2144,
                fingerprint="fingerprint_value",
                name="name_value",
            )
        )
        response = await client.update_ssh_public_key(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == oslogin.UpdateSshPublicKeyRequest()
        # Establish that the response is the type that we expect.
        assert isinstance(response, common.SshPublicKey)
        assert response.key == "key_value"
        assert response.expiration_time_usec == 2144
        assert response.fingerprint == "fingerprint_value"
        assert response.name == "name_value"
@pytest.mark.asyncio
async def test_update_ssh_public_key_async_from_dict():
    """The async path also accepts a plain dict as the request."""
    await test_update_ssh_public_key_async(request_type=dict)
def test_update_ssh_public_key_field_headers():
    """The request's ``name`` is routed as an x-goog-request-params header."""
    client = OsLoginServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = oslogin.UpdateSshPublicKeyRequest()
    request.name = "name/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_ssh_public_key), "__call__"
    ) as call:
        call.return_value = common.SshPublicKey()
        client.update_ssh_public_key(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
        # Establish that the field header was sent.
        _, _, kw = call.mock_calls[0]
        assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_update_ssh_public_key_field_headers_async():
    """Async variant: ``name`` is routed as an x-goog-request-params header."""
    client = OsLoginServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = oslogin.UpdateSshPublicKeyRequest()
    request.name = "name/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_ssh_public_key), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.SshPublicKey())
        await client.update_ssh_public_key(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
        # Establish that the field header was sent.
        _, _, kw = call.mock_calls[0]
        assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_update_ssh_public_key_flattened():
    """All three flattened kwargs are copied into the generated request."""
    client = OsLoginServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_ssh_public_key), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = common.SshPublicKey()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.update_ssh_public_key(
            name="name_value",
            ssh_public_key=common.SshPublicKey(key="key_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val
        arg = args[0].ssh_public_key
        mock_val = common.SshPublicKey(key="key_value")
        assert arg == mock_val
        arg = args[0].update_mask
        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
        assert arg == mock_val
def test_update_ssh_public_key_flattened_error():
    """Mixing a request object with flattened kwargs must raise ValueError."""
    client = OsLoginServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.update_ssh_public_key(
            oslogin.UpdateSshPublicKeyRequest(),
            name="name_value",
            ssh_public_key=common.SshPublicKey(key="key_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
@pytest.mark.asyncio
async def test_update_ssh_public_key_flattened_async():
    """Async flattened call copies all three kwargs into the request.

    Mocks the gRPC stub, invokes ``update_ssh_public_key`` with flattened
    arguments, and checks that exactly one underlying call carries them.
    """
    client = OsLoginServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_ssh_public_key), "__call__"
    ) as call:
        # The async transport awaits the stub call, so the return value must
        # be a fake unary-unary call wrapping the response message.
        # (The previous bare ``common.SshPublicKey()`` assignment was dead
        # code — immediately overwritten — and was removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.SshPublicKey())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.update_ssh_public_key(
            name="name_value",
            ssh_public_key=common.SshPublicKey(key="key_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
        # Establish that the underlying call was made exactly once with the
        # expected request object values (``== 1`` matches the sync variant).
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val
        arg = args[0].ssh_public_key
        mock_val = common.SshPublicKey(key="key_value")
        assert arg == mock_val
        arg = args[0].update_mask
        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
        assert arg == mock_val
@pytest.mark.asyncio
async def test_update_ssh_public_key_flattened_error_async():
    """Async variant: request object plus flattened kwargs raises ValueError."""
    client = OsLoginServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.update_ssh_public_key(
            oslogin.UpdateSshPublicKeyRequest(),
            name="name_value",
            ssh_public_key=common.SshPublicKey(key="key_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
def test_credentials_transport_error():
    """Client construction rejects conflicting credential/transport options."""
    # It is an error to provide credentials and a transport instance.
    transport = transports.OsLoginServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = OsLoginServiceClient(
            credentials=ga_credentials.AnonymousCredentials(), transport=transport,
        )
    # It is an error to provide a credentials file and a transport instance.
    transport = transports.OsLoginServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = OsLoginServiceClient(
            client_options={"credentials_file": "credentials.json"},
            transport=transport,
        )
    # It is an error to provide an api_key and a transport instance.
    transport = transports.OsLoginServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    options = client_options.ClientOptions()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = OsLoginServiceClient(client_options=options, transport=transport,)
    # It is an error to provide an api_key and a credential.
    options = mock.Mock()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = OsLoginServiceClient(
            client_options=options, credentials=ga_credentials.AnonymousCredentials()
        )
    # It is an error to provide scopes and a transport instance.
    transport = transports.OsLoginServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = OsLoginServiceClient(
            client_options={"scopes": ["1", "2"]}, transport=transport,
        )
def test_transport_instance():
    """A custom transport instance passed to the client is used as-is."""
    # A client may be instantiated with a custom transport instance.
    transport = transports.OsLoginServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    client = OsLoginServiceClient(transport=transport)
    assert client.transport is transport
def test_transport_get_channel():
    """Both grpc and grpc-asyncio transports expose a usable channel."""
    # A client may be instantiated with a custom transport instance.
    transport = transports.OsLoginServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    channel = transport.grpc_channel
    assert channel
    transport = transports.OsLoginServiceGrpcAsyncIOTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    channel = transport.grpc_channel
    assert channel
@pytest.mark.parametrize(
"transport_class",
[
transports.OsLoginServiceGrpcTransport,
transports.OsLoginServiceGrpcAsyncIOTransport,
],
)
def test_transport_adc(transport_class):
# Test default credentials are used if not provided.
with mock.patch.object(google.auth, "default") as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport_class()
adc.assert_called_once()
def test_transport_grpc_default():
    """The synchronous client defaults to the gRPC transport."""
    # A client should use the gRPC transport by default.
    client = OsLoginServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
    assert isinstance(client.transport, transports.OsLoginServiceGrpcTransport,)
def test_os_login_service_base_transport_error():
    """Supplying both credentials and a credentials file raises an error."""
    # Passing both a credentials object and credentials_file should raise an error
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transport = transports.OsLoginServiceTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json",
        )
def test_os_login_service_base_transport():
    """Every RPC on the abstract base transport raises NotImplementedError."""
    # Instantiate the base transport.
    with mock.patch(
        "google.cloud.oslogin_v1.services.os_login_service.transports.OsLoginServiceTransport.__init__"
    ) as Transport:
        Transport.return_value = None
        transport = transports.OsLoginServiceTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )
    # Every method on the transport should just blindly
    # raise NotImplementedError.
    methods = (
        "delete_posix_account",
        "delete_ssh_public_key",
        "get_login_profile",
        "get_ssh_public_key",
        "import_ssh_public_key",
        "update_ssh_public_key",
    )
    for method in methods:
        with pytest.raises(NotImplementedError):
            getattr(transport, method)(request=object())
    with pytest.raises(NotImplementedError):
        transport.close()
def test_os_login_service_base_transport_with_credentials_file():
    """A credentials file is loaded with the service's default OAuth scopes."""
    # Instantiate the base transport with a credentials file
    with mock.patch.object(
        google.auth, "load_credentials_from_file", autospec=True
    ) as load_creds, mock.patch(
        "google.cloud.oslogin_v1.services.os_login_service.transports.OsLoginServiceTransport._prep_wrapped_messages"
    ) as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.OsLoginServiceTransport(
            credentials_file="credentials.json", quota_project_id="octopus",
        )
        load_creds.assert_called_once_with(
            "credentials.json",
            scopes=None,
            default_scopes=(
                "https://www.googleapis.com/auth/cloud-platform",
                "https://www.googleapis.com/auth/compute",
            ),
            quota_project_id="octopus",
        )
def test_os_login_service_base_transport_with_adc():
    """Base transport falls back to ADC when no credentials are supplied."""
    # Test the default credentials are used if credentials and credentials_file are None.
    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
        "google.cloud.oslogin_v1.services.os_login_service.transports.OsLoginServiceTransport._prep_wrapped_messages"
    ) as Transport:
        Transport.return_value = None
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.OsLoginServiceTransport()
        adc.assert_called_once()
def test_os_login_service_auth_adc():
    """The client requests ADC with the service's default OAuth scopes."""
    # If no credentials are provided, we should use ADC credentials.
    with mock.patch.object(google.auth, "default", autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        OsLoginServiceClient()
        adc.assert_called_once_with(
            scopes=None,
            default_scopes=(
                "https://www.googleapis.com/auth/cloud-platform",
                "https://www.googleapis.com/auth/compute",
            ),
            quota_project_id=None,
        )
@pytest.mark.parametrize(
"transport_class",
[
transports.OsLoginServiceGrpcTransport,
transports.OsLoginServiceGrpcAsyncIOTransport,
],
)
def test_os_login_service_transport_auth_adc(transport_class):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
with mock.patch.object(google.auth, "default", autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport_class(quota_project_id="octopus", scopes=["1", "2"])
adc.assert_called_once_with(
scopes=["1", "2"],
default_scopes=(
"https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute",
),
quota_project_id="octopus",
)
@pytest.mark.parametrize(
"transport_class,grpc_helpers",
[
(transports.OsLoginServiceGrpcTransport, grpc_helpers),
(transports.OsLoginServiceGrpcAsyncIOTransport, grpc_helpers_async),
],
)
def test_os_login_service_transport_create_channel(transport_class, grpc_helpers):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
with mock.patch.object(
google.auth, "default", autospec=True
) as adc, mock.patch.object(
grpc_helpers, "create_channel", autospec=True
) as create_channel:
creds = ga_credentials.AnonymousCredentials()
adc.return_value = (creds, None)
transport_class(quota_project_id="octopus", scopes=["1", "2"])
create_channel.assert_called_with(
"oslogin.googleapis.com:443",
credentials=creds,
credentials_file=None,
quota_project_id="octopus",
default_scopes=(
"https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute",
),
scopes=["1", "2"],
default_host="oslogin.googleapis.com",
ssl_credentials=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
@pytest.mark.parametrize(
"transport_class",
[
transports.OsLoginServiceGrpcTransport,
transports.OsLoginServiceGrpcAsyncIOTransport,
],
)
def test_os_login_service_grpc_transport_client_cert_source_for_mtls(transport_class):
cred = ga_credentials.AnonymousCredentials()
# Check ssl_channel_credentials is used if provided.
with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
mock_ssl_channel_creds = mock.Mock()
transport_class(
host="squid.clam.whelk",
credentials=cred,
ssl_channel_credentials=mock_ssl_channel_creds,
)
mock_create_channel.assert_called_once_with(
"squid.clam.whelk:443",
credentials=cred,
credentials_file=None,
scopes=None,
ssl_credentials=mock_ssl_channel_creds,
quota_project_id=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
# is used.
with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
transport_class(
credentials=cred,
client_cert_source_for_mtls=client_cert_source_callback,
)
expected_cert, expected_key = client_cert_source_callback()
mock_ssl_cred.assert_called_once_with(
certificate_chain=expected_cert, private_key=expected_key
)
def test_os_login_service_host_no_port():
    """A portless api_endpoint resolves to the default port 443."""
    options = client_options.ClientOptions(api_endpoint="oslogin.googleapis.com")
    client = OsLoginServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=options,
    )
    assert client.transport._host == "oslogin.googleapis.com:443"
def test_os_login_service_host_with_port():
    """An explicit port in api_endpoint is preserved on the transport host."""
    options = client_options.ClientOptions(api_endpoint="oslogin.googleapis.com:8000")
    client = OsLoginServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=options,
    )
    assert client.transport._host == "oslogin.googleapis.com:8000"
def test_os_login_service_grpc_transport_channel():
    """A caller-supplied gRPC channel is adopted verbatim by the transport."""
    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())

    # Check that channel is used if provided.
    transport = transports.OsLoginServiceGrpcTransport(
        host="squid.clam.whelk", channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    # Fixed: comparison to None must use ``is`` (PEP 8 / E711), not ``==``.
    assert transport._ssl_channel_credentials is None
def test_os_login_service_grpc_asyncio_transport_channel():
    """A caller-supplied asyncio gRPC channel is adopted verbatim."""
    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())

    # Check that channel is used if provided.
    transport = transports.OsLoginServiceGrpcAsyncIOTransport(
        host="squid.clam.whelk", channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    # Fixed: comparison to None must use ``is`` (PEP 8 / E711), not ``==``.
    assert transport._ssl_channel_credentials is None
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.OsLoginServiceGrpcTransport,
        transports.OsLoginServiceGrpcAsyncIOTransport,
    ],
)
def test_os_login_service_transport_channel_mtls_with_client_cert_source(
    transport_class,
):
    """Deprecated api_mtls_endpoint + client_cert_source path still builds an mTLS channel.

    Asserts a DeprecationWarning is emitted, ADC is consulted for credentials,
    and the cert/key pair from the callback reaches grpc's SSL factory and
    ``create_channel``.
    """
    with mock.patch(
        "grpc.ssl_channel_credentials", autospec=True
    ) as grpc_ssl_channel_cred:
        with mock.patch.object(
            transport_class, "create_channel"
        ) as grpc_create_channel:
            mock_ssl_cred = mock.Mock()
            grpc_ssl_channel_cred.return_value = mock_ssl_cred

            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel

            cred = ga_credentials.AnonymousCredentials()
            with pytest.warns(DeprecationWarning):
                with mock.patch.object(google.auth, "default") as adc:
                    adc.return_value = (cred, None)
                    transport = transport_class(
                        host="squid.clam.whelk",
                        api_mtls_endpoint="mtls.squid.clam.whelk",
                        client_cert_source=client_cert_source_callback,
                    )
                    adc.assert_called_once()

            # cert bytes come from client_cert_source_callback defined in this file
            grpc_ssl_channel_cred.assert_called_once_with(
                certificate_chain=b"cert bytes", private_key=b"key bytes"
            )
            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=cred,
                credentials_file=None,
                scopes=None,
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel
            assert transport._ssl_channel_credentials == mock_ssl_cred
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.OsLoginServiceGrpcTransport,
        transports.OsLoginServiceGrpcAsyncIOTransport,
    ],
)
def test_os_login_service_transport_channel_mtls_with_adc(transport_class):
    """Deprecated api_mtls_endpoint without a cert source falls back to ADC SSL creds.

    With ``client_cert_source=None``, the SSL credentials are taken from
    google.auth's ``SslCredentials`` (patched here) and passed through to
    ``create_channel`` along with the explicit credentials.
    """
    mock_ssl_cred = mock.Mock()
    with mock.patch.multiple(
        "google.auth.transport.grpc.SslCredentials",
        __init__=mock.Mock(return_value=None),
        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
    ):
        with mock.patch.object(
            transport_class, "create_channel"
        ) as grpc_create_channel:
            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel
            mock_cred = mock.Mock()

            with pytest.warns(DeprecationWarning):
                transport = transport_class(
                    host="squid.clam.whelk",
                    credentials=mock_cred,
                    api_mtls_endpoint="mtls.squid.clam.whelk",
                    client_cert_source=None,
                )

            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=mock_cred,
                credentials_file=None,
                scopes=None,
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel
def test_posix_account_path():
    """posix_account_path builds the users/{user}/projects/{project} name."""
    sample_user, sample_project = "squid", "clam"
    want = f"users/{sample_user}/projects/{sample_project}"
    got = OsLoginServiceClient.posix_account_path(sample_user, sample_project)
    assert got == want
def test_parse_posix_account_path():
    """Parsing a posix-account path recovers the original components."""
    components = {
        "user": "whelk",
        "project": "octopus",
    }
    # Check that the path construction is reversible.
    round_tripped = OsLoginServiceClient.parse_posix_account_path(
        OsLoginServiceClient.posix_account_path(**components)
    )
    assert round_tripped == components
def test_ssh_public_key_path():
    """ssh_public_key_path builds users/{user}/sshPublicKeys/{fingerprint}."""
    sample_user, sample_fp = "oyster", "nudibranch"
    want = f"users/{sample_user}/sshPublicKeys/{sample_fp}"
    got = OsLoginServiceClient.ssh_public_key_path(sample_user, sample_fp)
    assert got == want
def test_parse_ssh_public_key_path():
    """Parsing an ssh-public-key path recovers the original components."""
    components = {
        "user": "cuttlefish",
        "fingerprint": "mussel",
    }
    # Check that the path construction is reversible.
    round_tripped = OsLoginServiceClient.parse_ssh_public_key_path(
        OsLoginServiceClient.ssh_public_key_path(**components)
    )
    assert round_tripped == components
def test_common_billing_account_path():
    """common_billing_account_path builds billingAccounts/{billing_account}."""
    account = "winkle"
    want = f"billingAccounts/{account}"
    got = OsLoginServiceClient.common_billing_account_path(account)
    assert got == want
def test_parse_common_billing_account_path():
    """Parsing a billing-account path recovers the original components."""
    components = {
        "billing_account": "nautilus",
    }
    # Check that the path construction is reversible.
    round_tripped = OsLoginServiceClient.parse_common_billing_account_path(
        OsLoginServiceClient.common_billing_account_path(**components)
    )
    assert round_tripped == components
def test_common_folder_path():
    """common_folder_path builds folders/{folder}."""
    folder_id = "scallop"
    want = f"folders/{folder_id}"
    got = OsLoginServiceClient.common_folder_path(folder_id)
    assert got == want
def test_parse_common_folder_path():
    """Parsing a folder path recovers the original components."""
    components = {
        "folder": "abalone",
    }
    # Check that the path construction is reversible.
    round_tripped = OsLoginServiceClient.parse_common_folder_path(
        OsLoginServiceClient.common_folder_path(**components)
    )
    assert round_tripped == components
def test_common_organization_path():
    """common_organization_path builds organizations/{organization}."""
    org_id = "squid"
    want = f"organizations/{org_id}"
    got = OsLoginServiceClient.common_organization_path(org_id)
    assert got == want
def test_parse_common_organization_path():
    """Parsing an organization path recovers the original components."""
    components = {
        "organization": "clam",
    }
    # Check that the path construction is reversible.
    round_tripped = OsLoginServiceClient.parse_common_organization_path(
        OsLoginServiceClient.common_organization_path(**components)
    )
    assert round_tripped == components
def test_common_project_path():
    """common_project_path builds projects/{project}."""
    project_id = "whelk"
    want = f"projects/{project_id}"
    got = OsLoginServiceClient.common_project_path(project_id)
    assert got == want
def test_parse_common_project_path():
    """Parsing a project path recovers the original components."""
    components = {
        "project": "octopus",
    }
    # Check that the path construction is reversible.
    round_tripped = OsLoginServiceClient.parse_common_project_path(
        OsLoginServiceClient.common_project_path(**components)
    )
    assert round_tripped == components
def test_common_location_path():
    """common_location_path builds projects/{project}/locations/{location}."""
    project_id, location_id = "oyster", "nudibranch"
    want = f"projects/{project_id}/locations/{location_id}"
    got = OsLoginServiceClient.common_location_path(project_id, location_id)
    assert got == want
def test_parse_common_location_path():
    """Parsing a location path recovers the original components."""
    components = {
        "project": "cuttlefish",
        "location": "mussel",
    }
    # Check that the path construction is reversible.
    round_tripped = OsLoginServiceClient.parse_common_location_path(
        OsLoginServiceClient.common_location_path(**components)
    )
    assert round_tripped == components
def test_client_with_default_client_info():
    """Both the client and a bare transport forward client_info to _prep_wrapped_messages."""
    client_info = gapic_v1.client_info.ClientInfo()

    # Case 1: constructing the client.
    with mock.patch.object(
        transports.OsLoginServiceTransport, "_prep_wrapped_messages"
    ) as prep:
        client = OsLoginServiceClient(
            credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
        )
        prep.assert_called_once_with(client_info)

    # Case 2: constructing the transport directly.
    with mock.patch.object(
        transports.OsLoginServiceTransport, "_prep_wrapped_messages"
    ) as prep:
        transport_class = OsLoginServiceClient.get_transport_class()
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
        )
        prep.assert_called_once_with(client_info)
@pytest.mark.asyncio
async def test_transport_close_async():
    """Exiting the async client context manager closes the gRPC channel exactly once."""
    client = OsLoginServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio",
    )
    with mock.patch.object(
        type(getattr(client.transport, "grpc_channel")), "close"
    ) as close:
        async with client:
            # Channel must stay open while inside the context.
            close.assert_not_called()
        close.assert_called_once()
def test_transport_close():
    """Exiting the sync client context manager closes the underlying channel once.

    Maps each transport name to the private attribute that holds its channel.
    """
    # Renamed from ``transports``: the original local shadowed the imported
    # ``transports`` module used throughout this file.
    transport_channel_attrs = {
        "grpc": "_grpc_channel",
    }
    for transport, close_name in transport_channel_attrs.items():
        client = OsLoginServiceClient(
            credentials=ga_credentials.AnonymousCredentials(), transport=transport
        )
        with mock.patch.object(
            type(getattr(client.transport, close_name)), "close"
        ) as close:
            with client:
                # Channel must stay open while inside the context.
                close.assert_not_called()
            close.assert_called_once()
def test_client_ctx():
    """The client context manager delegates close() to its transport."""
    # Renamed from ``transports``: the original local shadowed the imported
    # ``transports`` module used throughout this file.
    transport_names = [
        "grpc",
    ]
    for transport in transport_names:
        client = OsLoginServiceClient(
            credentials=ga_credentials.AnonymousCredentials(), transport=transport
        )
        # Test client calls underlying transport.
        with mock.patch.object(type(client.transport), "close") as close:
            close.assert_not_called()
            with client:
                pass
            close.assert_called()
@pytest.mark.parametrize(
    "client_class,transport_class",
    [
        (OsLoginServiceClient, transports.OsLoginServiceGrpcTransport),
        (OsLoginServiceAsyncClient, transports.OsLoginServiceGrpcAsyncIOTransport),
    ],
)
def test_api_key_credentials(client_class, transport_class):
    """An api_key in client options is exchanged for credentials via get_api_key_credentials."""
    with mock.patch.object(
        google.auth._default, "get_api_key_credentials", create=True
    ) as get_api_key_credentials:
        mock_cred = mock.Mock()
        get_api_key_credentials.return_value = mock_cred
        options = client_options.ClientOptions()
        options.api_key = "api_key"
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(client_options=options)
            # The API-key-derived credentials must reach the transport ctor.
            patched.assert_called_once_with(
                credentials=mock_cred,
                credentials_file=None,
                host=client.DEFAULT_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )
|
googleapis/python-oslogin
|
tests/unit/gapic/oslogin_v1/test_os_login_service.py
|
Python
|
apache-2.0
| 99,055
|
[
"Octopus"
] |
ce10646ae387f1c7868b2a7994b80d4d58d05ba184354824fa0579474697846a
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Benchmarks for `tf.data.experimental.rejection_resample()`."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.python.data.experimental.ops import resampling
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.platform import test
def _time_resampling(
    test_obj, data_np, target_dist, init_dist, num_to_sample):
  """Time drawing `num_to_sample` elements through rejection resampling.

  Builds an infinitely repeating dataset from `data_np`, reshapes its class
  distribution from `init_dist` to `target_dist` via rejection sampling, then
  measures wall-clock time to pull `num_to_sample` elements in a session.
  Returns elapsed seconds as a float.
  """
  dataset = dataset_ops.Dataset.from_tensor_slices(data_np).repeat()

  # Reshape distribution via rejection sampling.
  dataset = dataset.apply(
      resampling.rejection_resample(
          class_func=lambda x: x,
          target_dist=target_dist,
          initial_dist=init_dist,
          seed=142))  # fixed seed keeps the benchmark deterministic

  get_next = dataset_ops.make_one_shot_iterator(dataset).get_next()

  with test_obj.test_session() as sess:
    start_time = time.time()
    for _ in xrange(num_to_sample):
      sess.run(get_next)
    end_time = time.time()

  return end_time - start_time
class RejectionResampleBenchmark(test.Benchmark):
  """Benchmarks for `tf.data.experimental.rejection_resample()`."""

  def benchmarkResamplePerformance(self):
    """Benchmark resampling a uniform 4-class source into a dirac-delta target."""
    init_dist = [0.25, 0.25, 0.25, 0.25]
    target_dist = [0.0, 0.0, 0.0, 1.0]
    num_classes = len(init_dist)
    # We don't need many samples to test a dirac-delta target distribution
    num_samples = 1000
    data_np = np.random.choice(num_classes, num_samples, p=init_dist)

    resample_time = _time_resampling(
        self, data_np, target_dist, init_dist, num_to_sample=1000)

    self.report_benchmark(iters=1000, wall_time=resample_time, name="resample")
if __name__ == "__main__":
  # Run the benchmark through the TensorFlow test harness.
  test.main()
|
asimshankar/tensorflow
|
tensorflow/python/data/experimental/benchmarks/rejection_resample_benchmark.py
|
Python
|
apache-2.0
| 2,449
|
[
"DIRAC"
] |
fd06cdbe75bd9ee434bdbf2065f3323f00c9e3277250b5b614fe33a72d948ff4
|
#!/usr/bin/env python
'''
TracPy class
'''
import tracpy
import numpy as np
from . import tracmass
from matplotlib.mlab import find
class Tracpy(object):
"""TracPy class."""
def __init__(self, currents_filename, grid, nsteps=1, ndays=1, ff=1,
tseas=3600., ah=0., av=0., z0='s', zpar=1, do3d=0, doturb=0,
name='test', dostream=0, N=1,
time_units='seconds since 1970-01-01', dtFromTracmass=None,
zparuv=None, tseas_use=None, savell=True, doperiodic=0,
usespherical=True, ellps='WGS84'):
"""Initialize class.
Note: GCM==General Circulation Model, meaning the predicted u/v
velocity fields that are input into TracPy to run the drifters.
Args:
currents_filename (str or List[str]): NetCDF file name
(with extension), list of file names, or OpenDAP url to GCM
output.
grid (object): class containing all necessary grid information,
as calculated from tracpy.inout.readgrid().
nsteps (Optional[int]): sets the max time step between GCM model
outputs between drifter steps. (iter in TRACMASS) Does not
control the output sampling anymore. The velocity fields are
assumed frozen while a drifter is stepped through a given grid
cell. nsteps can force the reinterpolation of the fields by
setting the max time before reinterpolation. Defaults to 1.
ndays (Optional[float]): number of days to run for drifter tracks
from start date. Defaults to 1.
ff (Optional[int]): 1 is forward in time, -1 is backward.
Defaults to 1.
tseas (Optional[float]): number of seconds between GCM model
outputs. Defaults to 3600.
ah (Optional[float]): horizontal diffusivity, in m^2/s. Only used
if doturb!=0. Defaults to 0.
av (Optional[float]): vertical diffusivity, in m^2/s. Only used
if doturb!=0 and do3d==1. Defaults to 0.
z0 (Optional[str or array]): string flag in 2D case or array of
initial z locations in 3D case. Defaults to 's'.
zpar (Optional[int or str]): isoslice value to use in 2D case or
string flag in 3D case. Default is 1.
For 3D drifter movement, use do3d=1, and z0 should be an array
of initial drifter depths. The array should be the same size as
lon0 and be negative for under water. Currently drifter depths
need to be above the seabed for every x,y particle location for
the script to run.
To do 3D but start at surface, use z0=zeros(ia.shape) and have
either zpar = 'fromMSL'
choose fromMSL to have z0 starting depths be for that depth
below the base time-independent sea level (or mean sea level).
choose 'fromZeta' to have z0 starting depths be for that depth
below the time-dependent sea surface.
Haven't quite finished the 'fromZeta' case.
For 2D drifter movement, turn on twodim flag in makefile.
Then:
set z0 to 's' for 2D along a terrain-following slice
and zpar to be the index of s level you want to use (0 to km-1)
set z0 to 'rho' for 2D along a density surface
and zpar to be the density value you want to use
Can do the same thing with salinity ('salt') or temperature
('temp')
The model output doesn't currently have density though.
set z0 to 'z' for 2D along a depth slice
and zpar to be the constant (negative) depth value you want to
use
To simulate drifters at the surface, set z0 to 's'
and zpar = grid['km']-1 to put them in the upper s level
do3d (Optional[int]): 1 for 3D or 0 for 2D. Default is 0.
doturb (Optional[int]): 0 for no added diffusion, 1 for diffusion
via velocity fluctuation, 2/3 for diffusion via random walk (3
for aligned with isobaths). Default is 0.
name (Optional[str]): name for output. Default is 'test'.
dostream (Optional[int]): 1 to calculate transport for lagrangian
stream functions, 0 to not. Default is 0.
N (Optional[int]): number of steps between GCM model outputs for
outputting drifter locations. Defaults to output at nsteps.
If dtFromTracmass is being used, N is set by that.
time_units (Optional[int]): Reference for time, for changing
between numerical times and datetime format. Default is
'seconds since 1970-01-01'.
dtFromTracmass: Time period for exiting from TRACMASS. If
uninitialized, this is set to tseas so that it only exits
TRACMASS when it has gone through a full model output. If
initialized by the user, TRACMASS will run for 1 time step of
length dtFromTracmass before exiting to the loop.
zparuv: Defaults to zpar. Use this if the k index for the model
output fields (e.g, u, v) is different from the k index in the
grid This might happen if, for example, only the surface current
were saved, but the model run originally did have many layers.
This parameter represents the k index for the u and v output,
not for the grid.
tseas_use: Defaults to tseas. Desired time between outputs in
seconds, as opposed to the actual time between outputs (tseas).
Should be >= tseas since this is just an ability to use model
output at less frequency than is available, probably just for
testing purposes or matching other models. Should be a multiple
of tseas (or will be rounded later).
savell (Optional[bool]): True to save drifter tracks in lon/lat
and False to save them in grid coords. Default is True.
doperiodic (Optional[int]): Whether to use periodic boundary
conditions for drifters and, if so, on which walls. Default is 0.
0: do not use periodic boundary conditions
1: use a periodic boundary condition in the east-west/x/i
direction
2: use a periodic boundary condition in the north-south/y/j
direction
usespherical (Optional[bool]): True if want to use spherical
(lon/lat) coordinates and False for idealized applications where
it isn't necessary to project from spherical coordinates.
Default is True.
"""
self.currents_filename = currents_filename
self.grid = grid
# Initial parameters
self.nsteps = nsteps
self.ndays = ndays
self.ff = ff
self.tseas = float(tseas)
self.ah = ah
self.av = av
self.z0 = z0
self.zpar = zpar
self.do3d = do3d
self.doturb = doturb
self.name = name
self.dostream = dostream
self.N = N
self.time_units = time_units
self.savell = savell
self.doperiodic = doperiodic
self.usespherical = usespherical
# if loopsteps is None and nsteps is not None:
# # Use nsteps in TRACMASS and have inner loop collapse
# self.loopsteps = 1
# elif loopsteps is not None and nsteps is None:
# # This means to use the inner loop (with loopsteps) and nsteps=1
# # to just do 1 step per call to TRACMASS
# self.nsteps = 1
# elif loopsteps is None and nsteps is None:
# print 'need to input a value for nsteps or loopsteps.'
# break
if dtFromTracmass is None:
self.dtFromTracmass = tseas
else:
# If using dtFromTracmass, N=1, for steps between tracmass exits
self.N = 1
# # If using dtFromTracmass, N is set according to that.
# # this is the total number of model_step_is_done
# self.N = (self.ndays*3600*24.)/self.tseas
self.dtFromTracmass = dtFromTracmass
# Find number of interior loop steps in case dtFromTracmass is not
# equal to tseas
# NEEDS TO BE EVEN NUMBER FOR NOW: NEED TO GENERALIZE THIS LATER
self.nsubsteps = int(self.tseas/self.dtFromTracmass)
if zparuv is None:
self.zparuv = zpar
else:
self.zparuv = zparuv
if tseas_use is None:
self.tseas_use = tseas
# Calculate parameters that derive from other parameters
# Number of model outputs to use (based on tseas, actual amount of
# model output)
# This should not be updated with tstride since it represents the full
# amount of indices in the original model output. tstride will be used
# separately to account for the difference.
# Adding one index so that all necessary indices are captured by this
# number.
# Then the run loop uses only the indices determined by tout instead
# of needing an extra one beyond now rounding up instead of down
self.tout = np.int(np.ceil((ndays*(24*3600))/tseas + 1))
# Calculate time outputs stride. Will be 1 if want to use all model
# output.
self.tstride = int(self.tseas_use/self.tseas) # will round down
# For later use
# fluxes
self.uf = None
self.vf = None
self.dzt = None
self.zrt = None
self.zwt = None
def prepare_for_model_run(self, date, lon0, lat0):
"""Get everything ready so that we can get to the simulation."""
# # Convert date to number
# date = netCDF.date2num(date, self.time_units)
# Figure out what files will be used for this tracking
nc, tinds = tracpy.inout.setupROMSfiles(self.currents_filename, date,
self.ff, self.tout,
self.time_units,
tstride=self.tstride)
# Interpolate to get starting positions in grid space
# convert from assumed input lon/lat coord locations to grid space
if self.usespherical:
xstart0, ystart0, _ = tracpy.tools.interpolate2d(lon0, lat0,
self.grid,
'd_ll2ij')
# assume input seed locations are in projected/idealized space and
# change to index space
else:
xstart0, ystart0, _ = tracpy.tools.interpolate2d(lon0, lat0,
self.grid,
'd_xy2ij')
# Do z a little lower down
# Initialize seed locations
# these will be used as indices so must be ints
ia = np.ceil(xstart0).astype(int)
ja = np.ceil(ystart0).astype(int)
# don't use nan's
# pdb.set_trace()
ind2 = ~np.isnan(ia) * ~np.isnan(ja)
ia = ia[ind2]
ja = ja[ind2]
xstart0 = xstart0[ind2]
ystart0 = ystart0[ind2]
# check for point being masked
# only keep unmasked drifter locations
unmasked = np.where(self.grid.mask_rho[ja, ia] == 1)[0]
ia = ia[unmasked]
ja = ja[unmasked]
xstart0 = xstart0[unmasked]
ystart0 = ystart0[unmasked]
if 'ocean_time' in nc.variables:
dates = nc.variables['ocean_time'][:]
elif 'time' in nc.variables:
dates = nc.variables['time'][:]
# time at start of drifter test from file in seconds since 1970-01-01
# add this on at the end since it is big
t0save = dates[tinds[0]]
# Initialize drifter grid positions and indices
xend = np.ones((ia.size, (len(tinds)-1)*self.N+1))*np.nan
yend = np.ones((ia.size, (len(tinds)-1)*self.N+1))*np.nan
zend = np.ones((ia.size, (len(tinds)-1)*self.N+1))*np.nan
zp = np.ones((ia.size, (len(tinds)-1)*self.N+1))*np.nan
ttend = np.ones((ia.size, (len(tinds)-1)*self.N+1))
# initialize all exit flags for in the domain
flag = np.zeros((ia.size), dtype=np.int)
# Initialize vertical stuff and fluxes
# Read initial field in - to 'new' variable since will be moved
# at the beginning of the time loop ahead
lx = self.grid.imt
ly = self.grid.jmt
try:
lk = self.grid.sc_r.size
except:
lk = 2
# Now that we have the grid, initialize the info for the two
# bounding model steps using the grid size
self.uf = np.ones((2, lk-1, ly, lx-1))*np.nan
self.vf = np.ones((2, lk-1, ly-1, lx))*np.nan
self.dzt = np.ones((2, lk-1, ly, lx))*np.nan
self.zrt = np.ones((2, lk-1, ly, lx))*np.nan
self.zwt = np.ones((2, lk, ly, lx))*np.nan
if isinstance(self.z0, str): # isoslice case
self.uf[1, :, :, :], self.vf[1, :, :, :], \
self.dzt[1, :, :, :], self.zrt[1, :, :, :], \
self.zwt[1, :, :, :] = \
tracpy.inout.readfields(tinds[0], self.grid, nc, self.z0,
self.zpar, zparuv=self.zparuv)
else: # 3d case
self.uf[1, :, :, :], self.vf[1, :, :, :], \
self.dzt[1, :, :, :], self.zrt[1, :, :, :], \
self.zwt[1, :, :, :] = \
tracpy.inout.readfields(tinds[0], self.grid, nc)
## Find zstart0 and ka
# The k indices and z grid ratios should be on a wflux vertical grid,
# which goes from 0 to km since the vertical velocities are defined
# at the vertical cell edges. A drifter's grid cell is vertically
# bounded above by the kth level and below by the (k-1)th level
if isinstance(self.z0, str): # then doing a 2d isoslice
# there is only one vertical grid cell, but with two vertically-
# bounding edges, 0 and 1, so the initial ka value is 1 for all
# isoslice drifters.
ka = np.ones(ia.size)
# for s level isoslice, place drifters vertically at the center
# of the grid cell since that is where the u/v flux info is from.
# For a rho/temp/density isoslice, we treat it the same way, such
# that the u/v flux info taken at a specific rho/temp/density
# value is treated as being at the center of the grid cells
# vertically.
zstart0 = np.ones(ia.size)*0.5
else: # 3d case
# Convert initial real space vertical locations to grid space
# first find indices of grid cells vertically
ka = np.ones(ia.size, dtype=int)*-999 # need int placeholder
zstart0 = np.ones(ia.size)*np.nan
if self.zpar == 'fromMSL':
# print 'zpar==''fromMSL'' not implemented yet...'
raise NotImplementedError("zpar==''fromMSL'' not implemented\
yet...")
# for i in xrange(ia.size):
# # pdb.set_trace()
# ind = (self.grid['zwt0'][ia[i],ja[i],:]<=self.z0[i])
# # check to make sure there is at least one true value, so the z0 is shallower than the seabed
# if np.sum(ind):
# ka[i] = find(ind)[-1] # find value that is just shallower than starting vertical position
# # if the drifter starting vertical location is too deep for the x,y location, complain about it
# else: # Maybe make this nan or something later
# print 'drifter vertical starting location is too deep for its x,y location. Try again.'
# if (self.z0[i] != self.grid['zwt0'][ia[i],ja[i],ka[i]]) and (ka[i] != self.grid['km']): # check this
# ka[i] = ka[i]+1
# # Then find the vertical relative position in the grid cell by adding on the bit of grid cell
# zstart0[i] = ka[i] - abs(self.z0[i]-self.grid['zwt0'][ia[i],ja[i],ka[i]]) \
# /abs(self.grid['zwt0'][ia[i],ja[i],ka[i]-1]-self.grid['zwt0'][ia[i],ja[i],ka[i]])
elif self.zpar == 'fromZeta':
# In this case, the starting z values of the drifters are
# found in grid space as z0 below the z surface for each
# drifter
for i in range(ia.size):
ind = (self.zwt[1, :, ja[i], ia[i]] <= self.z0[i])
# find value that is just shallower than starting vertical
# position
ka[i] = find(ind)[-1]
if (self.z0[i] != self.zwt[1, ka[i], ja[i], ia[i]]) and \
(ka[i] != self.grid.km): # check this
ka[i] = ka[i]+1
# Then find the vertical relative position in the grid
# cell by adding on the bit of grid cell
zstart0[i] = ka[i] - \
abs(self.z0[i]-self.zwt[1, ka[i], ja[i], ia[i]]) \
/ abs(self.zwt[1, ka[i]-1, ja[i], ia[i]] -
self.zwt[1, ka[i], ja[i], ia[i]])
# Find initial cell depths to concatenate to beginning of drifter
# tracks later
# zsave = tracpy.tools.interpolate3d(xstart0, ystart0, zstart0,
# self.zwt[:, :, :, 1])
# Initialize x,y,z with initial seeded positions
xend[:, 0] = xstart0
yend[:, 0] = ystart0
zend[:, 0] = zstart0
return tinds, nc, t0save, xend, yend, zend, zp, ttend, flag
    def prepare_for_model_step(self, tind, nc, flag, xend, yend, zend, j,
                               nsubstep, T0):
        """Already in a step, get ready to actually do step.

        Rolls the previously-read "new" model fields into the "old" slot,
        reads the next model output, and linearly interpolates the flux
        fields in time for the current substep window. Drifters whose exit
        flag is set are masked out of the start positions.
        """
        xstart = xend[:, j*self.N]
        ystart = yend[:, j*self.N]
        zstart = zend[:, j*self.N]

        # mask out drifters that have exited the domain
        xstart = np.ma.masked_where(flag[:] == 1, xstart)
        ystart = np.ma.masked_where(flag[:] == 1, ystart)
        zstart = np.ma.masked_where(flag[:] == 1, zstart)
        if T0 is not None:
            T0 = np.ma.masked_where(flag[:] == 1, T0)

        # Move previous new time step to old time step info
        self.uf[0, :, :, :] = self.uf[1, :, :, :].copy()
        self.vf[0, :, :, :] = self.vf[1, :, :, :].copy()
        self.dzt[0, :, :, :] = self.dzt[1, :, :, :].copy()
        self.zrt[0, :, :, :] = self.zrt[1, :, :, :].copy()
        self.zwt[0, :, :, :] = self.zwt[1, :, :, :].copy()

        # Read stuff in for next time loop
        if isinstance(self.z0, str):  # isoslice case
            self.uf[1, :, :, :], self.vf[1, :, :, :], self.dzt[1, :, :, :], \
                self.zrt[1, :, :, :], self.zwt[1, :, :, :] = \
                tracpy.inout.readfields(tind, self.grid, nc, self.z0,
                                        self.zpar, zparuv=self.zparuv)
        else:  # 3d case
            self.uf[1, :, :, :], self.vf[1, :, :, :], self.dzt[1, :, :, :], \
                self.zrt[1, :, :, :], self.zwt[1, :, :, :] = \
                tracpy.inout.readfields(tind, self.grid, nc)

        # Find the fluxes of the immediately bounding range for the desired
        # time step, which can be less than 1 model output
        # SHOULD THIS BE PART OF SELF TOO? Leave uf and vf as is, though,
        # because they may be used for interpolating the input fluxes for
        # substeps.
        ufsub = np.ones(self.uf.shape)*np.nan
        vfsub = np.ones(self.vf.shape)*np.nan
        # for earlier bounding flux info
        rp = nsubstep/self.nsubsteps  # weighting for later time step
        rm = 1 - rp  # timing for earlier time step
        ufsub[0, :, :, :] = rm*self.uf[0, :, :, :] + rp*self.uf[1, :, :, :]
        vfsub[0, :, :, :] = rm*self.vf[0, :, :, :] + rp*self.vf[1, :, :, :]
        # for later bounding flux info
        rp = (nsubstep+1)/self.nsubsteps  # weighting for later time step
        rm = 1 - rp  # timing for earlier time step
        ufsub[1, :, :, :] = rm*self.uf[0, :, :, :] + rp*self.uf[1, :, :, :]
        vfsub[1, :, :, :] = rm*self.vf[0, :, :, :] + rp*self.vf[1, :, :, :]

        # Change the horizontal indices from python to fortran indexing
        # (vertical are zero-based in tracmass)
        xstart, ystart = tracpy.tools.convert_indices('py2f', xstart, ystart)

        # make flux fields masked arrays
        # NOTE(review): 1e30 appears to be the model's land/fill value here —
        # confirm against tracpy.inout.readfields.
        ufsub = np.ma.masked_where(ufsub>1e30, ufsub)
        vfsub = np.ma.masked_where(vfsub>1e30, vfsub)

        return xstart, ystart, zstart, ufsub, vfsub, T0
    def step(self, xstart, ystart, zstart, ufsub, vfsub, T0, U, V):
        """
        Take some number of steps between a start and end time.
        FIGURE OUT HOW TO KEEP TRACK OF TIME FOR EACH SET OF LINES

        Calls the compiled TRACMASS stepper. Arrays are transposed when sent
        to Fortran (column-major), and masked entries are compressed out of
        the start positions. The two branches differ only in whether the
        transport arguments (t0/ut/vt) are passed, which happens when T0 is
        given.
        """
        if T0 is not None:
            xend, yend, zend, flag,\
                ttend, U, V = tracmass.step(np.ma.compressed(xstart),
                                            np.ma.compressed(ystart),
                                            np.ma.compressed(zstart),
                                            self.tseas_use, ufsub.T, vfsub.T,
                                            self.ff,
                                            self.grid.kmt.astype(int).T,
                                            self.dzt.T, self.grid.dxdy.T,
                                            self.grid.dxv.T,
                                            self.grid.dyu.T, self.grid.h.T,
                                            self.nsteps, self.ah, self.av,
                                            self.do3d, self.doturb,
                                            self.doperiodic, self.dostream,
                                            self.N, t0=np.ma.compressed(T0),
                                            ut=U.T, vt=V.T)
        else:
            xend, yend, zend, flag,\
                ttend, U, V = tracmass.step(np.ma.compressed(xstart),
                                            np.ma.compressed(ystart),
                                            np.ma.compressed(zstart),
                                            self.tseas_use, ufsub.T, vfsub.T,
                                            self.ff,
                                            self.grid.kmt.astype(int).T,
                                            self.dzt.T, self.grid.dxdy.T,
                                            self.grid.dxv.T,
                                            self.grid.dyu.T, self.grid.h.T,
                                            self.nsteps, self.ah, self.av,
                                            self.do3d, self.doturb,
                                            self.doperiodic, self.dostream,
                                            self.N)

        # return the new positions or the delta lat/lon
        return xend, yend, zend, flag, ttend, U, V
    def model_step_is_done(self, xend, yend, zend, ttend, tstart):
        """Stuff to do after a call to TRACMASS.

        Offsets drifter times by the step start time, converts indices back
        from Fortran to Python convention, and (for non-surface runs saving
        lon/lat) converts grid-space z to real depth.
        """
        # Add initial step time to ttend
        ttend = (ttend.T + tstart).T

        # Change the horizontal indices from python to fortran indexing
        xend, yend = tracpy.tools.convert_indices('f2py', xend, yend)

        # Skip calculating real z position if we are doing surface-only
        # drifters anyway
        if self.z0 != 's' and self.savell:
            # Calculate real z position
            # linear time interpolation constant that is used in tracmass
            r = np.linspace(1./self.N, 1, self.N)

            for n in range(self.N):  # loop through time steps
                # interpolate to a specific output time
                zwt = (1.-r[n])*self.zwt[0, :, :, :] + \
                    r[n]*self.zwt[1, :, :, :]
                # mask out land
                zwt = np.ma.masked_where(zwt>1e30, zwt)
                # NOTE(review): zp is reassigned on every pass, so only the
                # final iteration's interpolation survives — confirm this is
                # intended rather than a per-step accumulation.
                zp, dt = tracpy.tools.interpolate3d(xend, yend, zend, zwt)
        else:
            zp = zend

        # return the new positions or the delta lat/lon
        return xend, yend, zend, zp, ttend
def finishSimulation(self, ttend, t0save, xend, yend, zp, T0, U, V):
    """Wrap up simulation: convert final positions and save tracks.

    Args:
        ttend: drifter times relative to t0save (seconds).
        t0save: base time (seconds) added back onto ttend.
        xend, yend: final horizontal grid index positions.
        zp: vertical positions.
        T0, U, V: transport-related arrays, passed through to the save.

    Returns:
        Tuple (lonp, latp, zp, ttend, T0, U, V). lonp/latp are lon/lat
        (spherical grids) or projected x/y (non-spherical) when `savell`
        is set; otherwise they are the unchanged grid indices.

    Note:
        Not doing transport yet.
    """
    ttend = ttend + t0save  # add back in base time in seconds
    # Map-coordinates interpolation if saving tracks as lon/lat.
    if self.savell:
        if self.usespherical:
            # Grid indices -> geographic lon/lat.
            lonp, latp, dt = tracpy.tools.interpolate2d(xend, yend,
                                                        self.grid,
                                                        'm_ij2ll',
                                                        mode='constant',
                                                        cval=np.nan)
        else:
            # Grid indices -> projected x/y coordinates.
            lonp, latp, dt = tracpy.tools.interpolate2d(xend, yend,
                                                        self.grid,
                                                        'm_ij2xy',
                                                        mode='constant',
                                                        cval=np.nan)
    else:
        # rename grid index locations as lon/lat to fit in with save
        # syntax below
        lonp = xend
        latp = yend
    # Save results to netcdf file
    tracpy.inout.savetracks(lonp, latp, zp, ttend, self.name, self.nsteps,
                            self.N, self.ff, self.tseas_use, self.ah,
                            self.av, self.do3d, self.doturb,
                            self.currents_filename, self.doperiodic,
                            self.time_units, T0, U, V, savell=self.savell)
    return lonp, latp, zp, ttend, T0, U, V
|
kthyng/tracpy
|
tracpy/tracpy_class.py
|
Python
|
mit
| 26,569
|
[
"NetCDF"
] |
7442dc9f80d10895851e32cfecaf1478985fa02242ddd4ce1deceadf9e7b7d81
|
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Wrapper class for a gym-like environment."""
import functools
import os
from absl import logging
import gin
import gym
import numpy as np
from scipy.stats import multivariate_normal
from polish.mcts import mcts_player
from polish.utils import tf_utils
import polish.utils.running_mean_std as running_mean_std
@gin.configurable
class MCTSEnv(object):
"""A gym-like environment for building trajectories.
Attributes:
env: game environment (should support gym environment APIs, `step, reset`).
estimator: rollout policy (value and policy network) TF estimator.
serving_input_fn: Input function for model predictions.
clip_ob: Clip value for observations (states).
clip_rew: Clip value for rewards.
epsilon: Epsilon value used to avoid zero-division.
obs_normalized: Whether to normalize environment observations.
reward_normalized: Whether to normalize environment rewards.
env_states: Array for MuJoCo environment internal states.
trajectory_states: Array for trajectory states.
trajectory_actions: Array for trajectory actions.
trajectory_values: Array for trajectory state-values.
trajectory_returns: Array for trajectory returns.
trajectory_means: Array for current policy mean values.
trajectory_dones: Array for indicating whether a state is a terminal state.
trajectory_logstds: Array for current policy logstd values.
trajectory_neg_logprobs: Array for trajectory negative log probabilities.
trajectory_per_episode_rewards: Array for trajectory `episode` rewards. Each
trajectory may contain multiple episodes.
trajectory_per_episode_lengths: Array for trajectory `episode` lengths. Each
trajectory may contain multiple episodes.
trajectory_per_step_rewards: Array for trajectory rewards (per step).
mcts_player: An MCTSPlayer instance for generating MCTS rollouts.
mcts_sampling: If True, the current iteration uses MCTS to generate
demonstration data.
"""
def __init__(self,
env,
estimator,
serving_input_fn,
gamma=0.99,
lam=0.95,
tanh_action_clipping=False,
obs_normalized=True,
reward_normalized=True,
clip_ob=10.,
clip_rew=10.,
epsilon=1e-8,
mcts_enable=False,
num_envs=1,
mcts_start_step_frac=0.1,
mcts_end_step_frac=0.9,
num_iterations=156160,
mcts_sim_decay_factor=0.8,
mcts_sampling_frac=0.1,
mcts_collect_data_freq=1,
random_action_sampling_freq=0.0,
checkpoint_dir=None):
"""Creates a gym-like environment with some added functionalities.
Args:
env: an instance of gym environment.
estimator: a TF estimator instance used to call `prediction` on.
serving_input_fn: the serving input function specifies what the caller of
the estimator `predict` method must provide. the `serving_input_fn`
tells the model what data it has to get from the user.
gamma: the discount factor multiplied by future rewards from the
environment. gamma value is generally used to dampen the effect of
future reward on the agent's choice. That is, gamma value makes future
rewards are worth less than immediate rewards.
lam: Generalized Advantage Estimator (GAE) parameter.
tanh_action_clipping: If set, performs tanh action clipping. Enabling tanh
action clipping bound the actions to [-1, 1]. See
https://arxiv.org/pdf/1801.01290.pdf for details.
obs_normalized: if True, the observations from environment are normalized.
reward_normalized: if True, the rewards from environment are normalized.
clip_ob: the range for clipping the observations. The observations are
clipped into the range [-clip_ob, clip_ob] after normalization.
clip_rew: the range for clipping the rewards. The rewards are clipped into
the range [-clip_rew, clip_rew] after normalization.
epsilon: an infinitesimal value used to prevent divide-by-zero in
normalizing the data.
mcts_enable: if True, the samples are taken from MCTS simulations.
num_envs: indicates the number of parallel environments in MCTS player.
mcts_start_step_frac: The sampling step at which MCTS sampling starts.
mcts_end_step_frac: The sampling step at which MCTS sampling stops.
num_iterations: total number of training iterations (including epochs).
mcts_sim_decay_factor: decay number of MCTS simulations with this value.
mcts_sampling_frac: across all the data samples this fraction of MCTS
sampling occurs.
mcts_collect_data_freq: As MCTS is costly, we do not want to collect MCTS
data for each training iteration. Instead, every
`mcts_collect_data_freq`, we perform MCTS sampling.
random_action_sampling_freq: the percentage of the children's move that
are exploratory (completely random).
checkpoint_dir: use checkpoint dir and create 'mcts_data' this directory
for holding MCTS data.
"""
if random_action_sampling_freq < 0.0 or random_action_sampling_freq > 1.0:
raise ValueError('ranom_action_sampling_freq should be '
'between [0.0, 1.0]!')
self.env = env
self.estimator = estimator
self.serving_input_fn = serving_input_fn
# Private attributes.
self._policy = None
self._last_value = None
self._last_done = None
self._gym_state = None
self._episode_length = 0
self._episode_reward = 0.
self._env_done = False
self._tanh_action_clipping = tanh_action_clipping
self._gamma = gamma
self._lam = lam
self._mcts_enable = mcts_enable
self._first_time_call = True
self._num_envs = num_envs
self._sampling_step = 0
self._num_mcts_samples = 0
self._random_gen = np.random
self._random_action_sampling_freq = random_action_sampling_freq
self._mcts_data_dir = os.path.join(checkpoint_dir, 'mcts_data')
if (mcts_start_step_frac < 0.0 or mcts_start_step_frac > 1.5):
raise ValueError(
'MCTS start step should be a value between 0.0 and 1.0'
' indicating after what fraction of data sampling we should switch'
' to MCTS sampling')
if (mcts_end_step_frac < 0.0 or mcts_end_step_frac > 1.5):
raise ValueError(
'MCTS end step should be a value between 0.0 and 1.0'
' indicating after what fraction of data sampling we should stop'
' MCTS sampling')
if mcts_end_step_frac <= mcts_start_step_frac:
raise ValueError('MCTS end step should be greater than MCTS start step')
if mcts_sampling_frac > 1.0:
raise ValueError(
'Among all the sampling iterations this fraction of'
' MCTS sampling occurs. Negative value indicates no MCTS sampling.')
if mcts_collect_data_freq < 1.0:
raise ValueError(
'mcts_collect_data_freq must be greater than one. That is, '
' how many times to perform MCTS sampling.')
self._num_iterations = num_iterations
self._mcts_start_step_frac = int(mcts_start_step_frac *
self._num_iterations)
self._mcts_end_step_frac = int(mcts_end_step_frac * self._num_iterations)
self._mcts_sampling_frac = mcts_sampling_frac
self._mcts_collect_data_freq = mcts_collect_data_freq
self._mcts_sim_decay_factor = mcts_sim_decay_factor
# Observation and return normalizers.
self._ob_rms = running_mean_std.RunningMeanStd(
shape=(1, self.env.observation_space.shape[0]))
self._ret_rms = running_mean_std.RunningMeanStd(shape=())
# Return placeholder.
self._ret = np.zeros(1)
# Property getter/setter.
self._clip_ob = clip_ob
self._clip_rew = clip_rew
self._epsilon = epsilon
self._obs_normalized = obs_normalized
self._reward_normalized = reward_normalized
self.mcts_sampling = False
if self._mcts_enable:
self.prepare_mcts_player()
self.reset()
self.initialize_trajectory_data()
@property
def clip_ob(self):
    # Observations are clipped to [-clip_ob, clip_ob] after normalization.
    return self._clip_ob

@clip_ob.setter
def clip_ob(self, clip_ob):
    self._clip_ob = clip_ob

@property
def clip_rew(self):
    # Rewards are clipped to [-clip_rew, clip_rew] after normalization.
    return self._clip_rew

@clip_rew.setter
def clip_rew(self, clip_rew):
    self._clip_rew = clip_rew

@property
def epsilon(self):
    # Small constant guarding against divide-by-zero in normalization.
    return self._epsilon

@epsilon.setter
def epsilon(self, epsilon):
    self._epsilon = epsilon

@property
def obs_normalized(self):
    # Whether observations are normalized with the running mean/std.
    return self._obs_normalized

@obs_normalized.setter
def obs_normalized(self, obs_normalized):
    self._obs_normalized = obs_normalized

@property
def reward_normalized(self):
    # Whether rewards are normalized with the running return std.
    return self._reward_normalized

@reward_normalized.setter
def reward_normalized(self, reward_normalized):
    self._reward_normalized = reward_normalized
def initialize_trajectory_data(self):
"""Initialize trajectory data to empty lists."""
# Variables for one trajectory.
# Each trajectory may consist of multiple episodes.
self.trajectory_states = []
self.trajectory_actions = []
self.trajectory_values = []
self.trajectory_returns = []
self.trajectory_means = []
self.trajectory_logstds = []
self.trajectory_neg_logprobs = []
self.trajectory_per_episode_rewards = []
self.trajectory_per_episode_lengths = []
self.trajectory_per_step_rewards = []
self.trajectory_dones = []
self.env_states = []
def step(self, action):
"""Take one action in the environment.
Args:
action: action to be taken on the environment.
Returns:
state: next state.
reward: reward.
done: final state.
info: information about the state.
"""
state, reward, done, _ = self.env.step(action)
return state, reward, done, _
def reset(self):
"""Reset the environment to an initial state.
Returns:
The initial state of the environment.
"""
# Reset the environment
self._gym_state = self.env.reset()
# Reset return normalizer to zero
self._ret = np.zeros(1)
# Normalize observation
self._gym_state = self._norm_clip_ob(np.asarray([self._gym_state]))[0]
return self._gym_state
def prepare_mcts_player(self):
    """Initializes variables for MCTS sampling.

    Builds `self._num_envs` copies of the gym environment (used during
    the MCTS expand/evaluate phase) and wraps them in an `MCTSPlayer`.
    Called from `__init__` only when `mcts_enable` is true.
    """
    # Retrieve the environment name so identical copies can be built.
    env_name = self.env.unwrapped.spec.id
    env_constructor = functools.partial(gym.make, env_name)
    # Throwaway instance used only to read the action-space size.
    temp_env = env_constructor()
    env_action_space = temp_env.action_space.shape[0]
    # Parallel environments used in MCTS simulation during
    # expand and evaluate phase.
    envs = [
        env_constructor() for _ in range(self._num_envs)
    ]
    self._tree_env = envs
    # NOTE(review): max_episode_steps is hard-coded to 1000 -- confirm it
    # matches the wrapped environment's own episode limit.
    self.mcts_player = mcts_player.MCTSPlayer(
        tree_env=self._tree_env,
        call_policy=self.call_policy,
        max_episode_steps=1000,
        env_action_space=env_action_space,
        num_envs=self._num_envs)
    self._current_mcts_player = self.mcts_player
def call_policy(self, state, only_normalized=False):
"""Run policy on a state.
Args:
state: state tensor [Batch, *].
only_normalized: if true, the input data are normalized and clipped.
Returns:
action, value, neg_logprob, mu, var in tensor [Batch, *].
"""
estimator_prediction = {}
# This check is for MCTSPlayer during simulation phase.
# We do not want to update observation runnign mean and std (`_ob_rms`)
# for the environment observations during MCTS simulations step.
if only_normalized:
state = self._norm_clip_ob(state, update_rms=False)
# Call TF estimator predictor and retrieve the predictions:
# `action`: sampled action from the policy distribution.
# `value`: state-value for the given state.
# `neg_logprob`: negative log of probability distribution function (pdf)
# of the sampled action.
# `mean`: mean value of the policy distibution.
# `logstd`: log of standard deviation of the policy distribution.
estimator_out = self._policy({'mcts_features': state,
'policy_features': state})
estimator_prediction['action'] = estimator_out['action']
estimator_prediction['value'] = estimator_out['value']
estimator_prediction['neg_logprob'] = estimator_out['neg_logprob']
estimator_prediction['mean'] = estimator_out['mean']
estimator_prediction['logstd'] = estimator_out['logstd']
return estimator_prediction
def update_action(self, input_action):
"""Update the action value.
Args:
input_action: input action array.
Returns:
updated action value after tanh clipping (if applicable).
"""
# tanh action clipping is a technique to map infinite action space
# from gaussian distribution to [-1,1]
# https://arxiv.org/pdf/1801.01290.pdf
if self._tanh_action_clipping:
return np.tanh(input_action)
else:
return input_action
def _norm_clip_ob(self, obs, update_rms=True):
"""Observation normalization and clipping.
Args:
obs: observation tensor from environment [*, state_size].
update_rms: if true, observation running mean gets updated.
Returns:
normalized and clipped observation.
"""
assert isinstance(obs, np.ndarray), ('The observation array MUST be a '
'numpy array.')
if update_rms:
self._ob_rms.update(obs)
if self.obs_normalized:
obs = np.clip(
(obs - self._ob_rms.mean) / np.sqrt(self._ob_rms.var + self.epsilon),
0. - self.clip_ob, self.clip_ob)
return obs
return obs
def mcts_initialization(self, init_state=None, init_action=None):
    """Initialize the MCTS player and expand/evaluate the root node.

    Args:
        init_state: optional state used to initialize the game tree.
        init_action: optional action; when given it overwrites the last
            sampled action so the policy's own choice is always among the
            root's children.
    """
    self._current_mcts_player.initialize_game(init_state)
    # At the beginning, we expand the root (first node of the tree).
    # This step is necessary as we do not have any other basis to select
    # a child from root.
    first_node = self._current_mcts_player.root.select_leaf()
    # Retrieve the root observation.
    self._gym_state = first_node.observ
    # Normalize and clip the initial observation (root observation).
    self._gym_state = self._norm_clip_ob(np.asarray([self._gym_state]))[0]
    # Call the policy/state-value network (using tf.estimator).
    policy_out = self.call_policy([self._gym_state])
    # Update first node state-value.
    first_node.network_value = policy_out['value'][0]
    # Create a Multivariate Normal Distribution from the given `mean` and
    # `logstd`. This distribution is a replica of the policy distribution
    # that exists in the tf.estimator. We need this distribution to sample
    # a set of actions.
    mcts_dist = multivariate_normal(
        mean=policy_out['mean'][0],
        cov=np.diag(np.power(np.exp(policy_out['logstd'][0]), 2)))
    sampled_actions = self._current_mcts_player.sample_actions(
        mcts_dist=mcts_dist)
    if init_action is not None:
        # always include the action taken by the policy as a choice.
        sampled_actions[-1] = init_action
    # Calculate probabilities (pdf values) for the sampled actions.
    child_probs = mcts_dist.pdf(sampled_actions)
    # update `move_to_action` for the root node.
    for i, a in enumerate(sampled_actions):
        first_node.move_to_action[i] = a
    # Expand each action one by one and populate child node statistics.
    first_iteration = True
    child_reward = np.zeros(0)
    child_observ = np.zeros(0)
    child_state_qpos = np.zeros(0)
    child_state_qvel = np.zeros(0)
    child_done = np.zeros(0)
    # One parallel environment per sampled action: reset it to the root's
    # MuJoCo state (qpos/qvel), take the action, and record the outcome.
    for mcts_env, mcts_action in zip(self._current_mcts_player.tree_env,
                                     sampled_actions):
        mcts_env.reset()
        mcts_env.set_state(first_node.state.qpos, first_node.state.qvel)
        observ, reward, done, _ = mcts_env.step(mcts_action)
        state = mcts_env.sim.get_state()
        if first_iteration:
            child_reward = np.array([reward])
            child_observ = np.array([observ])
            child_state_qpos = np.array([state.qpos])
            child_state_qvel = np.array([state.qvel])
            child_done = np.array([done])
            first_iteration = False
        else:
            child_reward = np.concatenate((child_reward, np.array([reward])))
            child_observ = np.concatenate((child_observ, [observ]))
            child_state_qpos = np.concatenate((child_state_qpos, [state.qpos]))
            child_state_qvel = np.concatenate((child_state_qvel, [state.qvel]))
            child_done = np.concatenate((child_done, np.array([done])))
    # Update the reward value for the selected leaf's children and perform
    # backup step.
    max_num_actions = self._current_mcts_player.max_num_actions
    first_node.child_reward = child_reward[:max_num_actions]
    first_node.move_to_observ = child_observ[:max_num_actions]
    first_node.move_to_state = [(qpos, qvel) for qpos, qvel in zip(
        child_state_qpos[:max_num_actions],
        child_state_qvel[:max_num_actions])]
    first_node.move_to_done = child_done[:max_num_actions]
    # Update the values for all the children by calling the value network
    # (only_normalized: do not update the running obs statistics here).
    network_children = self.call_policy(
        first_node.move_to_observ, only_normalized=True)
    first_node.child_w = network_children['value']
    # Incorporate the results up to root (`backup` step in MCTS).
    first_node.incorporate_results(
        child_probs=child_probs,
        node_value=policy_out['value'][0],
        up_to=first_node)
def run_mcts_trajectory(self, max_horizon):
    """Run a trajectory with length max_horizon using MCTS.

    Args:
        max_horizon: maximum number of steps for the trajectory.

    Side effects:
        Fills the trajectory_* buffers (`states`, `actions`, `values`,
        `neg_logprobs`, `rewards`, `dones`, ...) and computes the
        trajectory returns via `calc_returns`.
    """
    self.initialize_trajectory_data()
    # Take steps in the environment for `max_horizon` number of steps.
    for _ in range(max_horizon):
        # Snapshot the MuJoCo internal state so the step can be replayed.
        self.env_states.append(
            self._current_mcts_player.tree_env[0].sim.get_state())
        # Root exploration noise, then choose a move via tree search.
        self._current_mcts_player.root.inject_noise()
        move = self._current_mcts_player.suggest_move()
        # Append the current observation to the game trajectory.
        self.trajectory_states.append(self._gym_state)
        self._current_mcts_player.play_move(move)
        # `play_move` appends to the player's game_* logs; read back the
        # entries for the move just played.
        mcts_action = self._current_mcts_player.game_actions[-1]
        mcts_value = self._current_mcts_player.game_values[-1]
        mcts_done = self._current_mcts_player.game_dones[-1]
        mcts_prob = self._current_mcts_player.game_probs[-1]
        mcts_reward = self._current_mcts_player.game_rewards[-1]
        mcts_observ = self._current_mcts_player.game_observs[-1]
        mcts_mean = self._current_mcts_player.game_means[-1]
        mcts_logstd = self._current_mcts_player.game_logstd[-1]
        reward = mcts_reward
        # The probabilities are already normalized.
        # NOTE(review): this stores a probability (not -log p) in the
        # `neg_logprobs` buffer -- confirm downstream consumers expect
        # this convention for MCTS samples.
        mcts_neg_logprob = mcts_prob
        self.trajectory_actions.append(mcts_action)
        self.trajectory_values.append(mcts_value)
        self.trajectory_dones.append(mcts_done)
        self.trajectory_neg_logprobs.append(mcts_neg_logprob)
        self.trajectory_means.append(mcts_mean)
        self.trajectory_logstds.append(mcts_logstd)
        # Take the sampled action in the environment and get the reward.
        self._gym_state = mcts_observ
        self._env_done = mcts_done
        # Normalize and clip the next observation.
        self._gym_state = self._norm_clip_ob(np.asarray([self._gym_state]))[0]
        # Update current episode reward and length.
        self._episode_reward += reward
        self._episode_length += 1
        # Update the discounted-return accumulator.
        self._ret = self._ret * self._gamma + reward
        # Update running mean/std for reward.
        self._ret_rms.update(np.asarray(self._ret))
        if self.reward_normalized:
            reward = np.clip(reward / np.sqrt(self._ret_rms.var + self.epsilon),
                             0. - self.clip_rew, self.clip_rew)
        self.trajectory_per_step_rewards.append(reward)
        if self._env_done:
            # Episode finished mid-trajectory: log episode stats, restart
            # the return normalizer, and rebuild the MCTS tree.
            self.trajectory_per_episode_rewards.append(self._episode_reward)
            self.trajectory_per_episode_lengths.append(self._episode_length)
            # Reset return normalizer to zero.
            self._ret = np.zeros(1)
            # Initialize MCTS player and expand root node.
            self.mcts_initialization()
            self._ret[0] = 0.
            self._episode_reward = 0.
            self._episode_length = 0
    self._last_done = self._env_done
    # Get the last state-value (bootstrap value for GAE in calc_returns).
    policy_out = self.call_policy([self._gym_state])
    self._last_value = policy_out['value'][0]
    # If the max_horizon is not enough for one episode, record the reward
    # and length here.
    if not self.trajectory_per_episode_rewards:
        self.trajectory_per_episode_rewards.append(self._episode_reward)
        self.trajectory_per_episode_lengths.append(self._episode_length)
    # Calculate return.
    self.calc_returns()
def run_trajectory(self, max_horizon):
    """Run a trajectory with length max_horizon using a policy network.

    Args:
        max_horizon: maximum number of steps for the trajectory.

    Side effects:
        Fills the trajectory_* buffers (`states`, `actions`, `values`,
        `neg_logprobs`, `rewards`, `dones`, ...) and computes the
        trajectory returns via `calc_returns`.
    """
    self.initialize_trajectory_data()
    # Take steps in the environment for `max_horizon` number of steps.
    for _ in range(max_horizon):
        # Call policy network.
        policy_out = self.call_policy([self._gym_state])
        self.trajectory_states.append(self._gym_state)
        # Snapshot the MuJoCo internal state for replayability.
        self.env_states.append(self.env.sim.get_state())
        # Sample an action from the policy network (`Gaussian Distribution`).
        orig_action = policy_out['action'][0]
        # Perform action clipping if it is enabled. Note that the raw
        # (unsquashed) action is what gets stored in the trajectory.
        action = self.update_action(orig_action)
        self.trajectory_actions.append(orig_action)
        # Get state-value for the current state.
        self.trajectory_values.append(policy_out['value'][0])
        # Calculate negative log probability (if `tanh` clipping is enabled
        # we need to add a correction to log probability).
        # Check: https://arxiv.org/pdf/1801.01290.pdf
        if self._tanh_action_clipping:
            neg_logprobs = policy_out['neg_logprob'][0]
            new_logprobs = -neg_logprobs - np.sum(
                np.log(1.0 - (np.tanh(orig_action)**2.0) + self.epsilon))
            self.trajectory_neg_logprobs.append(-new_logprobs)
        else:
            self.trajectory_neg_logprobs.append(policy_out['neg_logprob'][0])
        # Append the status of current state (done/not done).
        self.trajectory_dones.append(self._env_done)
        self.trajectory_means.append(policy_out['mean'][0])
        self.trajectory_logstds.append(policy_out['logstd'][0])
        # Take the sampled action in the environment and get the reward.
        self._gym_state, reward, self._env_done, _ = self.step(action)
        # Update current episode reward and length.
        self._episode_reward += reward
        self._episode_length += 1
        # Update the discounted-return accumulator.
        self._ret = self._ret * self._gamma + reward
        # Update running mean/std for reward.
        self._ret_rms.update(np.asarray(self._ret))
        if self.reward_normalized:
            reward = np.clip(reward / np.sqrt(self._ret_rms.var + self.epsilon),
                             0. - self.clip_rew, self.clip_rew)
        self.trajectory_per_step_rewards.append(reward)
        # Normalize and clip the next observation.
        self._gym_state = self._norm_clip_ob(np.asarray([self._gym_state]))[0]
        if self._env_done:
            # Episode finished: log stats, reset the environment and the
            # return normalizer.
            self.trajectory_per_episode_rewards.append(self._episode_reward)
            self.trajectory_per_episode_lengths.append(self._episode_length)
            self._gym_state = self.reset()
            self._ret[0] = 0.
            self._episode_reward = 0.
            self._episode_length = 0
    self._last_done = self._env_done
    # Get the last state-value (bootstrap value for GAE in calc_returns).
    policy_out = self.call_policy([self._gym_state])
    self._last_value = policy_out['value'][0]
    # If the max_horizon is not enough for one episode, record the reward
    # and length here.
    if not self.trajectory_per_episode_rewards:
        self.trajectory_per_episode_rewards.append(self._episode_reward)
        self.trajectory_per_episode_lengths.append(self._episode_length)
    # Calculate return.
    self.calc_returns()
def calc_returns(self):
"""Calculate return.
Update `_epi_returns` array for trajectory returns.
"""
# Convert all the arrays to numpy arrays.
self.trajectory_states = np.asarray(
self.trajectory_states, dtype=self.env.observation_space.dtype)
self.trajectory_actions = np.asarray(
self.trajectory_actions, dtype=np.float32)
self.trajectory_values = np.asarray(
self.trajectory_values, dtype=np.float32)
self.trajectory_neg_logprobs = np.asarray(
self.trajectory_neg_logprobs, dtype=np.float32)
self.trajectory_means = np.asarray(self.trajectory_means, dtype=np.float32)
self.trajectory_logstds = np.asarray(
self.trajectory_logstds, dtype=np.float32)
self.trajectory_per_episode_rewards = np.asarray(
self.trajectory_per_episode_rewards, dtype=np.float32)
self.trajectory_per_episode_lengths = np.asarray(
self.trajectory_per_episode_lengths, dtype=np.float32)
self.trajectory_dones = np.asarray(self.trajectory_dones, dtype=np.bool)
self.trajectory_per_step_rewards = np.asarray(
self.trajectory_per_step_rewards, dtype=np.float32)
# Perform calculation.
mb_returns = np.zeros_like(self.trajectory_per_step_rewards)
mb_advs = np.zeros_like(self.trajectory_per_step_rewards)
lastgaelam = 0
for t in reversed(range(len(self.trajectory_per_step_rewards))):
if t == len(self.trajectory_per_step_rewards) - 1:
nextnonterminal = 1.0 - self._last_done
nextvalues = self._last_value
else:
nextnonterminal = 1.0 - self.trajectory_dones[t + 1]
nextvalues = self.trajectory_values[t + 1]
delta = self.trajectory_per_step_rewards[t] + (
self._gamma * nextvalues * nextnonterminal) - (
self.trajectory_values[t])
mb_advs[t] = lastgaelam = delta + (
self._gamma * self._lam * nextnonterminal * lastgaelam)
mb_returns = mb_advs + self.trajectory_values
self.trajectory_returns = mb_returns
def update_estimator(self, test_mode=False):
"""Update the estimator from the most recent checkpoint.
Args:
test_mode: If set, it does not call tf_utils.
"""
if not test_mode:
self._policy = tf_utils.create_predictor(self.estimator,
self.serving_input_fn)
def mcts_sample_enable(self):
"""Indicates whether we should switch to MCTS data sampling.
Returns:
a boolean indicating whether MCTS sampling should start.
"""
if not self._mcts_enable:
return False
if self._random_gen.uniform() <= self._mcts_sampling_frac:
return True
if ((self._sampling_step >= self._mcts_start_step_frac) and
(self._sampling_step < self._mcts_end_step_frac)):
return True
return False
def initialize_episode_data(self):
# If we switch from PPO sampling to MCTS sampling or vice versa,
# reset `episode_length` and `episode_reward` to zero.
# That is, throwing away the data from PPO sampling.
# Also, restart the return normalization. We treat this like starting
# a new episode.
self._episode_length = 0
self._episode_reward = 0.
self._ret = np.zeros(1)
self._ret[0] = 0.
def play(self, max_steps, test_mode=False):
    """Runs max_steps in the environment.

    Chooses between MCTS sampling and policy-network sampling for this
    iteration and fills the trajectory_* buffers accordingly.

    Args:
        max_steps: Maximum number of steps to run.
        test_mode: If set, it does not update the policy.

    Note:
        Results are stored in the trajectory_* attributes; the method
        itself returns None.
    """
    # Update the estimator with the most recent checkpoint.
    self.update_estimator(test_mode)
    if self._first_time_call and self._mcts_enable:
        self.mcts_initialization()
        self._first_time_call = False
    if self.mcts_sample_enable():
        logging.info('MCTS Sampling...')
        # Decay the number of MCTS simulations (floor 4.0) and the
        # temperature threshold (floor 1.0) as training progresses.
        num_mcts_sim = max(
            self._current_mcts_player.num_mcts_sim * self._mcts_sim_decay_factor,
            4.0)
        mcts_temperature = max(
            self._current_mcts_player.temp_threshold *
            self._mcts_sim_decay_factor, 1.0)
        # Update MCTS hyperparameters
        self._current_mcts_player.num_mcts_sim = num_mcts_sim
        self._current_mcts_player.temp_threshold = mcts_temperature
        # If this is the first time performing MCTS sampling,
        # we need to perform a hard reset.
        if not self.mcts_sampling:
            self.initialize_episode_data()
        # Perform MCTS sampling only if the frequency of MCTS sampling
        # limit is met.
        if self._num_mcts_samples % self._mcts_collect_data_freq == 0:
            # Run trajectory for the specified number of steps using MCTS.
            self.run_mcts_trajectory(max_steps)
        self.mcts_sampling = True
        self._num_mcts_samples += 1
    else:
        # Run trajectory for the specified number of steps using policy
        # network.
        logging.info('Policy Sampling...')
        # If the last sampling was MCTS, we need to perform a hard reset.
        if self.mcts_sampling:
            self.initialize_episode_data()
        self.run_trajectory(max_steps)
        self.mcts_sampling = False
    self._sampling_step += 1
|
google-research/google-research
|
polish/env/env.py
|
Python
|
apache-2.0
| 30,514
|
[
"Gaussian"
] |
949edc65b01327cb2a32ab8073ea0763aad2c5d76eda5ad392959fcb6e83ec1e
|
"""
The scene consists of
* Four actors: a rectangle, a box, a cone and a sphere.
The box, the cone and the sphere are above the rectangle.
* Two spotlights: one in the direction of the box, another one in the direction of the sphere.
Both lights are above the box, the cone and the sphere.
"""
import vtk
def main():
    """Render a shadowed scene: a plane with a box, cone and sphere lit
    by two positional spotlights, using VTK's shadow-map render passes."""
    interactor = vtk.vtkRenderWindowInteractor()
    renderWindow = vtk.vtkRenderWindow()
    renderWindow.SetSize(400, 400)
    # Multisampling off and alpha bit planes on are required for the
    # shadow-map passes to work correctly.
    renderWindow.SetMultiSamples(0)
    renderWindow.SetAlphaBitPlanes(1)
    interactor.SetRenderWindow(renderWindow)
    renderer = vtk.vtkOpenGLRenderer()
    renderWindow.AddRenderer(renderer)
    # NOTE(review): this second SetSize overrides the 400x400 above.
    renderWindow.SetSize(640, 480)
    # Ground plane (finely tessellated so shadows sample smoothly).
    rectangleSource = vtk.vtkPlaneSource()
    rectangleSource.SetOrigin(-5.0, 0.0, 5.0)
    rectangleSource.SetPoint1(5.0, 0.0, 5.0)
    rectangleSource.SetPoint2(-5.0, 0.0, -5.0)
    rectangleSource.SetResolution(100, 100)
    rectangleMapper = vtk.vtkPolyDataMapper()
    rectangleMapper.SetInputConnection(rectangleSource.GetOutputPort())
    rectangleMapper.SetScalarVisibility(0)
    # Build the render-pass pipeline: bake the shadow maps, then render
    # the scene with shadows, all under a camera pass.
    shadows = vtk.vtkShadowMapPass()
    seq = vtk.vtkSequencePass()
    passes = vtk.vtkRenderPassCollection()
    passes.AddItem(shadows.GetShadowMapBakerPass())
    passes.AddItem(shadows)
    seq.SetPasses(passes)
    cameraP = vtk.vtkCameraPass()
    cameraP.SetDelegatePass(seq)
    # tell the renderer to use our render pass pipeline
    glrenderer = renderer
    glrenderer.SetPass(cameraP)
    colors = vtk.vtkNamedColors()
    boxColor = colors.GetColor3d("Tomato")
    rectangleColor = colors.GetColor3d("Beige")
    coneColor = colors.GetColor3d("Peacock")
    sphereColor = colors.GetColor3d("Banana")
    rectangleActor = vtk.vtkActor()
    rectangleActor.SetMapper(rectangleMapper)
    rectangleActor.VisibilityOn()
    rectangleActor.GetProperty().SetColor(rectangleColor)
    # Box: use flat (cell) normals only for crisp faceted shading.
    boxSource = vtk.vtkCubeSource()
    boxSource.SetXLength(2.0)
    boxNormals = vtk.vtkPolyDataNormals()
    boxNormals.SetInputConnection(boxSource.GetOutputPort())
    boxNormals.ComputePointNormalsOff()
    boxNormals.ComputeCellNormalsOn()
    boxNormals.Update()
    # Drop the point normals so only the cell normals are used.
    boxNormals.GetOutput().GetPointData().SetNormals(None)
    boxMapper = vtk.vtkPolyDataMapper()
    boxMapper.SetInputConnection(boxNormals.GetOutputPort())
    boxMapper.ScalarVisibilityOff()
    boxActor = vtk.vtkActor()
    boxActor.SetMapper(boxMapper)
    boxActor.VisibilityOn()
    boxActor.SetPosition(-2.0, 2.0, 0.0)
    boxActor.GetProperty().SetColor(boxColor)
    coneSource = vtk.vtkConeSource()
    coneSource.SetResolution(24)
    coneSource.SetDirection(1.0, 1.0, 1.0)
    coneMapper = vtk.vtkPolyDataMapper()
    coneMapper.SetInputConnection(coneSource.GetOutputPort())
    coneMapper.SetScalarVisibility(0)
    coneActor = vtk.vtkActor()
    coneActor.SetMapper(coneMapper)
    coneActor.VisibilityOn()
    coneActor.SetPosition(0.0, 1.0, 1.0)
    coneActor.GetProperty().SetColor(coneColor)
    sphereSource = vtk.vtkSphereSource()
    sphereSource.SetThetaResolution(32)
    sphereSource.SetPhiResolution(32)
    sphereMapper = vtk.vtkPolyDataMapper()
    sphereMapper.SetInputConnection(sphereSource.GetOutputPort())
    sphereMapper.ScalarVisibilityOff()
    sphereActor = vtk.vtkActor()
    sphereActor.SetMapper(sphereMapper)
    sphereActor.VisibilityOn()
    sphereActor.SetPosition(2.0, 2.0, -1.0)
    sphereActor.GetProperty().SetColor(sphereColor)
    renderer.AddViewProp(rectangleActor)
    renderer.AddViewProp(boxActor)
    renderer.AddViewProp(coneActor)
    renderer.AddViewProp(sphereActor)
    # Spotlights.
    # lighting the box.
    l1 = vtk.vtkLight()
    l1.SetPosition(-4.0, 4.0, -1.0)
    l1.SetFocalPoint(boxActor.GetPosition())
    l1.SetColor(1.0, 1.0, 1.0)
    l1.PositionalOn()
    renderer.AddLight(l1)
    l1.SwitchOn()
    # lighting the sphere (magenta light)
    l2 = vtk.vtkLight()
    l2.SetPosition(4.0, 5.0, 1.0)
    l2.SetFocalPoint(sphereActor.GetPosition())
    l2.SetColor(1.0, 0.0, 1.0)
    l2.PositionalOn()
    renderer.AddLight(l2)
    l2.SwitchOn()
    # For each spotlight, add a light frustum wireframe representation and a
    # cone wireframe representation, colored with the light color.
    angle = l1.GetConeAngle()
    if l1.LightTypeIsSceneLight() and l1.GetPositional() and angle < 180.0:  # spotlight
        la = vtk.vtkLightActor()
        la.SetLight(l1)
        renderer.AddViewProp(la)
    angle = l2.GetConeAngle()
    if l2.LightTypeIsSceneLight() and l2.GetPositional() and angle < 180.0:  # spotlight
        la = vtk.vtkLightActor()
        la.SetLight(l2)
        renderer.AddViewProp(la)
    # Gradient background from LightGrey (bottom) to Silver (top).
    renderer.SetBackground2(colors.GetColor3d("Silver"))
    renderer.SetBackground(colors.GetColor3d("LightGrey"))
    renderer.SetGradientBackground(True)
    renderWindow.Render()
    renderWindow.SetWindowName('ShadowsLightsDemo')
    renderer.ResetCamera()
    camera = renderer.GetActiveCamera()
    camera.Azimuth(40.0)
    camera.Elevation(10.0)
    renderWindow.Render()
    interactor.Start()


if __name__ == '__main__':
    main()
|
lorensen/VTKExamples
|
src/Python/Visualization/ShadowsLightsDemo.py
|
Python
|
apache-2.0
| 5,081
|
[
"VTK"
] |
57f2965769f9586ce12fd77abb83b077bb80dd40f90e0bc68d5911951aecced5
|
#!/usr/bin/env python
# Command-line driver for N/O adsorption calculations on a Ru(0001) slab,
# runnable with several electronic-structure backends (gpaw/dacapo/abinit/elk).
from optparse import OptionParser

# Allowed values for the command-line options defined below.
code_choices = ['gpaw', 'dacapo', 'abinit', 'elk']  # calculator backends
adsorbate_choices = ['None', 'N', 'O']  # 'None' -> NO molecule / bare slab
geometry_choices = ['fix', 'relax']     # fixed geometry (from .traj) or relax
mode_choices = ['molecule', 'slab']     # gas-phase reference or slab run
xc_choices = ['PW91', 'LDA', 'PBE']     # exchange-correlation functionals

# NOTE(review): the two adjacent literals after 'system:' concatenate
# implicitly (no '+'); a '\n' after 'system:' looks intended -- confirm.
parser = OptionParser(usage='%prog [options] package.\nExample of call:\n'+
                      'Calculate adsorption on Ru001 system:'
                      'python %prog --code=dacapo\n'+
                      'python %prog --code=dacapo --adsorbate=N\n'+
                      'python %prog --code=dacapo --adsorbate=O\n'+
                      'python %prog --code=dacapo --mode=slab\n'+
                      'python %prog --code=dacapo --mode=slab --adsorbate=N\n'+
                      'python %prog --code=dacapo --mode=slab --adsorbate=O\n',
                      version='%prog 0.1')
parser.add_option('--code', dest="code", type="choice",
                  default=code_choices[0],
                  choices=code_choices,
                  help='code: which code to use.')
parser.add_option('--adsorbate', dest="adsorbate", type="choice",
                  default=adsorbate_choices[0],
                  choices=adsorbate_choices,
                  help='adsorbate.')
parser.add_option('--geometry', dest="geometry", type="choice",
                  default=geometry_choices[-1],
                  choices=geometry_choices,
                  help='geometry: fix geometry (read from traj file) or relax.')
parser.add_option('--mode', dest="mode", type="choice",
                  default=mode_choices[0],
                  choices=mode_choices,
                  help='mode: calculate molecules or slab w/wo molecules.')
parser.add_option('--xc', dest="xc", type="choice",
                  default=xc_choices[-1],
                  choices=xc_choices,
                  help='XC functional.')
parser.add_option('-v', '--verbose', action='store_true',
                  default=False,
                  help='verbose mode.')
# Parsed at import time; the options are consumed in the __main__ guard below.
opt, args = parser.parse_args()
from os import remove
from os.path import exists, join
try:
import numpy as np
except ImportError:
raise SystemExit('numpy is not installed!')
try:
import gpaw
except ImportError:
raise SystemExit('gpaw is not installed!')
from gpaw.utilities.tools import gridspacing2cutoff
try:
import ase
except ImportError:
raise SystemExit('ase is not installed!')
from ase import Atoms, Atom
from ase import molecule
from ase import QuasiNewton
from ase.lattice.surface import hcp0001, add_adsorbate
from ase.constraints import FixAtoms
from ase.io.xyz import read_xyz
from ase.io.trajectory import PickleTrajectory
from ase.io.trajectory import read_trajectory, write_trajectory
import time
from gpaw import setup_paths
setup_paths.insert(0, '.')
# Global numerical settings shared by all runs.
h = 0.2           # real-space grid spacing (Angstrom); also sets PW cutoffs via gridspacing2cutoff
fmax = 0.07       # force-convergence criterion for QuasiNewton relaxations
add_vacuum = 2.0  # additional vacuum
def initialize_parameters(code, width, h):
    """Build the keyword-parameter dict for the requested calculator.

    :param code: backend name, one of 'gpaw', 'dacapo', 'abinit', 'elk'
    :param width: Fermi-distribution smearing width (eV)
    :param h: real-space grid spacing (Angstrom); converted to a
        plane-wave cutoff for the plane-wave codes
    :returns: dict of calculator keyword arguments

    The calculator classes themselves are imported by the callers
    (run_molecule/run_slab), not here; previous versions also computed an
    unused ``conv_param`` which has been removed.
    """
    parameters = {}
    if code == 'gpaw':
        parameters['h'] = h
        parameters['width'] = width
        parameters['stencils'] = (3,3)
        #parameters['eigensolver'] = 'cg'
    elif code == 'dacapo':
        parameters['kT'] = width
        # dacapo is a plane-wave code: translate grid spacing into a cutoff
        parameters['planewavecutoff'] = gridspacing2cutoff(h)
        parameters['densitycutoff'] = parameters['planewavecutoff']*1.2
    elif code == 'abinit':
        parameters['width'] = width
        parameters['ecut'] = gridspacing2cutoff(h)*1.4
    elif code == 'elk':
        # elk uses its own keyword names; defaults noted for reference
        parameters['swidth'] = width
        parameters['stype'] = 3
        parameters['autormt'] = True
        parameters['tforce'] = True  # calculate forces
        parameters['nempty'] = 20    # default 5
        parameters['gmaxvr'] = 16    # default 12
        parameters['rgkmax'] = 7.0   # default 7
        parameters['beta0'] = 0.02   # default 0.05
        parameters['betamax'] = 0.05 # default 0.5
        parameters['maxscl'] = 500   # default 200
    return parameters
def run_molecule(adsorbate, geometry, xc, code):
    """Compute the gas-phase reference molecule for *adsorbate*.

    :param adsorbate: 'None' (-> NO), 'N' (-> N2) or 'O' (-> O2)
    :param geometry: 'fix' = single-point energy on the geometry read from
        the existing trajectory; 'relax' = QuasiNewton optimization
    :param xc: exchange-correlation functional name
    :param code: calculator backend ('gpaw', 'dacapo', 'abinit', 'elk')

    Writes ``<code>_<molecule>.traj`` plus a calculator log file.
    (The pointless ``try: ... except: raise`` wrapper and the dead
    ``rank``/``conv_param`` locals of the original were removed.)
    """
    parameters = initialize_parameters(code, 0.01, h)
    parameters['xc'] = xc
    # map adsorbate choice to (reference molecule, number of bands)
    molecules = {
        'None': ('NO', 8),
        'N': ('N2', 8),
        'O': ('O2', 8),
        }
    for name, nbands in [molecules[adsorbate]]:
        if code != 'elk':
            parameters['nbands'] = nbands
        if geometry == 'fix':
            mol = read_trajectory(code+'_'+name+'.traj')
        else:
            mol = molecule(name)
            mol.center(vacuum=3.0+add_vacuum)
            if name == 'NO':
                # break the symmetry slightly for NO
                mol.translate((0, 0.1, 0))
        # backend-specific calculator class and output-file options
        if code == 'gpaw':
            from gpaw import GPAW as Calculator
            parameters['txt'] = code+'_'+name+'.txt'
            # Possible convergence helpers kept for reference:
            #from gpaw.mixer import Mixer, MixerSum
            #if name == 'N2':
            #    parameters['mixer'] = Mixer(beta=0.1, nmaxold=5, metric='new', weight=100.0)
            #else:
            #    parameters['mixer'] = MixerSum(beta=0.2, nmaxold=5, metric='new', weight=100.0)
        if code == 'dacapo':
            from ase.calculators.dacapo import Dacapo as Calculator
            parameters['txtout'] = code+'_'+name+'.txt'
        if code == 'abinit':
            from ase.calculators.abinit import Abinit as Calculator
            parameters['label'] = code+'_'+name
        if code == 'elk':
            from ase.calculators.elk import ELK as Calculator
            elk_dir = 'elk_'+str(parameters['rgkmax'])
            parameters['dir'] = elk_dir+'_'+name
        calc = Calculator(**parameters)
        mol.set_calculator(calc)
        if geometry == 'fix':
            # single-point energy, then store the result for later reuse
            mol.get_potential_energy()
            traj = PickleTrajectory(code+'_'+name+'.traj', mode='w')
            traj.write(mol)
        else:
            opt = QuasiNewton(mol, logfile=code+'_'+name+'.qn',
                              trajectory=code+'_'+name+'.traj')
            opt.run(fmax=fmax)
def run_slab(adsorbate, geometry, xc, code):
    """Compute the Ru(0001) slab, bare or with an adsorbate.

    :param adsorbate: 'None' (bare slab), 'N' or 'O'
    :param geometry: 'fix' = single-point energy on the geometry read from
        the existing trajectory; 'relax' = QuasiNewton optimization
    :param xc: exchange-correlation functional name
    :param code: calculator backend ('gpaw', 'dacapo', 'abinit', 'elk')

    Writes ``<code>_<name>.traj`` plus a calculator log file.
    (The pointless ``try: ... except: raise`` wrapper and the dead
    ``rank``/``conv_param`` locals of the original were removed.)
    """
    parameters = initialize_parameters(code, 0.1, h)
    parameters['xc'] = xc
    tag = 'Ru001'
    if adsorbate != 'None':
        name = adsorbate + tag
    else:
        name = tag
    if geometry == 'fix':
        slab = read_trajectory(code+'_'+name+'.traj')
    else:
        # initial adsorbate heights above the hcp hollow site (Angstrom)
        adsorbate_heights = {'N': 1.108, 'O': 1.257}
        slab = hcp0001('Ru', size=(2, 2, 4), a=2.72, c=1.58*2.72,
                       vacuum=5.0+add_vacuum,
                       orthogonal=True)
        slab.center(axis=2)
        if adsorbate != 'None':
            add_adsorbate(slab, adsorbate, adsorbate_heights[adsorbate], 'hcp')
        # freeze the two bottom layers (tags count from the surface down)
        slab.set_constraint(FixAtoms(mask=slab.get_tags() >= 3))
    if code != 'elk':
        parameters['nbands'] = 80
    parameters['kpts'] = [4, 4, 1]
    # backend-specific calculator class and output-file options
    if code == 'gpaw':
        from gpaw import GPAW as Calculator
        from gpaw.mixer import Mixer
        parameters['txt'] = code+'_'+name+'.txt'
        parameters['mixer'] = Mixer(beta=0.2, nmaxold=5, weight=100.0)
    if code == 'dacapo':
        from ase.calculators.dacapo import Dacapo as Calculator
        parameters['txtout'] = code+'_'+name+'.txt'
    if code == 'abinit':
        from ase.calculators.abinit import Abinit as Calculator
        parameters['label'] = code+'_'+name
    if code == 'elk':
        from ase.calculators.elk import ELK as Calculator
        parameters['autokpt'] = True
        elk_dir = 'elk_'+str(parameters['rgkmax'])
        parameters['dir'] = elk_dir+'_'+name
    calc = Calculator(**parameters)
    slab.set_calculator(calc)
    if geometry == 'fix':
        # single-point energy, then store the result for later reuse
        slab.get_potential_energy()
        traj = PickleTrajectory(code+'_'+name+'.traj', mode='w')
        traj.write(slab)
    else:
        opt = QuasiNewton(slab, logfile=code+'_'+name+'.qn',
                          trajectory=code+'_'+name+'.traj')
        opt.run(fmax=fmax)
if __name__ == '__main__':
    # Validate the already-parsed command-line options (opt/args are
    # module-level, produced by parser.parse_args() above).
    assert len(args) == 0, 'Error: arguments not accepted'
    assert opt.code in code_choices, opt.code+' not in '+str(code_choices)
    assert opt.adsorbate in adsorbate_choices, opt.adsorbate+' not in '+str(adsorbate_choices)
    assert opt.geometry in geometry_choices, opt.geometry+' not in '+str(geometry_choices)
    assert opt.mode in mode_choices, opt.mode+' not in '+str(mode_choices)
    assert opt.xc in xc_choices, opt.xc+' not in '+str(xc_choices)
    ##if opt.mode == 'molecule':
    ##    assert opt.adsorbate == 'None', 'adsorbate in molecule: not implemented yet'
    # dacapo additionally needs the old (Numeric) ASE 2 package installed.
    if opt.code == 'dacapo':
        try:
            import ASE
        except ImportError:
            raise SystemExit('ASE (2) is not installed!')
    # Dispatch to the requested calculation mode.
    if opt.mode == 'molecule':
        run_molecule(str(opt.adsorbate), opt.geometry, opt.xc, opt.code)
    elif opt.mode == 'slab':
        run_slab(str(opt.adsorbate), opt.geometry, opt.xc, opt.code)
|
qsnake/gpaw
|
gpaw/test/big/Ru001/adsRu001.py
|
Python
|
gpl-3.0
| 10,021
|
[
"ABINIT",
"ASE",
"Elk",
"GPAW"
] |
e9f090c9b716439bcaae0db63c228265add92ce578f10cfe1b9c6ff3062d5c56
|
# GromacsWrapper -- cbook.py
# Copyright (c) 2009 Oliver Beckstein <orbeckst@gmail.com>
# Released under the GNU Public License 3 (or higher, your choice)
"""
:mod:`gromacs.cbook` -- Gromacs Cook Book
=========================================
The :mod:`~gromacs.cbook` (cook book) module contains short recipes for tasks
that are often repeated. In the simplest case this is just one of the
gromacs tools with a certain set of default command line options.
By abstracting and collecting these invocations here, errors can be
reduced and the code snippets can also serve as canonical examples for
how to do simple things.
Miscellaneous canned Gromacs commands
-------------------------------------
Simple commands with new default options so that they solve a specific
problem (see also `Manipulating trajectories and structures`_):
.. function:: rmsd_backbone([s="md.tpr", f="md.xtc"[, ...]])
Computes the RMSD of the "Backbone" atoms after fitting to the
"Backbone" (including both translation and rotation).
Manipulating trajectories and structures
----------------------------------------
Standard invocations for manipulating trajectories.
.. function:: trj_compact([s="md.tpr", f="md.xtc", o="compact.xtc"[, ...]])
Writes an output trajectory or frame with a compact representation
of the system centered on the protein. It centers on the group
"Protein" and outputs the whole "System" group.
.. function:: trj_xyfitted([s="md.tpr", f="md.xtc"[, ...]])
Writes a trajectory centered and fitted to the protein in the XY-plane only.
This is useful for membrane proteins. The system *must* be oriented so that
the membrane is in the XY plane. The protein backbone is used for the least
square fit, centering is done for the whole protein, but this can be
changed with the *input* = ``('backbone', 'protein','system')`` keyword.
.. Note:: Gromacs 4.x only
.. autofunction:: trj_fitandcenter
.. autofunction:: cat
.. autoclass:: Frames
:members:
.. autoclass:: Transformer
:members:
.. autofunction:: get_volume
Processing output
-----------------
There are cases when a script has to do different things depending
on the output from a Gromacs tool.
For instance, a common case is to check the total charge after
grompping a tpr file. The ``grompp_qtot`` function does just that.
.. autofunction:: grompp_qtot
.. autofunction:: get_volume
.. autofunction:: parse_ndxlist
Working with topologies and mdp files
-------------------------------------
.. autofunction:: create_portable_topology
.. autofunction:: edit_mdp
.. autofunction:: add_mdp_includes
.. autofunction:: grompp_qtot
Working with index files
------------------------
Manipulation of index files (``ndx``) can be cumbersome because the
``make_ndx`` program is not very sophisticated (yet) compared to
full-fledged atom selection expression as available in Charmm_, VMD_, or
MDAnalysis_. Some tools help in building and interpreting index files.
.. SeeAlso:: The :class:`gromacs.formats.NDX` class can solve a number
of index problems in a cleaner way than the classes and
functions here.
.. autoclass:: IndexBuilder
:members: combine, gmx_resid
.. autofunction:: parse_ndxlist
.. autofunction:: get_ndx_groups
.. autofunction:: make_ndx_captured
.. _MDAnalysis: http://mdanalysis.org
.. _VMD: http://www.ks.uiuc.edu/Research/vmd/current/ug/node87.html
.. _Charmm: http://www.charmm.org/html/documentation/c35b1/select.html
File editing functions
----------------------
It is often rather useful to be able to change parts of a template
file. For specialized cases the two following functions are useful:
.. autofunction:: edit_mdp
.. autofunction:: edit_txt
"""
# Right now the simplest thing to do is to just create instances with pre-set
# values; this works fine and is succinct but has some disadvantages:
# * the underlying gromacs tool is executed to extract the help string; this
# adds to the import time
# * adding documentation is awkward
#
# For more complicated cases one is probably better off by properly deriving a
# new class and set arguments explicitly in init (using kwargs['flag'] =
# default) ... or I can write some meta(??) class to do this nicely
from __future__ import absolute_import, with_statement
__docformat__ = "restructuredtext en"
import sys
import os
import re
import warnings
import tempfile
import shutil
import glob
import six
import logging
logger = logging.getLogger('gromacs.cbook')
import gromacs
from .exceptions import GromacsError, BadParameterWarning, MissingDataWarning, GromacsValueWarning, GromacsImportWarning
from . import tools
from . import utilities
from .utilities import asiterable
def _define_canned_commands():
    """Define functions for the top level name space.

    Definitions are collected here so that they can all be wrapped in
    a try-except block that avoids code failing when the Gromacs tools
    are not available --- in some cases they are not necessary to use
    parts of GromacsWrapper.

    .. Note:: Any function defined here **must be listed in ``global``**!
    """
    global trj_compact, rmsd_backbone, trj_fitted, trj_xyfitted
    # Each "canned command" is a pre-configured Gromacs tool instance; the
    # doc= argument becomes the wrapper's user-visible documentation.
    # Compact PBC representation of the system, centered on the protein.
    trj_compact = tools.Trjconv(ur='compact', center=True, boxcenter='tric', pbc='mol',
                                input=('protein','system'),
                                doc="""
Writes a compact representation of the system centered on the protein""")
    # Backbone RMSD after a rot+trans least-squares fit to the backbone.
    rmsd_backbone = tools.G_rms(what='rmsd', fit='rot+trans',
                                input=('Backbone','Backbone'),
                                doc="""
Computes RMSD of backbone after fitting to the backbone.""")
    # Fit-only trajectory (no centering; see the doc text for how to center).
    trj_fitted = tools.Trjconv(fit='rot+trans',
                               input=('backbone', 'system'),
                               doc="""
Writes a trajectory fitted to the protein backbone.

Note that this does *not* center; if center is required, the *input*
selection should have the group to be centered on in second position,
e.g. ``input = ('backbone', 'Protein', System')``.
""")
    # XY-plane-restricted fit, for membrane systems. Gromacs 4.x only.
    trj_xyfitted = tools.Trjconv(fit='rotxy+transxy',
                                 input=('backbone', 'protein','system'),
                                 doc="""
Writes a trajectory fitted to the protein in the XY-plane only.

This is useful for membrane proteins. The system *must* be oriented so
that the membrane is in the XY plane. The protein backbone is used
for the least square fit, centering is done for the whole protein.

Note that centering together with fitting does not always work well
and that one sometimes need two runs of trjconv: one to center and
one to fit.

.. Note:: Gromacs 4.x only""")
    # end of _define_canned_commands
# Instantiating the canned commands executes the Gromacs binaries (to pull
# their help text), which fails when Gromacs is not installed -- degrade to
# a warning instead of breaking the import of this module.
try:
    _define_canned_commands()
except (OSError, ImportError, AttributeError, GromacsError):
    msg = ("Failed to define a number of commands in gromacs.cbook. Most "
           "likely the Gromacs installation cannot be found --- set GMXRC in "
           "~/.gromacswrapper.cfg or source GMXRC directly")
    warnings.warn(msg, category=GromacsImportWarning)
    logger.error(msg)
finally:
    # The factory is single-use; remove it from the module namespace.
    del _define_canned_commands
def trj_fitandcenter(xy=False, **kwargs):
    """Center everything and make a compact representation (pass 1) and fit the system to a reference (pass 2).

    :Keywords:
       *s*
           input structure file (tpr file required to make molecule whole);
           if a list or tuple is provided then s[0] is used for pass 1 (should be a tpr)
           and s[1] is used for the fitting step (can be a pdb of the whole system)

           If a second structure is supplied then it is assumed that the fitted
           trajectory should *not* be centered.
       *f*
           input trajectory
       *o*
           output trajectory
       *input*
           A list with three groups. The default is ``['backbone', 'protein','system']``.
           The fit command uses all three (1st for least square fit,
           2nd for centering, 3rd for output), the centered/make-whole stage use
           2nd for centering and 3rd for output.
       *input1*
           If *input1* is supplied then *input* is used exclusively
           for the fitting stage (pass 2) and *input1* for the centering (pass 1).
       *n*
           Index file used for pass 1 and pass 2.
       *n1*
           If *n1* is supplied then index *n1* is only used for pass 1
           (centering) and *n* for pass 2 (fitting).
       *xy* : boolean
           If ``True`` then only do a rot+trans fit in the xy plane
           (good for membrane simulations); default is ``False``.
       *kwargs*
           All other arguments are passed to :class:`~gromacs.tools.Trjconv`.

    Note that here we first center the protein and create a compact box, using
    ``-pbc mol -ur compact -center -boxcenter tric`` and write an intermediate
    xtc. Then in a second pass we perform a rotation+translation fit (or
    restricted to the xy plane if *xy* = ``True`` is set) on the intermediate
    xtc to produce the final trajectory. Doing it in this order has the
    disadvantage that the solvent box is rotating around the protein but the
    opposite order (with center/compact second) produces strange artifacts
    where columns of solvent appear cut out from the box---it probably means
    that after rotation the information for the periodic boundaries is not
    correct any more.

    Most kwargs are passed to both invocations of
    :class:`gromacs.tools.Trjconv` so it does not really make sense to use eg
    *skip*; in this case do things manually.

    By default the *input* to the fit command is ('backbone',
    'protein','system'); the compact command always uses the second and third
    group for its purposes or if this fails, prompts the user.

    Both steps cannot be performed in one pass; this is a known limitation of
    ``trjconv``. An intermediate temporary XTC files is generated which should
    be automatically cleaned up unless bad things happened.

    The function tries to honour the input/output formats. For instance, if you
    want trr output you need to supply a trr file as input and explicitly give
    the output file also a trr suffix.

    .. Note:: For big trajectories it can **take a very long time**
              and consume a **large amount of temporary diskspace**.

    We follow the `g_spatial documentation`_ in preparing the trajectories::

       trjconv -s a.tpr -f a.xtc -o b.xtc -center -boxcenter tric -ur compact -pbc mol
       trjconv -s a.tpr -f b.xtc -o c.xtc -fit rot+trans

    .. _`g_spatial documentation`: http://www.gromacs.org/Documentation/Gromacs_Utilities/g_spatial
    """
    if xy:
        fitmode = 'rotxy+transxy'
        kwargs.pop('fit', None)
    else:
        fitmode = kwargs.pop('fit', 'rot+trans')  # user can use progressive, too
    intrj = kwargs.pop('f', None)
    # Get the correct suffix for the intermediate step: only trr will
    # keep velocities/forces!
    # BUGFIX: os.path.splitext returns the extension *with* the leading dot
    # ('.trr'), so the old comparison against ('xtc', 'trr') never matched and
    # trr input was always downgraded to xtc (losing velocities/forces).
    suffix = os.path.splitext(intrj)[1]
    if suffix not in ('.xtc', '.trr'):
        suffix = '.xtc'
    outtrj = kwargs.pop('o', None)
    ndx = kwargs.pop('n', None)
    ndxcompact = kwargs.pop('n1', ndx)
    structures = kwargs.pop('s', None)
    if type(structures) in (tuple, list):
        try:
            compact_structure, fit_structure = structures
        except ValueError:
            raise ValueError("argument s must be a pair of tpr/pdb files or a single structure file")
    else:
        compact_structure = fit_structure = structures
    inpfit = kwargs.pop('input', ('backbone', 'protein','system'))
    try:
        _inpcompact = inpfit[1:]    # use 2nd and 3rd group for compact
    except TypeError:
        _inpcompact = None
    inpcompact = kwargs.pop('input1', _inpcompact)  # ... or the user supplied ones
    fd, tmptrj = tempfile.mkstemp(suffix=suffix, prefix='pbc_compact_')
    # BUGFIX: close the descriptor returned by mkstemp immediately; only the
    # path is needed (trjconv writes the file itself) and leaving it open
    # leaked one fd per call.
    os.close(fd)
    logger.info("Input structure for PBC: {compact_structure!r}".format(**vars()))
    logger.info("Input structure for fit: {fit_structure!r}".format(**vars()))
    logger.info("Input trajectory: {intrj!r}".format(**vars()))
    logger.info("Output trajectory: {outtrj!r}".format(**vars()))
    logger.debug("Writing temporary trajectory {tmptrj!r} (will be auto-cleaned).".format(**vars()))
    sys.stdout.flush()
    try:
        # pass 1: make molecules whole, center, compact box
        gromacs.trjconv(s=compact_structure, f=intrj, o=tmptrj, n=ndxcompact,
                        ur='compact', center=True, boxcenter='tric', pbc='mol',
                        input=inpcompact, **kwargs)
        # explicitly set pbc="none" for the fitting stage (anything else will produce rubbish and/or
        # complaints from Gromacs)
        kwargs['pbc'] = "none"
        if compact_structure == fit_structure:
            # fit as usual, including centering
            # (Is center=True really necessary? -- note, if I remove center=True then
            # I MUST fiddle inpfit as below!!)
            gromacs.trjconv(s=fit_structure, f=tmptrj, o=outtrj, n=ndx, fit=fitmode, center=True, input=inpfit, **kwargs)
        else:
            # make sure that we fit EXACTLY as the user wants
            inpfit = [inpfit[0], inpfit[-1]]
            gromacs.trjconv(s=fit_structure, f=tmptrj, o=outtrj, n=ndx, fit=fitmode, input=inpfit, **kwargs)
    finally:
        utilities.unlink_gmx(tmptrj)
def cat(prefix="md", dirname=os.path.curdir, partsdir="parts", fulldir="full",
        resolve_multi="pass"):
    """Concatenate all parts of a simulation.

    The xtc, trr, and edr files in *dirname* such as prefix.xtc,
    prefix.part0002.xtc, prefix.part0003.xtc, ... are

    1) moved to the *partsdir* (under *dirname*)
    2) concatenated with the Gromacs tools to yield prefix.xtc, prefix.trr,
       prefix.edr, prefix.gro (or prefix.md) in *dirname*
    3) Store these trajectories in *fulldir*

    .. Note:: Trajectory files are *never* deleted by this function to avoid
              data loss in case of bugs. You will have to clean up yourself
              by deleting *dirname*/*partsdir*.

              Symlinks for the trajectories are *not* handled well and
              break the function. Use hard links instead.

    .. Warning:: If an exception occurs when running this function then make
                 doubly and triply sure where your files are before running
                 this function again; otherwise you might **overwrite data**.
                 Possibly you will need to manually move the files from *partsdir*
                 back into the working directory *dirname*; this should only overwrite
                 generated files so far but *check carefully*!

    :Keywords:
        *prefix*
            deffnm of the trajectories [md]
        *resolve_multi*
            how to deal with multiple "final" gro or pdb files: normally there should
            only be one but in case of restarting from the checkpoint of a finished
            simulation one can end up with multiple identical ones.

            - "pass" : do nothing and log a warning
            - "guess" : take prefix.pdb or prefix.gro if it exists, otherwise the one of
              prefix.partNNNN.gro|pdb with the highest NNNN
        *dirname*
            change to *dirname* and assume all trajectories are located there [.]
        *partsdir*
             directory where to store the input files (they are moved out of the way);
             *partsdir* must be manually deleted [parts]
        *fulldir*
             directory where to store the final results [full]
    """
    # Map file extension -> tool that knows how to concatenate that format.
    gmxcat = {'xtc': gromacs.trjcat,
              'trr': gromacs.trjcat,
              'edr': gromacs.eneconv,
              'log': utilities.cat,
              }
    def _cat(prefix, ext, partsdir=partsdir, fulldir=fulldir):
        # Concatenate all parts for one extension; returns the output path
        # or None when no input files exist.
        filenames = glob_parts(prefix, ext)
        if ext.startswith('.'):
            ext = ext[1:]
        outfile = os.path.join(fulldir, prefix + '.' + ext)
        if not filenames:
            return None
        nonempty_files = []
        for f in filenames:
            # empty files would make the Gromacs tools fail -- skip them
            if os.stat(f).st_size == 0:
                # NOTE(review): logger.warn is deprecated in favour of logger.warning
                logger.warn("File {f!r} is empty, skipping".format(**vars()))
                continue
            if os.path.islink(f):
                # TODO: re-write the symlink to point to the original file
                errmsg = "Symbolic links do not work (file %(f)r), sorry. " \
                    "CHECK LOCATION OF FILES MANUALLY BEFORE RUNNING gromacs.cbook.cat() AGAIN!" % vars()
                logger.exception(errmsg)
                raise NotImplementedError(errmsg)
            # move the part out of the way first; it is concatenated from there
            shutil.move(f, partsdir)
            nonempty_files.append(f)
        # f is a bare filename (relative glob), so joining with partsdir
        # points at the file's new location after the move above
        filepaths = [os.path.join(partsdir, f) for f in nonempty_files]
        gmxcat[ext](f=filepaths, o=outfile)
        return outfile
    _resolve_options = ("pass", "guess")
    if not resolve_multi in _resolve_options:
        raise ValueError("resolve_multi must be one of %(_resolve_options)r, "
                         "not %(resolve_multi)r" % vars())
    if fulldir == os.path.curdir:
        wmsg = "Using the current directory as fulldir can potentially lead to data loss if you run this function multiple times."
        logger.warning(wmsg)
        warnings.warn(wmsg, category=BadParameterWarning)
    with utilities.in_dir(dirname, create=False):
        utilities.mkdir_p(partsdir)
        utilities.mkdir_p(fulldir)
        # concatenatable formats first
        for ext in ('log', 'edr', 'trr', 'xtc'):
            logger.info("[%(dirname)s] concatenating %(ext)s files...", vars())
            outfile = _cat(prefix, ext, partsdir)
            logger.info("[%(dirname)s] created %(outfile)r", vars())
        # final-structure files cannot be concatenated: pick one
        for ext in ('gro', 'pdb'):  # XXX: ugly, make method out of parts?
            filenames = glob_parts(prefix, ext)
            if len(filenames) == 0:
                continue  # goto next ext
            elif len(filenames) == 1:
                pick = filenames[0]
            else:
                if resolve_multi == "pass":
                    logger.warning("[%(dirname)s] too many output structures %(filenames)r, "
                                   "cannot decide which one --- resolve manually!", vars())
                    for f in filenames:
                        shutil.move(f, partsdir)
                    continue  # goto next ext
                elif resolve_multi == "guess":
                    pick = prefix + '.' + ext
                    if not pick in filenames:
                        pick = filenames[-1]  # filenames are ordered with highest parts at end
            final = os.path.join(fulldir, prefix + '.' + ext)
            shutil.copy(pick, final)  # copy2 fails on nfs with Darwin at least
            for f in filenames:
                shutil.move(f, partsdir)
            logger.info("[%(dirname)s] collected final structure %(final)r "
                        "(from %(pick)r)", vars())
        partsdirpath = utilities.realpath(dirname, partsdir)
        logger.warn("[%(dirname)s] cat() complete in %(fulldir)r but original files "
                    "in %(partsdirpath)r must be manually removed", vars())
def glob_parts(prefix, ext):
    """Collect the files of a continuation run (base file plus
    ``prefix.partNNNN.ext`` pieces), roughly sorted by name."""
    suffix = ext[1:] if ext.startswith('.') else ext
    matches = glob.glob('{0}.{1}'.format(prefix, suffix))
    matches += glob.glob('{0}.part[0-9][0-9][0-9][0-9].{1}'.format(prefix, suffix))
    matches.sort()  # at least some rough sorting...
    return matches
class Frames(object):
    """A iterator that transparently provides frames from a trajectory.

    The iterator chops a trajectory into individual frames for
    analysis tools that only work on separate structures such as
    ``gro`` or ``pdb`` files. Instead of turning the whole trajectory
    immediately into pdb files (and potentially filling the disk), the
    iterator can be instructed to only provide a fixed number of
    frames and compute more frames when needed.

    .. Note:: Setting a limit on the number of frames on disk can lead
              to longish waiting times because ``trjconv`` must
              re-seek to the middle of the trajectory and the only way
              it can do this at the moment is by reading frames
              sequentially. This might still be preferrable to filling
              up a disk, though.

    .. Warning:: The *maxframes* option is not implemented yet; use
                 the *dt* option or similar to keep the number of frames
                 manageable.
    """
    def __init__(self, structure, trj, maxframes=None, format='pdb', **kwargs):
        """Set up the Frames iterator.

        :Arguments:
           structure
              name of a structure file (tpr, pdb, ...)
           trj
              name of the trajectory (xtc, trr, ...)
           format
              output format for the frames, eg "pdb" or "gro" [pdb]
           maxframes : int
              maximum number of frames that are extracted to disk at
              one time; set to ``None`` to extract the whole trajectory
              at once. [``None``]
           kwargs
              All other arguments are passed to
              `class:~gromacs.tools.Trjconv`; the only options that
              cannot be changed are *sep* and the output file name *o*.
        """
        self.structure = structure  # tpr or equivalent
        self.trj = trj              # xtc, trr, ...
        self.maxframes = maxframes
        if self.maxframes is not None:
            raise NotImplementedError('sorry, maxframes feature not implemented yet')
        self.framedir = tempfile.mkdtemp(prefix="Frames_", suffix='_'+format)
        self.frameprefix = os.path.join(self.framedir, 'frame')
        self.frametemplate = self.frameprefix + '%d' + '.' + format  # depends on trjconv
        self.frameglob = self.frameprefix + '*' + '.' + format
        kwargs['sep'] = True
        kwargs['o'] = self.frameprefix + '.' + format
        kwargs.setdefault('input', ('System',))
        self.extractor = tools.Trjconv(s=self.structure, f=self.trj, **kwargs)
        #: Holds the current frame number of the currently extracted
        #: batch of frames. Increases when iterating.
        self.framenumber = 0
        #: Total number of frames read so far; only important when *maxframes* > 0 is used.
        self.totalframes = 0
    def extract(self):
        """Extract frames from the trajectory to the temporary directory."""
        # XXX: extract everything at the moment, logic for maxframes not done yet
        self.extractor.run()
    @property
    def all_frames(self):
        """Unordered list of all frames currently held on disk."""
        return glob.glob(self.frameglob)
    @property
    def current_framename(self):
        """Filename of the frame selected by :attr:`framenumber`."""
        return self.frametemplate % self.framenumber
    def __iter__(self):
        """Primitive iterator."""
        frames = self.all_frames
        if len(frames) == 0:
            self.extract()
            frames = self.all_frames
        # filenames are 'Frame0.pdb', 'Frame11.pdb', ... so I must
        # order manually because glob does not give it in sequence.
        # BUGFIX: use range instead of the Python-2-only xrange so the class
        # also works on Python 3 (the module already targets 2/3 via six).
        for i in range(len(frames)):
            self.framenumber = i
            yield self.current_framename
        self.totalframes += len(frames)
    def delete_frames(self):
        """Delete all frames."""
        for frame in glob.glob(self.frameglob):
            os.unlink(frame)
    def cleanup(self):
        """Clean up all temporary frames (which can be HUGE)."""
        shutil.rmtree(self.framedir)
        self.framedir = None
    def __del__(self):
        # Robustness fix: if __init__ raised before framedir was assigned,
        # getattr avoids a spurious AttributeError during garbage collection.
        if getattr(self, 'framedir', None) is not None:
            self.cleanup()
# Working with topologies
# -----------------------
# grompp that does not raise an exception; setting up runs the command to get the docs so
# we only want to do this once at the module level and not inside a function that can be called
# repeatedly
grompp_warnonly = tools.Grompp(failure="warn")
# grompp_warnonly.__doc__ += "\n\ngrompp wrapper that only warns on failure but does not raise :exc:`GromacsError`"
def grompp_qtot(*args, **kwargs):
    r"""Run ``gromacs.grompp`` and return the total charge of the system.

    :Arguments:
       The arguments are the ones one would pass to :func:`gromacs.grompp`.
    :Returns:
       The total charge as reported (0 if no charge line is found).
    :Raises:
       :exc:`GromacsError` when grompp itself fails.

    Some things to keep in mind:

    * The stdout output of grompp is only shown when an error occurs. For
      debugging, look at the log file or screen output and try running the
      normal :func:`gromacs.grompp` command and analyze the output if the
      debugging messages are not sufficient.

    * Check that ``qtot`` is correct. Because the function is based on pattern
      matching of the informative output of :program:`grompp` it can break when
      the output format changes. This version recognizes lines like ::

        ' System has non-zero total charge: -4.000001e+00'

      using the regular expression
      :regexp:`System has non-zero total charge: *(?P<qtot>[-+]?\d*\.\d+([eE][-+]\d+)?)`.
    """
    qtot_pattern = re.compile(r"System has non-zero total charge: *(?P<qtot>[-+]?\d*\.\d+([eE][-+]\d+)?)")
    # make sure to capture ALL output
    kwargs['stdout'] = False
    kwargs['stderr'] = False
    rc, output, error = grompp_warnonly(*args, **kwargs)
    gmxoutput = "\n".join([x for x in [output, error] if x is not None])
    if rc != 0:
        # error occured and we want to see the whole output for debugging
        # (the redundant function-local ``import sys`` was removed; sys is
        # imported at module level)
        msg = "grompp_qtot() failed. See warning and screen output for clues."
        logger.error(msg)
        sys.stderr.write("=========== grompp (stdout/stderr) ============\n")
        sys.stderr.write(gmxoutput)
        sys.stderr.write("===============================================\n")
        sys.stderr.flush()
        raise GromacsError(rc, msg)
    qtot = 0
    for line in gmxoutput.split('\n'):
        m = qtot_pattern.search(line)
        if m:
            qtot = float(m.group('qtot'))
            break
    logger.info("system total charge qtot = {qtot!r}".format(**vars()))
    return qtot
def _mdp_include_string(dirs):
"""Generate a string that can be added to a mdp 'include = ' line."""
include_paths = [os.path.expanduser(p) for p in dirs]
return ' -I'.join([''] + include_paths)
def add_mdp_includes(topology=None, kwargs=None):
    """Set the mdp *include* key in the *kwargs* dict.

    1. Add the directory containing *topology*.
    2. Add all directories appearing under the key *includes*
    3. Generate a string of the form "-Idir1 -Idir2 ..." that
       is stored under the key *include* (the corresponding
       mdp parameter)

    By default, the directories ``.`` and ``..`` are also added to the
    *include* string for the mdp; when fed into
    :func:`gromacs.cbook.edit_mdp` it will result in a line such as ::

      include = -I. -I.. -I../topology_dir ....

    Note that the user can always override the behaviour by setting
    the *include* keyword herself; in this case this function does
    nothing.

    If no *kwargs* were supplied then a dict is generated with the
    single *include* entry.

    :Arguments:
       *topology* : top filename
          Topology file; the name of the enclosing directory is added
          to the include path (if supplied) [``None``]
       *kwargs* : dict
          Optional dictionary of mdp keywords; will be modified in place.
          If it contains the *includes* keyword with either a single string
          or a list of strings then these paths will be added to the
          include statement.
    :Returns: *kwargs* with the *include* keyword added if it did not
              exist previously; if the keyword already existed, nothing
              happens.

    .. Note:: The *kwargs* dict is **modified in place**. This
              function is a bit of a hack. It might be removed once
              all setup functions become methods in a nice class.
    """
    kwargs = {} if kwargs is None else kwargs
    # always search the current and the parent directory
    search_dirs = ['.', '..']
    if topology is not None:
        # half-hack: find additional itps in the same directory as the topology
        search_dirs.append(os.path.dirname(topology))
    # 'includes' may be a single string or a list; consume it either way
    search_dirs.extend(asiterable(kwargs.pop('includes', [])))
    # setdefault: do nothing if the user already defined 'include';
    # note that this modifies the input dict in place
    kwargs.setdefault('include', _mdp_include_string(search_dirs))
    return kwargs
def filter_grompp_options(**kwargs):
    """Returns one dictionary only containing valid :program:`grompp` options and everything else.

    Option list is hard coded and based on :class:`~gromacs.tools.grompp` 4.5.3.

    :Returns: ``(grompp_dict, other_dict)``

    .. versionadded:: 0.2.4
    """
    grompp_options = ('f','po','c','r','rb','n','p','pp','o','t','e', # files
                      'h', 'noh', 'version', 'noversion', 'nice', 'v', 'nov',
                      'time', 'rmvsbds', 'normvsbds', 'maxwarn', 'zero', 'nozero',
                      'renum', 'norenum')
    grompp = {}
    other = {}
    # route every keyword to exactly one of the two result dicts
    for option, value in kwargs.items():
        bucket = grompp if option in grompp_options else other
        bucket[option] = value
    return grompp, other
def create_portable_topology(topol, struct, **kwargs):
    """Create a processed topology.

    The processed (or portable) topology file does not contain any
    ``#include`` statements and hence can be easily copied around. It
    also makes it possible to re-grompp without having any special itp
    files available.

    :Arguments:
       *topol*
           topology file
       *struct*
           coordinate (structure) file

    :Keywords:
       *processed*
           name of the new topology file; if not set then it is named like
           *topol* but with ``pp_`` prepended
       *includes*
           path or list of paths of directories in which itp files are
           searched for
       *grompp_kwargs**
           other options for :program:`grompp` such as ``maxwarn=2`` can
           also be supplied

    :Returns: full path to the processed topology
    """
    # default output name: "pp_<topol>" placed next to the original topology
    _topoldir, _topol = os.path.split(topol)
    processed = kwargs.pop('processed', os.path.join(_topoldir, 'pp_'+_topol))
    # separate genuine grompp options from mdp-related keywords (e.g. 'includes')
    grompp_kwargs, mdp_kwargs = filter_grompp_options(**kwargs)
    mdp_kwargs = add_mdp_includes(topol, mdp_kwargs)
    # A minimal throw-away mdp file is sufficient to make grompp emit the
    # pre-processed topology via -pp; only the include path matters here.
    # NOTE(review): re-opening a NamedTemporaryFile by name (grompp reads
    # mdp.name while it is still open) does not work on Windows -- confirm
    # whether that platform needs to be supported.
    with tempfile.NamedTemporaryFile(suffix='.mdp', mode='wb') as mdp:
        mdp.write('; empty mdp file\ninclude = {include!s}\n'.format(**mdp_kwargs).encode('utf-8'))
        mdp.flush()
        grompp_kwargs['p'] = topol
        grompp_kwargs['pp'] = processed
        grompp_kwargs['f'] = mdp.name
        grompp_kwargs['c'] = struct
        grompp_kwargs['v'] = False
        try:
            gromacs.grompp(**grompp_kwargs)
        finally:
            # grompp drops these by-products in the current directory; clean up
            utilities.unlink_gmx('topol.tpr', 'mdout.mdp')
    return utilities.realpath(processed)
def get_volume(f):
    """Return the volume in nm^3 of structure file *f*.

    (Uses :func:`gromacs.editconf`; error handling is not good)

    :Arguments:
       *f*
          structure file readable by :program:`editconf`
    :Returns: volume as a float (nm^3), parsed from the ``Volume:`` line
              of the editconf output
    """
    fd, temp = tempfile.mkstemp('.gro')
    # mkstemp returns an open OS-level file descriptor; only the filename is
    # used below, so close it immediately to avoid leaking a descriptor per call.
    os.close(fd)
    try:
        rc, out, err = gromacs.editconf(f=f, o=temp, stdout=False)
    finally:
        os.unlink(temp)
    # editconf reports the box volume on a line such as "Volume: 123.4 nm^3"
    return [float(x.split()[1]) for x in out.splitlines()
            if x.startswith('Volume:')][0]
# Editing textual input files
# ---------------------------
def edit_mdp(mdp, new_mdp=None, extend_parameters=None, **substitutions):
    r"""Change values in a Gromacs mdp file.

    Parameters and values are supplied as substitutions, eg ``nsteps=1000``.

    By default the template mdp file is **overwritten in place**.

    If a parameter does not exist in the template then it cannot be substituted
    and the parameter/value pair is returned. The user has to check the
    returned list in order to make sure that everything worked as expected. At
    the moment it is not possible to automatically append the new values to the
    mdp file because of ambiguities when having to replace dashes in parameter
    names with underscores (see the notes below on dashes/underscores).

    If a parameter is set to the value ``None`` then it will be ignored.

    :Arguments:
        *mdp* : filename
            filename of input (and output filename of ``new_mdp=None``)
        *new_mdp* : filename
            filename of alternative output mdp file [None]
        *extend_parameters* : string or list of strings
            single parameter or list of parameters for which the new values
            should be appended to the existing value in the mdp file. This
            makes mostly sense for a single parameter, namely 'include', which
            is set as the default. Set to ``[]`` to disable. ['include']
        *substitutions*
            parameter=value pairs, where parameter is defined by the Gromacs
            mdp file; dashes in parameter names have to be replaced by
            underscores. If a value is a list-like object then the items are
            written as a sequence, joined with spaces, e.g. ::

               ref_t=[310,310,310] ---> ref_t = 310 310 310

    :Returns:
        Dict of parameters that have *not* been substituted.

    **Example** ::

       edit_mdp('md.mdp', new_mdp='long_md.mdp', nsteps=100000, nstxtcout=1000, lincs_iter=2)

    .. Note::

       * Dashes in Gromacs mdp parameters have to be replaced by an underscore
         when supplied as python keyword arguments (a limitation of python). For example
         the MDP syntax is ``lincs-iter = 4`` but the corresponding keyword would be
         ``lincs_iter = 4``.
       * If the keyword is set as a dict key, eg ``mdp_params['lincs-iter']=4`` then one
         does not have to substitute.
       * Parameters *aa_bb* and *aa-bb* are considered the same (although this should
         not be a problem in practice because there are no mdp parameters that only
         differ by a underscore).
       * This code is more compact in ``Perl`` as one can use ``s///`` operators:
         ``s/^(\s*${key}\s*=\s*).*/$1${val}/``

    .. SeeAlso:: One can also load the mdp file with
                 :class:`gromacs.formats.MDP`, edit the object (a dict), and save it again.
    """
    if new_mdp is None:
        new_mdp = mdp
    if extend_parameters is None:
        extend_parameters = ['include']
    else:
        extend_parameters = list(asiterable(extend_parameters))

    # None parameters should be ignored (simple way to keep the template defaults)
    substitutions = {k: v for k,v in substitutions.items() if v is not None}

    params = list(substitutions.keys())   # list will be reduced for each match

    def demangled(p):
        """Return a RE string that matches the parameter."""
        return p.replace('_', '[-_]')   # must catch either - or _

    # one compiled pattern per requested parameter; each matches the whole
    # "parameter = value ; comment" line and captures the three pieces
    patterns = {parameter:
                re.compile(r"""
                 (?P<assignment>\s*{0!s}\s*=\s*)  # parameter == everything before the value
                 (?P<value>[^;]*)                 # value (stop before comment=;)
                 (?P<comment>\s*;.*)?             # optional comment
                 """.format(demangled(parameter)), re.VERBOSE)
                for parameter in substitutions}

    # stream the template into a temporary file, substituting as we go, and
    # only copy the result over the destination at the very end
    with tempfile.TemporaryFile() as target:
        with open(mdp, 'rb') as src:
            logger.info("editing mdp = {0!r}: {1!r}".format(mdp, substitutions.keys()))
            for line in src:
                line = line.decode('utf-8')
                new_line = line.strip()  # \n must be stripped to ensure that new line is built without break
                for p in params[:]:
                    m = patterns[p].match(new_line)
                    if m:
                        # I am too stupid to replace a specific region in the string so I rebuild it
                        # (matching a line and then replacing value requires TWO re calls)
                        #print 'line:' + new_line
                        #print m.groupdict()
                        if m.group('comment') is None:
                            comment = ''
                        else:
                            comment = " "+m.group('comment')
                        assignment = m.group('assignment')
                        if not assignment.endswith(' '):
                            assignment += ' '
                        # build new line piece-wise:
                        new_line = assignment
                        if p in extend_parameters:
                            # keep original value and add new stuff at end
                            new_line += str(m.group('value')) + ' '
                        # automatically transform lists into space-separated string values
                        value = " ".join(map(str, asiterable(substitutions[p])))
                        new_line += value + comment
                        # each parameter is substituted at most once per file
                        params.remove(p)
                        break
                target.write((new_line+'\n').encode('utf-8'))
        target.seek(0)
        # XXX: Is there a danger of corrupting the original mdp if something went wrong?
        with open(new_mdp, 'wb') as final:
            shutil.copyfileobj(target, final)

    # return all parameters that have NOT been substituted
    if len(params) > 0:
        # NOTE(review): Logger.warn is a deprecated alias of Logger.warning
        logger.warn("Not substituted in {new_mdp!r}: {params!r}".format(**vars()))
    return {p: substitutions[p] for p in params}
def edit_txt(filename, substitutions, newname=None):
    """Primitive text file stream editor.

    This function can be used to edit free-form text files such as the
    topology file. By default it does an **in-place edit** of
    *filename*. If *newname* is supplied then the edited
    file is written to *newname*.

    :Arguments:
       *filename*
           input text file
       *substitutions*
           substitution commands (see below for format)
       *newname*
           output filename; if ``None`` then *filename* is changed in
           place [``None``]

    *substitutions* is a list of triplets; the first two elements are regular
    expression strings, the last is the substitution value. It mimics
    ``sed`` search and replace. The rules for *substitutions*:

    .. productionlist::
       substitutions: "[" search_replace_tuple, ... "]"
       search_replace_tuple: "(" line_match_RE "," search_RE "," replacement ")"
       line_match_RE: regular expression that selects the line (uses match)
       search_RE: regular expression that is searched in the line
       replacement: replacement string for search_RE

    Running :func:`edit_txt` does pretty much what a simple ::

       sed /line_match_RE/s/search_RE/replacement/

    with repeated substitution commands does.

    Special replacement values:
    - ``None``: the rule is ignored
    - ``False``: the line is deleted (even if other rules match)

    .. note::

       * No sanity checks are performed and the substitutions must be supplied
         exactly as shown.
       * All substitutions are applied to a line; thus the order of the substitution
         commands may matter when one substitution generates a match for a subsequent rule.
       * If replacement is set to ``None`` then the whole expression is ignored and
         whatever is in the template is used. To unset values you must provided an
         empty string or similar.
       * Delete a matching line if replacement=``False``.
    """
    if newname is None:
        newname = filename

    # No sanity checks (figure out later how to give decent diagnostics).
    # Filter out any rules that have None in replacement.
    _substitutions = [{'lRE': re.compile(str(lRE)),
                       'sRE': re.compile(str(sRE)),
                       'repl': repl}
                      for lRE,sRE,repl in substitutions if repl is not None]

    # stream through a temporary file first so that an in-place edit never
    # truncates the original before all lines have been processed
    with tempfile.TemporaryFile() as target:
        with open(filename, 'rb') as src:
            logger.info("editing txt = {0!r} ({1:d} substitutions)".format(filename, len(substitutions)))
            for line in src:
                line = line.decode("utf-8")
                keep_line = True
                for subst in _substitutions:
                    m = subst['lRE'].match(line)
                    if m:              # apply substition to this line?
                        logger.debug('match: '+line.rstrip())
                        if subst['repl'] is False:   # special rule: delete line
                            keep_line = False
                        else:                        # standard replacement
                            line = subst['sRE'].sub(str(subst['repl']), line)
                            logger.debug('replaced: '+line.rstrip())
                if keep_line:
                    target.write(line.encode('utf-8'))
                else:
                    logger.debug("Deleting line %r", line)
        target.seek(0)
        with open(newname, 'wb') as final:
            shutil.copyfileobj(target, final)
    logger.info("edited txt = {newname!r}".format(**vars()))
def remove_molecules_from_topology(filename, **kwargs):
    r"""Remove autogenerated [ molecules ] entries from *filename*.

    Valid entries in ``[ molecules ]`` below the default *marker*
    are removed. For example, a topology file such as ::

       [ molecules ]
       Protein  1
       SOL 213
       ; The next line is the marker!
       ; Gromacs auto-generated entries follow:
       SOL 12345
       NA+ 15
       CL- 16
       ; This is a comment that is NOT deleted.
       SOL 333

    would become::

       [ molecules ]
       Protein  1
       SOL 213
       ; The next line is the marker!
       ; Gromacs auto-generated entries follow:
       ; This is a comment that is NOT deleted.

    Valid molecule lines look like ``SOL 1234``, ``NA 17`` etc. The
    actual regular expression used is "\s*[\w+_-]+\s+\d+\s*(;.*)?$".

    In order to use this function, the marker line has to be manually
    added to the topology file.

    :Arguments:
       *filename*
          The topology file that includes the ``[ molecules ]`` section.
          It is **edited in place**.
       *marker*
          Any ``[ molecules ]`` entries below this pattern (python regular
          expression) are removed. Leading white space is ignored. ``None``
          uses the default as described above.
    :Returns: number of molecule lines removed
    """
    marker = kwargs.pop('marker', None)
    if marker is None:
        marker = "; Gromacs auto-generated entries follow:"
    logger.debug("Scrubbed [ molecules ]: marker = %(marker)r", vars())

    # the marker is interpolated into a regular expression *unescaped*,
    # i.e. it is itself treated as a regex pattern (documented behaviour)
    p_marker = re.compile(r"\s*{0!s}".format(marker))
    p_molecule = re.compile(r"\s*[\w+_-]+\s+\d+\s*(;.*)?$")
    with tempfile.TemporaryFile() as target:
        with open(filename, 'rb') as src:
            autogenerated = False
            n_removed = 0
            for line in src:
                line = line.decode('utf-8')
                if p_marker.match(line):
                    # everything below the marker is fair game for removal
                    autogenerated = True
                if autogenerated and p_molecule.match(line):
                    n_removed += 1
                    continue  # remove by skipping the write
                target.write(line.encode('utf-8'))
        # only touch the original file when something was actually removed
        if autogenerated and n_removed > 0:
            target.seek(0)
            with open(filename, 'wb') as final:   # overwrite original!
                shutil.copyfileobj(target, final)
            logger.info("Removed %(n_removed)d autogenerated [ molecules ] from "
                        "topol = %(filename)r" % vars())
    return n_removed
# Working with index files and index groups
# -----------------------------------------
#
#: compiled regular expression to match a list of index groups
#: in the output of ``make_ndx``s <Enter> (empty) command.
NDXLIST = re.compile(r""">\s+\n # '> ' marker line from '' input (input not echoed)
\n # empty line
(?P<LIST> # list of groups
( # consists of repeats of the same pattern:
\s*\d+ # group number
\s+[^\s]+\s*: # group name, separator ':'
\s*\d+\satoms # number of atoms in group
\n
)+ # multiple repeats
)""", re.VERBOSE)
#: compiled regular expression to match a single line of
#: ``make_ndx`` output (e.g. after a successful group creation)
NDXGROUP = re.compile(r"""
\s*(?P<GROUPNUMBER>\d+) # group number
\s+(?P<GROUPNAME>[^\s]+)\s*: # group name, separator ':'
\s*(?P<NATOMS>\d+)\satoms # number of atoms in group
""", re.VERBOSE)
def make_ndx_captured(**kwargs):
    """make_ndx that captures all output.

    Standard :func:`~gromacs.make_ndx` command with the input and
    output pre-set in such a way that it can be conveniently used for
    :func:`parse_ndxlist`.

    Example::

      ndx_groups = parse_ndxlist(make_ndx_captured(n=ndx)[0])

    Note that the convenient :func:`get_ndx_groups` function does exactly
    that and can probably used in most cases.

    :Arguments:
       keywords are passed on to :func:`~gromacs.make_ndx`
    :Returns:
       (*returncode*, *output*, ``None``)
    """
    # capture stdout so the group listing can be parsed afterwards
    kwargs['stdout'] = False
    # drop any user-supplied quit commands; our own are appended below
    commands = [cmd for cmd in kwargs.pop('input', []) if cmd != 'q']
    # '' makes make_ndx print the group list, 'q' terminates it cleanly
    kwargs['input'] = commands + ['', 'q']
    return gromacs.make_ndx(**kwargs)
def get_ndx_groups(ndx, **kwargs):
    """Return a list of index groups in the index file *ndx*.

    :Arguments:
        - *ndx* is a Gromacs index file.
        - kwargs are passed to :func:`make_ndx_captured`.

    :Returns:
        list of groups as supplied by :func:`parse_ndxlist`

    Alternatively, load the index file with
    :class:`gromacs.formats.NDX` for full control.
    """
    fd, tmp_ndx = tempfile.mkstemp(suffix='.ndx')
    # mkstemp returns an open OS-level file descriptor; only the file name is
    # used (make_ndx writes to it), so close the fd here to avoid leaking it.
    os.close(fd)
    kwargs['o'] = tmp_ndx
    try:
        g = parse_ndxlist(make_ndx_captured(n=ndx, **kwargs)[1])
    finally:
        utilities.unlink_gmx(tmp_ndx)
    return g
def parse_ndxlist(output):
    """Parse output from make_ndx to build list of index groups::

      groups = parse_ndxlist(output)

    output should be the standard output from ``make_ndx``, e.g.::

       rc,output,junk = gromacs.make_ndx(..., input=('', 'q'), stdout=False, stderr=True)

    (or simply use

       rc,output,junk = cbook.make_ndx_captured(...)

    which presets input, stdout and stderr; of course input can be overriden.)

    :Returns:
       The function returns a list of dicts (``groups``) with fields

       name
           name of the groups
       nr
           number of the group (starts at 0)
       natoms
           number of atoms in the group
    """
    # insist on the full group listing (see NDXLIST) rather than stray lines
    match = NDXLIST.search(output)
    return parse_groups(match.group('LIST'))
def parse_groups(output):
    """Parse ``make_ndx`` output and return groups as a list of dicts.

    Each dict has the keys ``name``, ``nr`` (group number) and ``natoms``;
    lines that do not look like a group entry (see :data:`NDXGROUP`) are
    silently skipped.
    """
    matches = (NDXGROUP.match(line) for line in output.split('\n'))
    return [{'name': m.group('GROUPNAME'),
             'nr': int(m.group('GROUPNUMBER')),
             'natoms': int(m.group('NATOMS'))}
            for m in matches if m]
class IndexBuilder(object):
    """Build an index file with specified groups and the combined group.

    This is *not* a full blown selection parser a la Charmm, VMD or
    MDAnalysis but a very quick hack.

    **Example**

       How to use the :class:`IndexBuilder`::

          G = gromacs.cbook.IndexBuilder('md_posres.pdb',
                    ['S312:OG','T313:OG1','A38:O','A309:O','@a62549 & r NA'],
                    offset=-9, out_ndx='selection.ndx')
          groupname, ndx = G.combine()
          del G

       The residue numbers are given with their canonical resids from the
       sequence or pdb. *offset=-9* says that one calculates Gromacs topology
       resids by subtracting 9 from the canonical resid.

       The combined selection is ``OR`` ed by default and written to
       *selection.ndx*. One can also add all the groups in the initial *ndx*
       file (or the :program:`make_ndx` default groups) to the output (see the
       *defaultgroups* keyword for :meth:`IndexBuilder.combine`).

       Generating an index file always requires calling
       :meth:`~IndexBuilder.combine` even if there is only a single group.

       Deleting the class removes all temporary files associated with it (see
       :attr:`IndexBuilder.indexfiles`).

    :Raises:
       If an empty group is detected (which does not always work) then a
       :exc:`gromacs.BadParameterWarning` is issued.

    :Bugs:
       If ``make_ndx`` crashes with an unexpected error then this is fairly hard to
       diagnose. For instance, in certain cases it segmentation faults when a tpr
       is provided as a *struct* file and the resulting error messages becomes ::

          GromacsError: [Errno -11] Gromacs tool failed
          Command invocation: make_ndx -o /tmp/tmp_Na1__NK7cT3.ndx -f md_posres.tpr

       In this case run the command invocation manually to see what the problem
       could be.

    .. SeeAlso:: In some cases it might be more straightforward to use
                 :class:`gromacs.formats.NDX`.
    """

    def __init__(self, struct=None, selections=None, names=None, name_all=None,
                 ndx=None, out_ndx="selection.ndx", offset=0):
        """Build a index group from the selection arguments.

        If selections and a structure file are supplied then the individual
        selections are constructed with separate calls to
        :func:`gromacs.make_ndx`. Use :meth:`IndexBuilder.combine` to combine
        them into a joint selection or :meth:`IndexBuilder.write` to simply write
        out the individual named selections (useful with *names*).

        :Arguments:
           *struct* : filename
              Structure file (tpr, pdb, ...)

           *selections* : list
              The list must contain strings or tuples, which must be be one of
              the following constructs:

                 "<1-letter aa code><resid>[:<atom name]"

                    Selects the CA of the residue or the specified atom
                    name.

                    example: ``"S312:OA"`` or ``"A22"`` (equivalent to ``"A22:CA"``)

                 ("<1-letter aa code><resid>", "<1-letter aa code><resid>, ["<atom name>"])

                    Selects a *range* of residues. If only two residue
                    identifiers are provided then all atoms are
                    selected. With an optional third atom identifier,
                    only this atom anme is selected for each residue
                    in the range. [EXPERIMENTAL]

                 "@<make_ndx selection>"

                    The ``@`` letter introduces a verbatim ``make_ndx``
                    command. It will apply the given selection without any
                    further processing or checks.

                    example: ``"@a 6234 - 6238"`` or ``'@"SOL"'`` (note the quoting)
                    or ``"@r SER & r 312 & t OA"``.

           *names* : list
              Strings to name the selections; if not supplied or if individuals
              are ``None`` then a default name is created. When simply using
              :meth:`IndexBuilder.write` then these should be supplied.

           *name_all* : string
              Name of the group that is generated by :meth:`IndexBuilder.combine`.

           *offset* : int, dict
              This number is added to the resids in the first selection scheme; this
              allows names to be the same as in a crystal structure. If offset is a
              dict then it is used to directly look up the resids.

           *ndx* : filename or list of filenames
              Optional input index file(s).

           *out_ndx* : filename
              Output index file.
        """
        self.structure = struct
        self.ndx = ndx
        self.output = out_ndx
        self.name_all = name_all

        #: *offset* as a number is added to the resids in the first selection
        #: scheme; this
        #: allows names to be the same as in a crystal structure. If *offset* is a
        #: dict then it is used to directly look up the resids. Use :meth:`gmx_resid`
        #: to transform a crystal resid to a gromacs resid.
        #:
        #: The attribute may be changed directly after init.
        self.offset = offset

        #: Auto-labelled groups use this counter.
        self._command_counter = 0

        if selections is None:
            selections = []
        if not utilities.iterable(selections):
            # a single selection string is promoted to a one-element list
            selections = [selections]
        self.selections = selections
        if names is None:
            names = [None] * len(selections)

        #: Specialized ``make_ndx`` that always uses same structure
        #: and redirection (can be overridden)
        self.make_ndx = tools.Make_ndx(f=self.structure, n=self.ndx,
                                       stdout=False, stderr=False)

        #: dict, keyed by group name and pointing to index file for group
        #: (Groups are built in separate files because that is more robust
        #: as I can clear groups easily.)
        self.indexfiles = dict([self.parse_selection(selection, name)
                                for selection, name in zip(selections, names)])

    @property
    def names(self):
        """Names of all generated index groups."""
        return self.indexfiles.keys()

    def gmx_resid(self, resid):
        """Returns resid in the Gromacs index by transforming with offset."""
        try:
            # offset used as a lookup table (dict); TypeError/IndexError means
            # offset is a plain number and we fall back to simple addition
            gmx_resid = int(self.offset[resid])
        except (TypeError, IndexError):
            gmx_resid = resid + self.offset
        except KeyError:
            raise KeyError("offset must be a dict that contains the gmx resid for {0:d}".format(resid))
        return gmx_resid

    def combine(self, name_all=None, out_ndx=None, operation='|', defaultgroups=False):
        """Combine individual groups into a single one and write output.

        :Keywords:
           name_all : string
              Name of the combined group, ``None`` generates a name.  [``None``]
           out_ndx : filename
              Name of the output file that will contain the individual groups
              and the combined group. If ``None`` then default from the class
              constructor is used. [``None``]
           operation : character
              Logical operation that is used to generate the combined group from
              the individual groups: "|" (OR) or "&" (AND); if set to ``False``
              then no combined group is created and only the individual groups
              are written. ["|"]
           defaultgroups : bool
              ``True``: append everything to the default groups produced by
              :program:`make_ndx` (or rather, the groups provided in the ndx file on
              initialization --- if this was ``None`` then these are truly default groups);
              ``False``: only use the generated groups

        :Returns:
           ``(combinedgroup_name, output_ndx)``, a tuple showing the
           actual group name and the name of the file; useful when all names are autogenerated.

        .. Warning:: The order of the atom numbers in the combined group is
                     *not* guaranteed to be the same as the selections on input because
                     ``make_ndx`` sorts them ascending. Thus you should be careful when
                     using these index files for calculations of angles and dihedrals.
                     Use :class:`gromacs.formats.NDX` in these cases.

        .. SeeAlso:: :meth:`IndexBuilder.write`.
        """
        if not operation in ('|', '&', False):
            raise ValueError("Illegal operation {0!r}, only '|' (OR) and '&' (AND) or False allowed.".format(
                operation))
        if name_all is None and operation:
            name_all = self.name_all or operation.join(self.indexfiles)
        if out_ndx is None:
            out_ndx = self.output

        if defaultgroups:
            # make a default file (using the original ndx where provided!!)
            # NOTE(review): the fd returned by mkstemp is never closed here
            # (and below for tmp_ndx) -- possible descriptor leak; verify.
            fd, default_ndx = tempfile.mkstemp(suffix='.ndx', prefix='default__')
            try:
                self.make_ndx(o=default_ndx, input=['q'])
            except:
                utilities.unlink_gmx(default_ndx)
                raise
            ndxfiles = [default_ndx]
        else:
            ndxfiles = []
        ndxfiles.extend(self.indexfiles.values())

        if operation:
            # combine multiple selections and name them
            try:
                fd, tmp_ndx = tempfile.mkstemp(suffix='.ndx', prefix='combined__')
                # combine all selections by loading ALL temporary index files
                operation = ' '+operation.strip()+' '
                cmd = [operation.join(['"{0!s}"'.format(gname) for gname in self.indexfiles]),
                       '', 'q']
                rc,out,err = self.make_ndx(n=ndxfiles, o=tmp_ndx, input=cmd)
                if self._is_empty_group(out):
                    warnings.warn("No atoms found for {cmd!r}".format(**vars()),
                                  category=BadParameterWarning)

                # second pass for naming, sigh (or: use NDX ?)
                groups = parse_ndxlist(out)
                last = groups[-1]
                # name this group
                name_cmd = ["name {0:d} {1!s}".format(last['nr'], name_all), 'q']
                rc,out,err = self.make_ndx(n=tmp_ndx, o=out_ndx, input=name_cmd)
                # For debugging, look at out and err or set stdout=True, stderr=True
                # TODO: check out if at least 1 atom selected
                ##print "DEBUG: combine()"
                ##print out
            finally:
                utilities.unlink_gmx(tmp_ndx)
                if defaultgroups:
                    utilities.unlink_gmx(default_ndx)
        else:
            # just write individual groups in one file (name_all --> None)
            rc,out,err = self.make_ndx(n=ndxfiles, o=out_ndx, input=['','q'])

        return name_all, out_ndx

    def write(self, out_ndx=None, defaultgroups=False):
        """Write individual (named) groups to *out_ndx*."""
        name_all, out_ndx = self.combine(operation=False, out_ndx=out_ndx, defaultgroups=defaultgroups)
        return out_ndx

    def cat(self, out_ndx=None):
        """Concatenate input index files.

        Generate a new index file that contains the default Gromacs index
        groups (if a structure file was defined) and all index groups from the
        input index files.

        :Arguments:
           out_ndx : filename
              Name of the output index file; if ``None`` then use the default
              provided to the constructore. [``None``].
        """
        if out_ndx is None:
            out_ndx = self.output
        self.make_ndx(o=out_ndx, input=['q'])
        return out_ndx

    def parse_selection(self, selection, name=None):
        """Returns (groupname, filename) with index group."""
        # dispatch on the selection form: tuple -> range, '@...' -> verbatim
        # make_ndx command, anything else -> residue/atom selection
        if type(selection) is tuple:
            # range
            process = self._process_range
        elif selection.startswith('@'):
            # verbatim make_ndx command
            process = self._process_command
            selection = selection[1:]
        else:
            process = self._process_residue
        return process(selection, name)

    def _process_command(self, command, name=None):
        """Process ``make_ndx`` command and return name and temp index file."""
        self._command_counter += 1
        if name is None:
            name = "CMD{0:03d}".format(self._command_counter)

        # Need to build it with two make_ndx calls because I cannot reliably
        # name the new group without knowing its number.
        try:
            # NOTE(review): mkstemp fds are not closed in this method either
            fd, tmp_ndx = tempfile.mkstemp(suffix='.ndx', prefix='tmp_'+name+'__')
            cmd = [command, '', 'q']   # empty command '' necessary to get list
            # This sometimes fails with 'OSError: Broken Pipe' --- hard to debug
            rc,out,err = self.make_ndx(o=tmp_ndx, input=cmd)
            self.check_output(out, "No atoms found for selection {command!r}.".format(**vars()), err=err)
            # For debugging, look at out and err or set stdout=True, stderr=True
            # TODO: check '  0 r_300_&_ALA_&_O     :     1 atoms' has at least 1 atom
            ##print "DEBUG: _process_command()"
            ##print out
            groups = parse_ndxlist(out)
            last = groups[-1]
            # reduce and name this group
            fd, ndx = tempfile.mkstemp(suffix='.ndx', prefix=name+'__')
            name_cmd = ["keep {0:d}".format(last['nr']),
                        "name 0 {0!s}".format(name), 'q']
            rc,out,err = self.make_ndx(n=tmp_ndx, o=ndx, input=name_cmd)
        finally:
            utilities.unlink_gmx(tmp_ndx)

        return name, ndx

    #: regular expression to match and parse a residue-atom selection
    #: such as ``"S312:OG"`` or ``"A22"`` (captures ``aa``, ``resid``
    #: and the optional ``atom``)
    RESIDUE = re.compile(r"""
                 (?P<aa>([ACDEFGHIKLMNPQRSTVWY])  # 1-letter amino acid
                        |                         # or
                        ([A-Z][A-Z][A-Z][A-Z]?)   # 3-letter or 4-letter residue name
                 )
                 (?P<resid>\d+)                   # resid
                 (:                               # separator ':'
                   (?P<atom>\w+)                  # atom name
                 )?                               # possibly one
                 """, re.VERBOSE | re.IGNORECASE)

    def _process_residue(self, selection, name=None):
        """Process residue/atom selection and return name and temp index file."""
        if name is None:
            name = selection.replace(':', '_')

        # XXX: use _translate_residue() ....
        m = self.RESIDUE.match(selection)
        if not m:
            raise ValueError("Selection {selection!r} is not valid.".format(**vars()))

        gmx_resid = self.gmx_resid(int(m.group('resid')))
        residue = m.group('aa')
        if len(residue) == 1:
            gmx_resname = utilities.convert_aa_code(residue)   # only works for AA
        else:
            gmx_resname = residue                              # use 3-letter for any resname
        gmx_atomname = m.group('atom')
        if gmx_atomname is None:
            # default atom when none was specified in the selection
            gmx_atomname = 'CA'

        #: select residue <gmx_resname><gmx_resid> atom <gmx_atomname>
        _selection = 'r {gmx_resid:d} & r {gmx_resname!s} & a {gmx_atomname!s}'.format(**vars())
        cmd = ['keep 0', 'del 0',
               _selection,
               'name 0 {name!s}'.format(**vars()),
               'q']
        fd, ndx = tempfile.mkstemp(suffix='.ndx', prefix=name+'__')
        rc,out,err = self.make_ndx(n=self.ndx, o=ndx, input=cmd)
        self.check_output(out, "No atoms found for "
                          "%(selection)r --> %(_selection)r" % vars())
        # For debugging, look at out and err or set stdout=True, stderr=True
        ##print "DEBUG: _process_residue()"
        ##print out

        return name, ndx

    def _process_range(self, selection, name=None):
        """Process a range selection.

        ("S234", "A300", "CA")   --> selected all CA in this range
        ("S234", "A300")         --> selected all atoms in this range

        .. Note:: Ignores residue type, only cares about the resid (but still required)
        """
        try:
            first, last, gmx_atomname = selection
        except ValueError:
            try:
                first, last = selection
                gmx_atomname = '*'
            except:
                logger.error("%r is not a valid range selection", selection)
                raise
        if name is None:
            name = "{first!s}-{last!s}_{gmx_atomname!s}".format(**vars())

        _first = self._translate_residue(first, default_atomname=gmx_atomname)
        _last = self._translate_residue(last, default_atomname=gmx_atomname)

        # NOTE(review): the doubled '& &' below looks like a typo --
        # make_ndx boolean syntax uses a single '&'; verify against
        # make_ndx before relying on range selections.
        _selection = 'r {0:d} - {1:d} & & a {2!s}'.format(_first['resid'], _last['resid'], gmx_atomname)
        cmd = ['keep 0', 'del 0',
               _selection,
               'name 0 {name!s}'.format(**vars()),
               'q']
        fd, ndx = tempfile.mkstemp(suffix='.ndx', prefix=name+'__')
        rc,out,err = self.make_ndx(n=self.ndx, o=ndx, input=cmd)
        self.check_output(out, "No atoms found for "
                          "%(selection)r --> %(_selection)r" % vars())
        # For debugging, look at out and err or set stdout=True, stderr=True
        ##print "DEBUG: _process_residue()"
        ##print out

        return name, ndx

    def _translate_residue(self, selection, default_atomname='CA'):
        """Translate selection for a single res to make_ndx syntax."""
        m = self.RESIDUE.match(selection)
        if not m:
            errmsg = "Selection {selection!r} is not valid.".format(**vars())
            logger.error(errmsg)
            raise ValueError(errmsg)

        gmx_resid = self.gmx_resid(int(m.group('resid')))   # magic offset correction
        residue = m.group('aa')
        if len(residue) == 1:
            gmx_resname = utilities.convert_aa_code(residue)   # only works for AA
        else:
            gmx_resname = residue                              # use 3-letter for any resname

        gmx_atomname = m.group('atom')
        if gmx_atomname is None:
            gmx_atomname = default_atomname

        return {'resname':gmx_resname, 'resid':gmx_resid, 'atomname':gmx_atomname}

    def check_output(self, make_ndx_output, message=None, err=None):
        """Simple tests to flag problems with a ``make_ndx`` run.

        Warns (``GromacsValueWarning``) on an empty group, raises
        :exc:`GromacsError` on a make_ndx syntax error or when no output
        was produced at all; returns ``True`` when no problem was found.
        """
        if message is None:
            message = ""
        else:
            message = '\n' + message

        def format(output, w=60):
            # local helper (intentionally shadows the builtin only here)
            hrule = "====[ GromacsError (diagnostic output) ]".ljust(w,"=")
            return hrule + '\n' + str(output) + hrule

        rc = True
        if self._is_empty_group(make_ndx_output):
            warnings.warn("Selection produced empty group.{message!s}".format(**vars()), category=GromacsValueWarning)
            rc = False
        if self._has_syntax_error(make_ndx_output):
            rc = False
            out_formatted = format(make_ndx_output)
            raise GromacsError("make_ndx encountered a Syntax Error, "
                               "%(message)s\noutput:\n%(out_formatted)s" % vars())
        if make_ndx_output.strip() == "":
            rc = False
            out_formatted = format(err)
            raise GromacsError("make_ndx produced no output, "
                               "%(message)s\nerror output:\n%(out_formatted)s" % vars())
        return rc

    def _is_empty_group(self, make_ndx_output):
        # make_ndx reports "Group is empty" when a selection matched nothing
        m = re.search('Group is empty', make_ndx_output)
        return m is not None

    def _has_syntax_error(self, make_ndx_output):
        # make_ndx reports "Syntax error:" for malformed selection commands
        m = re.search('Syntax error:', make_ndx_output)
        return m is not None

    def __del__(self):
        try:
            for path in self.indexfiles.values():
                utilities.unlink_gmx(path)
                # Removes auto-backup files, too (which we have because mkstemp creates
                # an empty file and make_ndx backs that up).
        except (AttributeError, OSError):
            # all exceptions are ignored inside __del__ anyway but these
            # two we do not even want to be noticed off:
            # AttributeError: when reloading the module, OSError: when file disappeared
            pass
class Transformer(utilities.FileUtils):
"""Class to handle transformations of trajectories.
1. Center, compact, and fit to reference structure in tpr
(optionally, only center in the xy plane): :meth:`~Transformer.center_fit`
2. Write compact xtc and tpr with water removed: :meth:`~Transformer.strip_water`
3. Write compact xtc and tpr only with protein: :meth:`~Transformer.keep_protein_only`
"""
def __init__(self, s="topol.tpr", f="traj.xtc", n=None, force=None,
             dirname=os.path.curdir, outdir=None):
    """Set up Transformer with structure and trajectory.

    Supply *n* = tpr, *f* = xtc (and *n* = ndx) relative to dirname.

    :Keywords:
       *s*
          tpr file (or similar); note that this should not contain
          position restraints if it is to be used with a reduced
          system (see :meth:`~Transformer.strip_water`)
       *f*
          trajectory (xtc, trr, ...)
       *n*
          index file (it is typically safe to leave this as ``None``; in
          cases where a trajectory needs to be centered on non-standard
          groups this should contain those groups)
       *force*
          Set the default behaviour for handling existing files:

          - ``True``: overwrite existing trajectories
          - ``False``: throw a IOError exception
          - ``None``: skip existing and log a warning [default]
       *dirname*
          directory in which all operations are performed, relative paths
          are interpreted relative to *dirname* [.]
       *outdir*
          directory under which output files are placed; by default
          the same directory where the input files live
    """
    # canonicalise file names: enforce tpr/xtc extensions but keep a
    # user-supplied extension if one was given (use_my_ext=True)
    self.tpr = self.filename(s, ext="tpr", use_my_ext=True)
    self.xtc = self.filename(f, ext="xtc", use_my_ext=True)
    self.ndx = n
    self.dirname = dirname
    self.outdir = utilities.realpath(outdir) if outdir is not None else None
    self.force = force
    self.nowater = {}       # data for trajectory stripped from water
    self.proteinonly = {}   # data for a protein-only trajectory
    # sanity check: warn (but do not fail) if any input file is missing
    with utilities.in_dir(self.dirname, create=False):
        for f in (self.tpr, self.xtc, self.ndx):
            if f is None:
                continue
            if not os.path.exists(f):
                msg = "Possible problem: File {f!r} not found in {dirname!r}.".format(**vars())
                warnings.warn(msg, category=MissingDataWarning)
                logger.warn(msg)
    logger.info("%r initialised", self)
def __repr__(self):
    """Return a constructor-style representation of this Transformer."""
    template = "{0!s}(s={1!r}, f={2!r}, n={3!r}, force={4!r})"
    return template.format(self.__class__.__name__,
                           self.tpr, self.xtc, self.ndx, self.force)
def outfile(self, p):
    """Path for an output file.

    If :attr:`outdir` is set then the path is
    ``outdir/basename(p)`` else just ``p``.
    """
    if self.outdir is None:
        return p
    return os.path.join(self.outdir, os.path.basename(p))
def rp(self, *args):
    """Return canonical path to file under *dirname* with components *args*.

    If *args* form an absolute path then just return it as the absolute path.
    """
    try:
        joined = os.path.join(*args)
    except TypeError:
        # non-path-like components: fall through to realpath resolution
        pass
    else:
        if os.path.isabs(joined):
            return joined
    return utilities.realpath(self.dirname, *args)
def center_fit(self, **kwargs):
    """Write compact xtc that is fitted to the tpr reference structure.

    See :func:`gromacs.cbook.trj_fitandcenter` for details and
    description of *kwargs* (including *input*, *input1*, *n* and
    *n1* for how to supply custom index groups). The most important ones are listed
    here but in most cases the defaults should work.

    :Keywords:
       *s*
          Input structure (typically the default tpr file but can be set to
          some other file with a different conformation for fitting)
       *n*
          Alternative index file.
       *o*
          Name of the output trajectory.
       *xy* : Boolean
          If ``True`` then only fit in xy-plane (useful for a membrane normal
          to z). The default is ``False``.
       *force*
          - ``True``: overwrite existing trajectories
          - ``False``: throw a IOError exception
          - ``None``: skip existing and log a warning [default]

    :Returns:
          dictionary with keys *tpr*, *xtc*, which are the names of the
          the new files
    """
    kwargs.setdefault('s', self.tpr)
    kwargs.setdefault('n', self.ndx)
    kwargs['f'] = self.xtc
    # default output name: insert '_centfit' before the xtc suffix
    kwargs.setdefault('o', self.outfile(self.infix_filename(None, self.xtc, '_centfit', 'xtc')))
    force = kwargs.pop('force', self.force)
    logger.info("Centering and fitting trajectory {f!r}...".format(**kwargs))
    with utilities.in_dir(self.dirname):
        # honour *force*: skip, overwrite, or raise for an existing output file
        if not self.check_file_exists(kwargs['o'], resolve="indicate", force=force):
            trj_fitandcenter(**kwargs)
        logger.info("Centered and fit trajectory: {o!r}.".format(**kwargs))
    return {'tpr': self.rp(kwargs['s']), 'xtc': self.rp(kwargs['o'])}
def fit(self, xy=False, **kwargs):
    """Write xtc that is fitted to the tpr reference structure.

    Runs :class:`gromacs.tools.trjconv` with appropriate arguments
    for fitting. The most important *kwargs* are listed
    here but in most cases the defaults should work.

    Note that the default settings do *not* include centering or
    periodic boundary treatment as this often does not work well
    with fitting. It is better to do this as a separate step (see
    :meth:`center_fit` or :func:`gromacs.cbook.trj_fitandcenter`)

    :Keywords:
       *s*
          Input structure (typically the default tpr file but can be set to
          some other file with a different conformation for fitting)
       *n*
          Alternative index file.
       *o*
          Name of the output trajectory. A default name is created.
          If e.g. *dt* = 100 is one of the *kwargs* then the default name includes
          "_dt100ps".
       *xy* : boolean
          If ``True`` then only do a rot+trans fit in the xy plane
          (good for membrane simulations); default is ``False``.
       *force*
          Override standard behavior (potentially dangerous)

          - ``True``: overwrite existing trajectories
          - ``False``: throw a IOError exception
          - ``None``: skip existing and log a warning [default]
       *fitgroup*
          index group to fit on ["backbone"]

          .. Note:: If keyword *input* is supplied then it will override
                    *fitgroup*; *input* = ``[fitgroup, outgroup]``
       *kwargs*
          kwargs are passed to :func:`~gromacs.cbook.trj_xyfitted`

    :Returns:
          dictionary with keys *tpr*, *xtc*, which are the names of the
          the new files
    """
    kwargs.setdefault('s', self.tpr)
    kwargs.setdefault('n', self.ndx)
    kwargs['f'] = self.xtc
    force = kwargs.pop('force', self.force)
    if xy:
        fitmode = 'rotxy+transxy'
        kwargs.pop('fit', None)
        infix_default = '_fitxy'
    else:
        fitmode = kwargs.pop('fit', 'rot+trans')   # user can use 'progressive', too
        infix_default = '_fit'
    dt = kwargs.get('dt')
    if dt:
        infix_default += '_dt{0:d}ps'.format(int(dt))   # dt in ps
    kwargs.setdefault('o', self.outfile(self.infix_filename(None, self.xtc, infix_default, 'xtc')))
    fitgroup = kwargs.pop('fitgroup', 'backbone')
    kwargs.setdefault('input', [fitgroup, "system"])
    if kwargs.get('center', False):
        logger.warn("Transformer.fit(): center=%(center)r used: centering should not be combined with fitting.", kwargs)
        # BUG FIX: the sanity check must inspect the real 'input' kwarg;
        # the previous kwargs['inputs'] always raised KeyError here.
        if len(kwargs['input']) != 3:
            logger.error("If you insist on centering you must provide three groups in the 'input' kwarg: (center, fit, output)")
            # BUG FIX: raise the built-in ValueError; the previous misspelled
            # 'ValuError' raised a NameError instead of the intended error.
            raise ValueError("Insufficient index groups for centering,fitting,output")
    logger.info("Fitting trajectory %r to with xy=%r...", kwargs['f'], xy)
    logger.info("Fitting on index group %(fitgroup)r", vars())
    with utilities.in_dir(self.dirname):
        if self.check_file_exists(kwargs['o'], resolve="indicate", force=force):
            logger.warn("File %r exists; force regenerating it with force=True.", kwargs['o'])
        else:
            gromacs.trjconv(fit=fitmode, **kwargs)
            logger.info("Fitted trajectory (fitmode=%s): %r.", fitmode, kwargs['o'])
    return {'tpr': self.rp(kwargs['s']), 'xtc': self.rp(kwargs['o'])}
def strip_water(self, os=None, o=None, on=None, compact=False,
                resn="SOL", groupname="notwater", **kwargs):
    """Write xtc and tpr with water (by resname) removed.

    :Keywords:
       *os*
          Name of the output tpr file; by default use the original but
          insert "nowater" before suffix.
       *o*
          Name of the output trajectory; by default use the original name but
          insert "nowater" before suffix.
       *on*
          Name of a new index file (without water).
       *compact*
          ``True``: write a compact and centered trajectory
          ``False``: use trajectory as it is [``False``]
       *centergroup*
          Index group used for centering ["Protein"]

          .. Note:: If *input* is provided (see below under *kwargs*)
                    then *centergroup* is ignored and the group for
                    centering is taken as the first entry in *input*.
       *resn*
          Residue name of the water molecules; all these residues are excluded.
       *groupname*
          Name of the group that is generated by subtracting all waters
          from the system.
       *force* : Boolean
          - ``True``: overwrite existing trajectories
          - ``False``: throw a IOError exception
          - ``None``: skip existing and log a warning [default]
       *kwargs*
          are passed on to :func:`gromacs.cbook.trj_compact` (unless the
          values have to be set to certain values such as s, f, n, o
          keywords). The *input* keyword is always mangled: Only the first
          entry (the group to centre the trajectory on) is kept, and as a
          second group (the output group) *groupname* is used.

    :Returns:
          dictionary with keys *tpr*, *xtc*, *ndx* which are the names of the
          the new files

    .. warning:: The input tpr file should *not* have *any position restraints*;
                 otherwise Gromacs will throw a hissy-fit and say

                 *Software inconsistency error: Position restraint coordinates are
                 missing*

                 (This appears to be a bug in Gromacs 4.x.)
    """
    force = kwargs.pop('force', self.force)
    newtpr = self.outfile(self.infix_filename(os, self.tpr, '_nowater'))
    newxtc = self.outfile(self.infix_filename(o, self.xtc, '_nowater'))
    newndx = self.outfile(self.infix_filename(on, self.tpr, '_nowater', 'ndx'))
    nowater_ndx = self._join_dirname(newtpr, "nowater.ndx")   # refers to original tpr
    if compact:
        TRJCONV = trj_compact
        # input overrides centergroup
        if kwargs.get('centergroup') is not None and 'input' in kwargs:
            logger.warn("centergroup = %r will be superceded by input[0] = %r",
                        kwargs['centergroup'], kwargs['input'][0])
        _input = kwargs.get('input', [kwargs.get('centergroup', 'Protein')])
        kwargs['input'] = [_input[0], groupname]   # [center group, write-out selection]
        del _input
        logger.info("Creating a compact trajectory centered on group %r", kwargs['input'][0])
        logger.info("Writing %r to the output trajectory", kwargs['input'][1])
    else:
        TRJCONV = gromacs.trjconv
        kwargs['input'] = [groupname]
        logger.info("Writing %r to the output trajectory (no centering)", kwargs['input'][0])
    # clean kwargs, only legal arguments for Gromacs tool trjconv should remain
    kwargs.pop("centergroup", None)
    NOTwater = "! r {resn!s}".format(**vars())   # make_ndx selection ("not water residues")
    with utilities.in_dir(self.dirname):
        # ugly because I cannot break from the block
        if not self.check_file_exists(newxtc, resolve="indicate", force=force):
            # make no-water index
            B = IndexBuilder(struct=self.tpr, selections=['@'+NOTwater],
                             ndx=self.ndx, out_ndx=nowater_ndx)
            B.combine(name_all=groupname, operation="|", defaultgroups=True)
            logger.debug("Index file for water removal: %r", nowater_ndx)
            logger.info("TPR file without water {newtpr!r}".format(**vars()))
            gromacs.tpbconv(s=self.tpr, o=newtpr, n=nowater_ndx, input=[groupname])
            logger.info("NDX of the new system %r", newndx)
            gromacs.make_ndx(f=newtpr, o=newndx, input=['q'], stderr=False, stdout=False)
            # PROBLEM: If self.ndx contained a custom group required for fitting then we are loosing
            #          this group here. We could try to merge only this group but it is possible that
            #          atom indices changed. The only way to solve this is to regenerate the group with
            #          a selection or only use Gromacs default groups.
            logger.info("Trajectory without water {newxtc!r}".format(**vars()))
            kwargs['s'] = self.tpr
            kwargs['f'] = self.xtc
            kwargs['n'] = nowater_ndx
            kwargs['o'] = newxtc
            TRJCONV(**kwargs)
            logger.info("pdb and gro for visualization")
            for ext in 'pdb', 'gro':
                try:
                    # see warning in doc ... so we don't use the new xtc but the old one
                    kwargs['o'] = self.filename(newtpr, ext=ext)
                    TRJCONV(dump=0, stdout=False, stderr=False, **kwargs)    # silent
                # FIX: narrowed the bare ``except:`` — a bare except also
                # swallows KeyboardInterrupt/SystemExit; Exception keeps the
                # best-effort behaviour while letting real aborts propagate.
                except Exception:
                    logger.exception("Failed building the water-less %(ext)s. "
                                     "Position restraints in tpr file (see docs)?" % vars())
        logger.info("strip_water() complete")
    self.nowater[self.rp(newxtc)] = Transformer(dirname=self.dirname, s=newtpr,
                                                f=newxtc, n=newndx, force=force)
    return {'tpr':self.rp(newtpr), 'xtc':self.rp(newxtc), 'ndx':self.rp(newndx)}
# TODO: could probably unify strip_water() and keep_protein_only()
# (given that the latter was produced by copy&paste+search&replace...)
def keep_protein_only(self, os=None, o=None, on=None, compact=False,
                      groupname="proteinonly", **kwargs):
    """Write xtc and tpr only containing the protein.

    :Keywords:
       *os*
          Name of the output tpr file; by default use the original but
          insert "proteinonly" before suffix.
       *o*
          Name of the output trajectory; by default use the original name but
          insert "proteinonly" before suffix.
       *on*
          Name of a new index file.
       *compact*
          ``True``: write a compact and centered trajectory
          ``False``: use trajectory as it is [``False``]
       *groupname*
          Name of the protein-only group.
       *keepalso*
          List of literal make_ndx selections of additional groups that should
          be kept, e.g. ['resname DRUG', 'atom 6789'].
       *force* : Boolean
          - ``True``: overwrite existing trajectories
          - ``False``: throw a IOError exception
          - ``None``: skip existing and log a warning [default]
       *kwargs*
          are passed on to :func:`gromacs.cbook.trj_compact` (unless the
          values have to be set to certain values such as s, f, n, o
          keywords). The *input* keyword is always mangled: Only the first
          entry (the group to centre the trajectory on) is kept, and as a
          second group (the output group) *groupname* is used.

    :Returns:
          dictionary with keys *tpr*, *xtc*, *ndx* which are the names of the
          the new files

    .. warning:: The input tpr file should *not* have *any position restraints*;
                 otherwise Gromacs will throw a hissy-fit and say

                 *Software inconsistency error: Position restraint coordinates are
                 missing*

                 (This appears to be a bug in Gromacs 4.x.)
    """
    force = kwargs.pop('force', self.force)
    suffix = 'proteinonly'
    newtpr = self.outfile(self.infix_filename(os, self.tpr, '_'+suffix))
    newxtc = self.outfile(self.infix_filename(o, self.xtc, '_'+suffix))
    newndx = self.outfile(self.infix_filename(on, self.tpr, '_'+suffix, 'ndx'))
    selection_ndx = suffix+".ndx"   # refers to original tpr
    if compact:
        TRJCONV = trj_compact
        _input = kwargs.get('input', ['Protein'])
        kwargs['input'] = [_input[0], groupname]    # [center group, write-out selection]
        del _input
    else:
        TRJCONV = gromacs.trjconv
        kwargs['input'] = [groupname]
    selections = ['@'+sel for sel in ['"Protein"'] + kwargs.pop('keepalso',[])]
    with utilities.in_dir(self.dirname):
        # ugly because I cannot break from the block
        if not self.check_file_exists(newxtc, resolve="indicate", force=force):
            # make index (overkill for 'Protein' but maybe we want to enhance
            # it in the future, e.g. with keeping ions/ligands as well?
            B = IndexBuilder(struct=self.tpr, selections=selections,
                             ndx=self.ndx, out_ndx=selection_ndx)
            B.combine(name_all=groupname, operation="|", defaultgroups=True)
            # FIX: corrected typo in log message ("containg" -> "containing")
            logger.info("TPR file containing the protein {newtpr!r}".format(**vars()))
            gromacs.tpbconv(s=self.tpr, o=newtpr, n=selection_ndx, input=[groupname])
            logger.info("NDX of the new system {newndx!r}".format(**vars()))
            gromacs.make_ndx(f=newtpr, o=newndx, input=['q'], stderr=False, stdout=False)
            logger.info("Trajectory with only the protein {newxtc!r}".format(**vars()))
            kwargs['s'] = self.tpr
            kwargs['f'] = self.xtc
            kwargs['n'] = selection_ndx
            kwargs['o'] = newxtc
            TRJCONV(**kwargs)
            logger.info("pdb and gro for visualization")
            for ext in 'pdb', 'gro':
                try:
                    # see warning in doc ... so we don't use the new xtc but the old one
                    kwargs['o'] = self.filename(newtpr, ext=ext)
                    TRJCONV(dump=0, stdout=False, stderr=False, **kwargs)    # silent
                # FIX: narrowed the bare ``except:`` so KeyboardInterrupt and
                # SystemExit are not swallowed by this best-effort step.
                except Exception:
                    logger.exception("Failed building the protein-only %(ext)s. "
                                     "Position restraints in tpr file (see docs)?" % vars())
        logger.info("keep_protein_only() complete")
    self.proteinonly[self.rp(newxtc)] = Transformer(dirname=self.dirname, s=newtpr,
                                                    f=newxtc, n=newndx, force=force)
    return {'tpr':self.rp(newtpr), 'xtc':self.rp(newxtc), 'ndx':self.rp(newndx)}
def strip_fit(self, **kwargs):
    """Strip water and fit to the remaining system.

    First runs :meth:`strip_water` and then :meth:`fit`; see there
    for arguments.

    - *strip_input* is used for :meth:`strip_water` (but is only useful in
      special cases, e.g. when there is no Protein group defined. Then set
      *strip_input* = ``['Other']``.
    - *input* is passed on to :meth:`fit` and can contain the
      ``[center_group, fit_group, output_group]``
    - *fitgroup* is only passed to :meth:`fit` and just contains
      the group to fit to ("backbone" by default)

      .. warning:: *fitgroup* can only be a Gromacs default group and not
                   a custom group (because the indices change after stripping)
    - By default *fit* = "rot+trans" (and *fit* is passed to :meth:`fit`,
      together with the *xy* = ``False`` keyword)

    .. Note:: The call signature of :meth:`strip_water` is somewhat different from this one.
    """
    kwargs.setdefault('fit', 'rot+trans')
    # split the kwargs: these four are consumed by fit(), the rest go to strip_water()
    kw_fit = {}
    for k in ('xy', 'fit', 'fitgroup', 'input'):
        if k in kwargs:
            kw_fit[k] = kwargs.pop(k)
    # strip_water() uses 'input' itself, so the caller supplies 'strip_input' instead
    kwargs['input'] = kwargs.pop('strip_input', ['Protein'])
    # both steps honour the same force setting
    kwargs['force'] = kw_fit['force'] = kwargs.pop('force', self.force)
    paths = self.strip_water(**kwargs)   # updates self.nowater
    transformer_nowater = self.nowater[paths['xtc']]  # make sure to get the one we just produced
    return transformer_nowater.fit(**kw_fit)     # use new Transformer's fit()
def _join_dirname(self, *args):
    """Return ``os.path.join(os.path.dirname(args[0]), *args[1:])``."""
    # extra function because I need to use it in a method that defines
    # the kwarg 'os', which collides with os.path...
    head, tail = args[0], args[1:]
    return os.path.join(os.path.dirname(head), *tail)
|
Becksteinlab/GromacsWrapper
|
gromacs/cbook.py
|
Python
|
gpl-3.0
| 90,385
|
[
"CHARMM",
"CRYSTAL",
"Gromacs",
"MDAnalysis",
"VMD"
] |
d68b4cd7a50b98c556388d255c8bcbbe76b49289dd1d44d83bc418be8fd5ad28
|
# coding: utf-8
"""Release data for the abiflows project."""

from collections import OrderedDict

# Name of the package for release purposes. This is the name which labels
# the tarballs and RPMs made by distutils, so it's best to lowercase it.
name = 'abiflows'

# Version information. An empty _version_extra corresponds to a full
# release; 'dev' as a _version_extra string means a development version.
_version_major = 0
_version_minor = 6
_version_micro = ''  # use '' for first of series, number for 1 and above
#_version_extra = 'dev'
_version_extra = ''  # Uncomment this for full releases

# Assemble the full version string, skipping empty (falsy) components.
_ver = [_version_major, _version_minor]
for _component in (_version_micro, _version_extra):
    if _component:
        _ver.append(_component)
__version__ = '.'.join(str(piece) for piece in _ver)
version = __version__  # backwards compatibility name

# The minimum Abinit version compatible with AbiFlows
#min_abinit_version = "8.0.8"

description = "Framework for high-throughput calculations with ABINIT"
long_description = \
"""
The latest development version is always available from site <https://github.com/abinit/abiflows>
"""

license = 'GPL'
author = 'The Abinit group'
author_email = 'matteo.giantomassi@uclouvain.be'
maintainer = "Matteo Giantomassi"
maintainer_email = author_email

# Mapping from a short handle to (display name, contact address).
authors = OrderedDict([
    ('Guido', ('G. Petretto', 'nobody@nowhere')),
    ('David', ('D. Waroquiers', 'nobody@nowhere')),
    ('Matteo', ('M. Giantomassi', 'nobody@nowhere')),
    ('Michiel', ('M. J. van Setten', 'nobody@nowhere')),
])

url = "https://github.com/abinit/abiflows"
download_url = "https://github.com/abinit/abiflows"
platforms = ['Linux', 'darwin']
keywords = ["ABINIT", "ab-initio", "density-function-theory", "first-principles", "electronic-structure", "pymatgen"]
classifiers = [
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.6",
    "Programming Language :: Python :: 3.7",
    "Development Status :: 4 - Beta",
    "Intended Audience :: Science/Research",
    "License :: OSI Approved :: GNU General Public License v2 (GPLv2)",
    "Operating System :: OS Independent",
    "Topic :: Scientific/Engineering :: Information Analysis",
    "Topic :: Scientific/Engineering :: Physics",
    "Topic :: Scientific/Engineering :: Chemistry",
    "Topic :: Software Development :: Libraries :: Python Modules",
]
|
davidwaroquiers/abiflows
|
abiflows/core/release.py
|
Python
|
gpl-2.0
| 2,408
|
[
"ABINIT",
"pymatgen"
] |
ccd0bbc9c28d9cbf0bbc355c0df231cd273e9b881aaa7b260ddf66969a985a60
|
import os
import sys
import vtkAll as vtk
from ddapp import botpy
import math
import time
import types
import functools
import numpy as np
from ddapp import transformUtils
from ddapp import lcmUtils
from ddapp.timercallback import TimerCallback
from ddapp.asynctaskqueue import AsyncTaskQueue
from ddapp import objectmodel as om
from ddapp import visualization as vis
from ddapp import applogic as app
from ddapp.debugVis import DebugData
from ddapp import ikplanner
from ddapp.ikparameters import IkParameters
from ddapp import ioUtils
from ddapp.simpletimer import SimpleTimer
from ddapp.utime import getUtime
from ddapp import affordanceitems
from ddapp import robotstate
from ddapp import robotplanlistener
from ddapp import segmentation
from ddapp import planplayback
from ddapp import affordanceupdater
from ddapp import segmentationpanel
from ddapp import vtkNumpy as vnp
from ddapp import switchplanner
from ddapp.tasks.taskuserpanel import TaskUserPanel
from ddapp.tasks.taskuserpanel import ImageBasedAffordanceFit
import ddapp.tasks.robottasks as rt
import ddapp.tasks.taskmanagerwidget as tmw
import drc as lcmdrc
import copy
from PythonQt import QtCore, QtGui
class SurpriseTaskPlanner(object):
    """Holds robot-system handles and manipulation settings for the surprise task."""
    def __init__(self, robotSystem):
        self.robotSystem = robotSystem
        self.robotModel = robotSystem.robotStateModel
        self.ikPlanner = robotSystem.ikPlanner
        # posture locks applied while planning manipulation moves
        self.lockBackForManip = False
        self.lockBaseForManip = True
        self.side = 'right'
        # grasp frame: palm offset of 0.1 from the hand, on the active side
        self.toolTipToHandFrame = robotSystem.ikPlanner.newPalmOffsetGraspToHandFrame(self.side, 0.1)
class ImageFitter(ImageBasedAffordanceFit):
    """Image-based affordance fitter that dispatches to a pluggable fit function."""

    def __init__(self, switchPlanner):
        ImageBasedAffordanceFit.__init__(self, numberOfPoints=1)
        self.switchPlanner = switchPlanner
        self.fitFunc = None  # callable(polyData, points); set by the owning panel
        self.pickLineRadius = 0.05
        self.pickNearestToCamera = False
        self.useLocalPlaneFit = True
        self.useVoxelGrid = True

    def fit(self, polyData, points):
        """Run the currently selected fit function, if one is set."""
        if self.fitFunc:
            self.fitFunc(polyData, points)

    def fitSwitchBox(self, polyData, points):
        """Fit a wall-aligned frame at the switch box and spawn the affordance.

        points[0] is the picked box position, points[1] a point on the wall;
        the distance between the two picks sets the plane-search radius.
        (FIX: removed unused locals ``searchRadius``, ``wallPoint`` and
        ``origin`` -- dead code left over from an earlier version.)
        """
        boxPosition = points[0]
        # find a frame that is aligned with wall
        planePoints, normal = segmentation.applyLocalPlaneFit(polyData, points[0], searchRadius=np.linalg.norm(points[1] - points[0]), searchRadiusEnd=1.0)
        obj = vis.updatePolyData(planePoints, 'wall plane points', color=[0,1,0], visible=False)
        obj.setProperty('Point Size', 7)
        # flip the wall normal so it points towards the camera
        viewDirection = segmentation.SegmentationContext.getGlobalInstance().getViewDirection()
        if np.dot(normal, viewDirection) < 0:
            normal = -normal
        # build an orthonormal frame with x along the wall normal and z up
        zaxis = [0,0,1]
        xaxis = normal
        yaxis = np.cross(zaxis, xaxis)
        yaxis /= np.linalg.norm(yaxis)
        zaxis = np.cross(xaxis, yaxis)
        zaxis /= np.linalg.norm(zaxis)
        t = transformUtils.getTransformFromAxes(xaxis, yaxis, zaxis)
        # translate that frame to the box position
        t.PostMultiply()
        t.Translate(boxPosition)
        boxFrame = transformUtils.copyFrame(t)
        self.switchPlanner.spawnBoxAffordanceAtFrame(boxFrame)
class SurpriseTaskPanel(TaskUserPanel):
def __init__(self, robotSystem):
    """Create planners, the image fitter, and populate the panel UI."""
    TaskUserPanel.__init__(self, windowTitle='Surprise Task')
    self.planner = SurpriseTaskPlanner(robotSystem)
    self.switchPlanner = switchplanner.SwitchPlanner(robotSystem)
    # image-based fitting widget that feeds affordances to the switch planner
    self.fitter = ImageFitter(self.switchPlanner)
    self.initImageView(self.fitter.imageView)
    self.addDefaultProperties()
    self.addButtons()
    self.addSwitchTasks()
def test(self):
    """Debug hook: print a marker to confirm the panel is wired up."""
    # print() with a single argument behaves identically under Python 2's
    # print statement and is forward-compatible with Python 3.
    print('test')
def addButtons(self):
    """Populate the manual-action buttons of the task panel."""
    self.addManualSpacer()
    self.addManualButton('arms prep 1', self.switchPlanner.planArmsPrep1)
    self.addManualButton('arms prep 2', self.switchPlanner.planArmsPrep2)
    self.addManualButton('fit switch box', self.fitSwitchBox)
    self.addManualButton('spawn switch box affordance', self.switchPlanner.spawnBoxAffordance)
    self.addManualButton('spawn footstep frame', self.switchPlanner.spawnFootstepFrame)
    self.addManualButton('reset reach frame', self.switchPlanner.updateReachFrame)
    # self.addManualButton('plan reach to reach frame', self.switchPlanner.planReach)
    self.addManualButton('Reach to pinch reach frame', self.onPlanPinchReach)
    self.addManualButton('Commit Manip Plan', self.switchPlanner.commitManipPlan)
def onPlanPinchReach(self):
    """Plan a pinch reach using the panel's cached speed limit."""
    # maxDegreesPerSecond is cached by syncProperties()
    self.switchPlanner.planPinchReach(maxDegreesPerSecond=self.maxDegreesPerSecond)
def getSide(self):
    """Return the currently selected hand, lowercased ('left' or 'right')."""
    hand = self.params.getPropertyEnumValue('Hand')
    return hand.lower()
def addDefaultProperties(self):
    """Declare the user-editable panel properties and sync the default hand."""
    self.params.addProperty('max degrees per second', 10, attributes=om.PropertyAttributes(singleStep=1, decimals=2))
    self.params.addProperty('Hand', 0, attributes=om.PropertyAttributes(enumNames=['Left', 'Right']))
    # initialise the UI from the planner's default side
    self.params.setProperty('Hand', self.planner.side.capitalize())
def onPropertyChanged(self, propertySet, propertyName):
    """React to property edits: update the active side, refresh cached values."""
    if propertyName == 'Hand':
        self.planner.side = self.getSide()
    # always refresh the cached parameter values
    self.syncProperties()
def syncProperties(self):
    """Cache property values that the planners read directly."""
    self.maxDegreesPerSecond = self.params.getProperty('max degrees per second')
def setParamsPreTeleop(self):
    """Raise the speed limit for the pre-teleop reach phase."""
    self.params.setProperty('max degrees per second', 30)
def setParamsTeleop(self):
    """Lower the speed limit for the fine teleop phase."""
    self.params.setProperty('max degrees per second', 10)
def addTasks(self):
    """Rebuild the task tree (the actual task list comes from addSwitchTasks)."""
    # some helpers
    self.folder = None
    def addTask(task, parent=None):
        parent = parent or self.folder
        self.taskTree.onAddTask(task, copy=False, parent=parent)
    def addFunc(name, func, parent=None):
        addTask(rt.CallbackTask(callback=func, name=name), parent=parent)
    def addFolder(name, parent=None):
        self.folder = self.taskTree.addGroup(name, parent=parent)
        return self.folder
    def addManipTask(name, planFunc, userPrompt=False):
        prevFolder = self.folder
        addFolder(name, prevFolder)
        addFunc('plan', planFunc)
        if not userPrompt:
            addTask(rt.CheckPlanInfo(name='check manip plan info'))
        else:
            addTask(rt.UserPromptTask(name='approve manip plan', message='Please approve manipulation plan.'))
        # BUG FIX: this panel has no 'drillDemo' attribute (copy/paste from the
        # drill task panel); commit through the switch planner, exactly as the
        # equivalent helper in addSwitchTasks() does.
        addFunc('execute manip plan', self.switchPlanner.commitManipPlan)
        addTask(rt.WaitForManipulationPlanExecution(name='wait for manip execution'))
        self.folder = prevFolder
    self.taskTree.removeAllTasks()
    side = self.getSide()
    ###############
    # add the tasks
    # prep
    # addFolder('Prep')
    # addTask(rt.CloseHand(name='close left hand', side='Left'))
    # addTask(rt.CloseHand(name='close right hand', side='Right'))
    self.addSwitchTasks()
def addSwitchTasks(self):
    """Populate the task tree with the switch-box manipulation sequence."""
    # some helpers
    self.folder = None
    def addTask(task, parent=None):
        parent = parent or self.folder
        self.taskTree.onAddTask(task, copy=False, parent=parent)
    def addFunc(name, func, parent=None):
        addTask(rt.CallbackTask(callback=func, name=name), parent=parent)
    def addFolder(name, parent=None):
        self.folder = self.taskTree.addGroup(name, parent=parent)
        return self.folder
    def addManipTask(name, planFunc, userPrompt=False):
        # build a folder with plan / approve / execute / wait steps
        prevFolder = self.folder
        addFolder(name, prevFolder)
        addFunc('plan', planFunc)
        if not userPrompt:
            addTask(rt.CheckPlanInfo(name='check manip plan info'))
        else:
            addTask(rt.UserPromptTask(name='approve manip plan', message='Please approve manipulation plan.'))
        addFunc('execute manip plan', self.switchPlanner.commitManipPlan)
        addTask(rt.WaitForManipulationPlanExecution(name='wait for manip execution'))
        self.folder = prevFolder
    self.taskTree.removeAllTasks()
    side = self.getSide()
    addFolder('Fit Box Affordance')
    addFunc('fit switch box affordance', self.fitSwitchBox)
    addTask(rt.UserPromptTask(name='verify/adjust affordance', message='verify/adjust affordance.'))
    # walk to drill
    addFolder('Walk')
    addFunc('plan footstep frame', self.switchPlanner.spawnFootstepFrame)
    addTask(rt.RequestFootstepPlan(name='plan walk to drill', stanceFrameName='switch box stance frame'))
    addTask(rt.UserPromptTask(name='approve footsteps', message='Please approve footstep plan.'))
    addTask(rt.CommitFootstepPlan(name='walk to switch box', planName='switch box stance frame footstep plan'))
    addTask(rt.WaitForWalkExecution(name='wait for walking'))
    armsUp = addFolder('Arms Up')
    addManipTask('Arms Up 1', self.switchPlanner.planArmsPrep1, userPrompt=True)
    self.folder = armsUp
    addManipTask('Arms Up 2', self.switchPlanner.planArmsPrep2, userPrompt=True)
    addTask(rt.CloseHand(side='Right', mode='Pinch', name='set finger pinch'))
    reach = addFolder('Reach')
    addFunc('set degrees per second 30', self.setParamsPreTeleop)
    addFunc('update reach frame', self.switchPlanner.updateReachFrame)
    addTask(rt.UserPromptTask(name='adjust frame', message='adjust reach frame if necessary'))
    addManipTask('reach above box', self.onPlanPinchReach, userPrompt=True)
    teleop = addFolder('Teleop')
    addFunc('set degrees per second 10', self.setParamsTeleop)
    addTask(rt.UserPromptTask(name='wait for teleop', message='continue when finished with task.'))
    armsDown = addFolder('Arms Down')
    addTask(rt.UserPromptTask(name='check left hand free', message='check left hand free to close and move back'))
    # NOTE(review): task is named 'close left hand' but uses side='Right' -- confirm intent
    addTask(rt.CloseHand(name='close left hand', side='Right'))
    addManipTask('Arms Down 1', self.switchPlanner.planArmsPrep2, userPrompt=True)
    # FIX: removed a duplicated 'self.folder = armsDown' statement (the same
    # assignment appeared twice in a row; the second had no effect)
    self.folder = armsDown
    addManipTask('Arms Down 2', self.switchPlanner.planArmsPrep1, userPrompt=True)
    self.folder = armsDown
    addManipTask('plan nominal', self.switchPlanner.planNominal, userPrompt=True)
def fitSwitchBox(self):
    """Arm the image fitter to fit the switch box from two picked points."""
    # print() with a single argument is identical under Python 2's print
    # statement and forward-compatible with Python 3.
    print('fitting switch box')
    # two picks are needed: box position and a point on the wall
    self.fitter.imagePicker.numberOfPoints = 2
    self.fitter.pointCloudSource = 'lidar'
    self.fitter.fitFunc = self.fitter.fitSwitchBox
|
RussTedrake/director
|
src/python/ddapp/surprisetask.py
|
Python
|
bsd-3-clause
| 10,703
|
[
"VTK"
] |
b2a751459049a711c8378bb8d612df864f627b0af8387027bd822591b8ca5d7a
|
"""
Numba-specific errors and warnings.
"""
import abc
import contextlib
import os
import sys
import warnings
import numba.core.config
import numpy as np
from collections import defaultdict
from numba.core.utils import (chain_exception, use_old_style_errors,
use_new_style_errors)
from functools import wraps
from abc import abstractmethod
# Filled at the end
__all__ = []
class NumbaWarning(Warning):
    """
    Base category for all Numba compiler warnings.
    """

    def __init__(self, msg, loc=None, highlighting=True, ):
        self.msg = msg
        self.loc = loc
        # choose the message formatter: terminal colouring or plain identity
        if highlighting:
            highlight = termcolor().errmsg
        else:
            def highlight(x):
                return x
        # include the source location when one was supplied
        if loc:
            text = "%s\n%s\n" % (msg, loc.strformat())
        else:
            text = "%s" % (msg,)
        super(NumbaWarning, self).__init__(highlight(text))
class NumbaPerformanceWarning(NumbaWarning):
    """
    Warning category for operations that might not perform
    as fast as expected.
    """
class NumbaDeprecationWarning(NumbaWarning):
    """
    Warning category for the use of a deprecated feature.
    """
class NumbaPendingDeprecationWarning(NumbaWarning):
    """
    Warning category for the use of a feature that is pending deprecation.
    """
class NumbaParallelSafetyWarning(NumbaWarning):
    """
    Warning category for an operation inside a prange that
    might not have parallel semantics.
    """
class NumbaTypeSafetyWarning(NumbaWarning):
    """
    Warning category for potentially unsafe casting operations.
    """
class NumbaExperimentalFeatureWarning(NumbaWarning):
    """
    Warning category for the use of an experimental feature.
    """
class NumbaInvalidConfigWarning(NumbaWarning):
    """
    Warning category for the use of an invalid configuration value.
    """
class NumbaPedanticWarning(NumbaWarning):
    """
    Warning category for reporting pedantic messages.
    """
    def __init__(self, msg, **kwargs):
        # append the module-level explanation of pedantic mode to every message
        super().__init__(f"{msg}\n{pedantic_warning_info}")
class NumbaIRAssumptionWarning(NumbaPedanticWarning):
    """
    Warning category for reporting a violated IR assumption.
    """
# These are needed in the color formatting of errors setup
class _ColorScheme(metaclass=abc.ABCMeta):
    """Abstract interface for colourising the parts of an error message.

    Each method takes a message string and returns it formatted for that
    role (source code, error text, file name, indicator, highlight, reset).
    """
    @abstractmethod
    def code(self, msg):
        pass
    @abstractmethod
    def errmsg(self, msg):
        pass
    @abstractmethod
    def filename(self, msg):
        pass
    @abstractmethod
    def indicate(self, msg):
        pass
    @abstractmethod
    def highlight(self, msg):
        pass
    @abstractmethod
    def reset(self, msg):
        pass
class _DummyColorScheme(_ColorScheme):
    """No-op base colour scheme; concrete schemes override the formatters."""
    def __init__(self, theme=None):
        # *theme* is accepted for interface compatibility and ignored here
        pass
    def code(self, msg):
        pass
    def errmsg(self, msg):
        pass
    def filename(self, msg):
        pass
    def indicate(self, msg):
        pass
    def highlight(self, msg):
        pass
    def reset(self, msg):
        pass
# holds reference to the instance of the terminal color scheme in use
_termcolor_inst = None
# Set up coloured error output via colorama when possible; otherwise fall
# back to a pass-through scheme. Both branches define ``termcolor()``.
try:
    import colorama
    # If the colorama version is < 0.3.9 it can break stdout/stderr in some
    # situations, as a result if this condition is met colorama is disabled and
    # the user is warned. Note that early versions did not have a __version__.
    colorama_version = getattr(colorama, '__version__', '0.0.0')
    if tuple([int(x) for x in colorama_version.split('.')]) < (0, 3, 9):
        msg = ("Insufficiently recent colorama version found. "
               "Numba requires colorama >= 0.3.9")
        # warn the user
        warnings.warn(msg)
        # trip the exception to disable color errors
        raise ImportError
    # If Numba is running in testsuite mode then do not use error message
    # coloring so CI system output is consistently readable without having
    # to read between shell escape characters.
    if os.environ.get('NUMBA_DISABLE_ERROR_MESSAGE_HIGHLIGHTING', None):
        raise ImportError # just to trigger the exception handler below
except ImportError:
    class NOPColorScheme(_DummyColorScheme):
        # Pass-through scheme: every hook returns its input unchanged.
        def __init__(self, theme=None):
            if theme is not None:
                raise ValueError("specifying a theme has no effect")
            _DummyColorScheme.__init__(self, theme=theme)
        def code(self, msg):
            return msg
        def errmsg(self, msg):
            return msg
        def filename(self, msg):
            return msg
        def indicate(self, msg):
            return msg
        def highlight(self, msg):
            return msg
        def reset(self, msg):
            return msg
    def termcolor():
        # Lazily create and return the module-wide no-op scheme singleton.
        global _termcolor_inst
        if _termcolor_inst is None:
            _termcolor_inst = NOPColorScheme()
        return _termcolor_inst
else:
    from colorama import init, reinit, deinit, Fore, Style
    class ColorShell(object):
        # Context manager (re)initialising colorama around a write.
        _has_initialized = False
        def __init__(self):
            init()
            self._has_initialized = True
        def __enter__(self):
            if self._has_initialized:
                reinit()
        def __exit__(self, *exc_detail):
            # NOTE(review): this bare attribute access has no effect; it
            # presumably intended to emit Style.RESET_ALL — confirm.
            Style.RESET_ALL
            deinit()
    class reset_terminal(object):
        # Byte buffer that appends a terminal reset code on exit.
        def __init__(self):
            self._buf = bytearray(b'')
        def __enter__(self):
            return self._buf
        def __exit__(self, *exc_detail):
            self._buf += bytearray(Style.RESET_ALL.encode('utf-8'))
    # define some default themes, if more are added, update the envvars docs!
    themes = {}
    # No color added, just bold weighting
    themes['no_color'] = {'code': None,
                          'errmsg': None,
                          'filename': None,
                          'indicate': None,
                          'highlight': None,
                          'reset': None, }
    # suitable for terminals with a dark background
    themes['dark_bg'] = {'code': Fore.BLUE,
                         'errmsg': Fore.YELLOW,
                         'filename': Fore.WHITE,
                         'indicate': Fore.GREEN,
                         'highlight': Fore.RED,
                         'reset': Style.RESET_ALL, }
    # suitable for terminals with a light background
    themes['light_bg'] = {'code': Fore.BLUE,
                          'errmsg': Fore.BLACK,
                          'filename': Fore.MAGENTA,
                          'indicate': Fore.BLACK,
                          'highlight': Fore.RED,
                          'reset': Style.RESET_ALL, }
    # suitable for terminals with a blue background
    themes['blue_bg'] = {'code': Fore.WHITE,
                         'errmsg': Fore.YELLOW,
                         'filename': Fore.MAGENTA,
                         'indicate': Fore.CYAN,
                         'highlight': Fore.RED,
                         'reset': Style.RESET_ALL, }
    # suitable for use in jupyter notebooks
    themes['jupyter_nb'] = {'code': Fore.BLACK,
                            'errmsg': Fore.BLACK,
                            'filename': Fore.GREEN,
                            'indicate': Fore.CYAN,
                            'highlight': Fore.RED,
                            'reset': Style.RESET_ALL, }
    default_theme = themes['no_color']
    class HighlightColorScheme(_DummyColorScheme):
        # Applies the configured theme's colours via colorama escape codes.
        def __init__(self, theme=default_theme):
            self._code = theme['code']
            self._errmsg = theme['errmsg']
            self._filename = theme['filename']
            self._indicate = theme['indicate']
            self._highlight = theme['highlight']
            self._reset = theme['reset']
            _DummyColorScheme.__init__(self, theme=theme)
        def _markup(self, msg, color=None, style=Style.BRIGHT):
            # Wrap *msg* in the requested colour/style escapes plus a reset.
            features = ''
            if color:
                features += color
            if style:
                features += style
            with ColorShell():
                with reset_terminal() as mu:
                    mu += features.encode('utf-8')
                    mu += (msg).encode('utf-8')
                return mu.decode('utf-8')
        def code(self, msg):
            return self._markup(msg, self._code)
        def errmsg(self, msg):
            return self._markup(msg, self._errmsg)
        def filename(self, msg):
            return self._markup(msg, self._filename)
        def indicate(self, msg):
            return self._markup(msg, self._indicate)
        def highlight(self, msg):
            return self._markup(msg, self._highlight)
        def reset(self, msg):
            return self._markup(msg, self._reset)
    def termcolor():
        # Lazily create the singleton using the user-configured theme.
        global _termcolor_inst
        if _termcolor_inst is None:
            scheme = themes[numba.core.config.COLOR_SCHEME]
            _termcolor_inst = HighlightColorScheme(scheme)
        return _termcolor_inst
# Canned advice strings appended to various error and warning messages.
# These are runtime text: do not reflow or edit without checking consumers.
pedantic_warning_info = """
This warning came from an internal pedantic check. Please report the warning
message and traceback, along with a minimal reproducer at:
https://github.com/numba/numba/issues/new?template=bug_report.md
"""
feedback_details = """
Please report the error message and traceback, along with a minimal reproducer
at: https://github.com/numba/numba/issues/new?template=bug_report.md
If more help is needed please feel free to speak to the Numba core developers
directly at: https://gitter.im/numba/numba
Thanks in advance for your help in improving Numba!
"""
unsupported_error_info = """
Unsupported functionality was found in the code Numba was trying to compile.
If this functionality is important to you please file a feature request at:
https://github.com/numba/numba/issues/new?template=feature_request.md
"""
interpreter_error_info = """
Unsupported Python functionality was found in the code Numba was trying to
compile. This error could be due to invalid code, does the code work
without Numba? (To temporarily disable Numba JIT, set the `NUMBA_DISABLE_JIT`
environment variable to non-zero, and then rerun the code).
If the code is valid and the unsupported functionality is important to you
please file a feature request at:
https://github.com/numba/numba/issues/new?template=feature_request.md
To see Python/NumPy features supported by the latest release of Numba visit:
https://numba.readthedocs.io/en/stable/reference/pysupported.html
and
https://numba.readthedocs.io/en/stable/reference/numpysupported.html
"""
constant_inference_info = """
Numba could not make a constant out of something that it decided should be
a constant. This could well be a current limitation in Numba's internals,
however please first check that your code is valid for compilation,
particularly with respect to string interpolation (not supported!) and
the requirement of compile time constants as arguments to exceptions:
https://numba.readthedocs.io/en/stable/reference/pysupported.html?highlight=exceptions#constructs
If the code is valid and the unsupported functionality is important to you
please file a feature request at:
https://github.com/numba/numba/issues/new?template=feature_request.md
If you think your code should work with Numba. %s
""" % feedback_details
typing_error_info = """
This is not usually a problem with Numba itself but instead often caused by
the use of unsupported features or an issue in resolving types.
To see Python/NumPy features supported by the latest release of Numba visit:
https://numba.readthedocs.io/en/stable/reference/pysupported.html
and
https://numba.readthedocs.io/en/stable/reference/numpysupported.html
For more information about typing errors and how to debug them visit:
https://numba.readthedocs.io/en/stable/user/troubleshoot.html#my-code-doesn-t-compile
If you think your code should work with Numba, please report the error message
and traceback, along with a minimal reproducer at:
https://github.com/numba/numba/issues/new?template=bug_report.md
"""
reportable_issue_info = """
-------------------------------------------------------------------------------
This should not have happened, a problem has occurred in Numba's internals.
You are currently using Numba version %s.
%s
""" % (numba.__version__, feedback_details)
# Lookup table mapping an error-context tag to the advice text appended to
# error messages raised in that context.
error_extras = dict()
error_extras['unsupported_error'] = unsupported_error_info
error_extras['typing'] = typing_error_info
error_extras['reportable'] = reportable_issue_info
error_extras['interpreter'] = interpreter_error_info
error_extras['constant_inference'] = constant_inference_info
def deprecated(arg):
    """Define a deprecation decorator.
    An optional string should refer to the new API to be used instead.

    Example:
      @deprecated
      def old_func(): ...

      @deprecated('new_func')
      def old_func(): ..."""
    # When called as ``@deprecated('name')`` *arg* is the replacement name;
    # when called bare as ``@deprecated`` it is the decorated function.
    replacement = arg if isinstance(arg, str) else None

    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            text = "Call to deprecated function \"{}\"."
            if replacement:
                text += "\n Use \"{}\" instead."
            warnings.warn(text.format(func.__name__, replacement),
                          category=DeprecationWarning, stacklevel=2)
            return func(*args, **kwargs)
        return wrapper

    # Bare usage: *arg* is the function itself, decorate it immediately.
    return decorator if replacement else decorator(arg)
class WarningsFixer(object):
    """
    An object "fixing" warnings of a given category caught during
    certain phases. The warnings can have their filename and lineno fixed,
    and they are deduplicated as well.
    """

    def __init__(self, category):
        # Warning class (or base class) this fixer is responsible for.
        self._category = category
        # {(filename, lineno, category) -> set of messages}; a set so that
        # identical messages at one location are emitted only once.
        self._warnings = defaultdict(set)

    @contextlib.contextmanager
    def catch_warnings(self, filename=None, lineno=None):
        """
        Store warnings and optionally fix their filename and lineno.
        """
        with warnings.catch_warnings(record=True) as wlist:
            warnings.simplefilter('always', self._category)
            yield
        for w in wlist:
            msg = str(w.message)
            if issubclass(w.category, self._category):
                # Store warnings of this category for deduplication
                filename = filename or w.filename
                lineno = lineno or w.lineno
                self._warnings[filename, lineno, w.category].add(msg)
            else:
                # Simply emit other warnings again
                warnings.warn_explicit(msg, w.category,
                                       w.filename, w.lineno)

    def flush(self):
        """
        Emit all stored warnings.
        """
        def key(arg):
            # It is possible through codegen to create entirely identical
            # warnings, this leads to comparing types when sorting which breaks
            # on Python 3. Key as str() and if the worse happens then `id`
            # creates some uniqueness
            return str(arg) + str(id(arg))

        for (filename, lineno, category), messages in sorted(
                self._warnings.items(), key=key):
            for msg in sorted(messages):
                warnings.warn_explicit(msg, category, filename, lineno)
        self._warnings.clear()
class NumbaError(Exception):
    """Base class for all Numba-raised errors.

    Keeps the original message in ``msg`` and the optional source location
    in ``loc``; the rendered exception text is optionally colourised via
    ``termcolor()``.
    """

    def __init__(self, msg, loc=None, highlighting=True):
        self.msg = msg
        self.loc = loc
        # Choose the message decorator: terminal colouring or identity.
        if highlighting:
            decorate = termcolor().errmsg
        else:
            decorate = lambda x: x
        if loc:
            rendered = "%s\n%s\n" % (msg, loc.strformat())
        else:
            rendered = "%s" % (msg,)
        super(NumbaError, self).__init__(decorate(rendered))

    @property
    def contexts(self):
        """Lazily-created list of contextual messages attached to this error."""
        if not hasattr(self, '_contexts'):
            self._contexts = []
        return self._contexts

    def add_context(self, msg):
        """
        Add contextual info. The exception message is expanded with the new
        contextual information.
        """
        self.contexts.append(msg)
        template = (termcolor().errmsg('{0}\n')
                    + termcolor().filename('During: {1}'))
        self.args = (template.format(self, msg),)
        return self

    def patch_message(self, new_message):
        """
        Change the error message to the given new message.
        """
        self.args = (new_message,) + self.args[1:]
class UnsupportedError(NumbaError):
    """Raised when Numba has no implementation for the requested
    functionality."""
class UnsupportedRewriteError(UnsupportedError):
    """:class:`UnsupportedError` raised from rewrite passes."""
class IRError(NumbaError):
    """Raised when an error occurs during Numba IR generation."""
class RedefinedError(IRError):
    """Raised during IR interpretation when a variable is redefined."""
class NotDefinedError(IRError):
    """Raised when an undefined variable is encountered while interpreting
    the IR."""

    def __init__(self, name, loc=None):
        # Record the offending variable name for programmatic inspection.
        self.name = name
        super(NotDefinedError, self).__init__(
            "The compiler failed to analyze the bytecode. "
            "Variable '%s' is not defined." % name, loc=loc)
class VerificationError(IRError):
    """Raised when IR verification fails.

    After Numba's internal representation (IR) is constructed it is verified
    to ensure terminators are present and correctly placed; a violation of
    that condition raises this error.
    """
class DeprecationError(NumbaError):
    """Raised when deprecated functionality is used."""
class LoweringError(NumbaError):
    """Raised when an error occurs during lowering."""

    def __init__(self, msg, loc=None):
        super(LoweringError, self).__init__(msg, loc=loc)
class UnsupportedParforsError(NumbaError):
    """Raised because parfors is not supported on the current platform."""
class ForbiddenConstruct(LoweringError):
    """Raised when a forbidden Python construct is encountered
    (e.g. use of locals())."""
class TypingError(NumbaError):
    """Raised when type inference fails."""
class UntypedAttributeError(TypingError):
    """Typing failure for an attribute lookup; special-cases attributes of
    the numpy module to give a clearer message."""

    def __init__(self, value, attr, loc=None):
        module = getattr(value, 'pymod', None)
        if module is not None and module == np:
            # unsupported numpy feature.
            msg = ("Use of unsupported NumPy function 'numpy.%s' "
                   "or unsupported use of the function.") % attr
        else:
            msg = "Unknown attribute '{attr}' of type {type}".format(
                type=value, attr=attr)
        super(UntypedAttributeError, self).__init__(msg, loc=loc)
class ByteCodeSupportError(NumbaError):
    """Raised when the bytecode of the user's function cannot be
    extracted."""

    def __init__(self, msg, loc=None):
        super(ByteCodeSupportError, self).__init__(msg, loc=loc)
class CompilerError(NumbaError):
    """Raised for high-level errors in the compiler."""
class ConstantInferenceError(NumbaError):
    """Raised when constant inference fails."""

    def __init__(self, value, loc=None):
        super(ConstantInferenceError, self).__init__(value, loc=loc)
class InternalError(NumbaError):
    """Wraps an internal error that occurred within the compiler."""

    def __init__(self, exception):
        # Keep the wrapped exception around for later inspection.
        self.old_exception = exception
        super(InternalError, self).__init__(str(exception))
class InternalTargetMismatchError(InternalError):
    """For signalling a target mismatch error occurred internally within the
    compiler.
    """
    def __init__(self, kind, target_hw, hw_clazz):
        # *kind* is title-cased into the message; *target_hw* is the local
        # target and *hw_clazz* the declared target class.
        msg = (f"{kind.title()} being resolved on a target from which it does "
               f"not inherit. Local target is {target_hw}, declared "
               f"target class is {hw_clazz}.")
        super().__init__(msg)
class RequireLiteralValue(TypingError):
    """Signals that a function's typing requires a constant value for some
    of its arguments."""
class ForceLiteralArg(NumbaError):
    """A Pseudo-exception to signal the dispatcher to type an argument literally

    Attributes
    ----------
    requested_args : frozenset[int]
        requested positions of the arguments.
    """

    def __init__(self, arg_indices, fold_arguments=None, loc=None):
        """
        Parameters
        ----------
        arg_indices : Sequence[int]
            requested positions of the arguments.
        fold_arguments: callable
            A function ``(tuple, dict) -> tuple`` that binds and flattens
            the ``args`` and ``kwargs``.
        loc : numba.ir.Loc or None
        """
        super(ForceLiteralArg, self).__init__(
            "Pseudo-exception to force literal arguments in the dispatcher",
            loc=loc,
        )
        self.requested_args = frozenset(arg_indices)
        self.fold_arguments = fold_arguments

    def bind_fold_arguments(self, fold_arguments):
        """Bind the fold_arguments function
        """
        bound = ForceLiteralArg(self.requested_args, fold_arguments,
                                loc=self.loc)
        return chain_exception(bound, self)

    def combine(self, other):
        """Returns a new instance by or'ing the requested_args.
        """
        if not isinstance(other, ForceLiteralArg):
            raise TypeError(
                '*other* must be a {} but got a {} instead'.format(
                    ForceLiteralArg, type(other)))
        return ForceLiteralArg(self.requested_args | other.requested_args)

    def __or__(self, other):
        """Same as self.combine(other)
        """
        return self.combine(other)
class LiteralTypingError(TypingError):
    """Raised when typing a Literal type fails."""
# These Exception classes are just Numba copies of their Python equivalents for
# use internally in cases where we want e.g. type inference to keep on trying.
# Exceptions extending from NumbaError are considered "special" by Numba's
# internals and are treated differently to standard Python exceptions which are
# permitted to just propagate up the stack.
class NumbaValueError(TypingError):
    """Numba-internal analogue of :class:`ValueError`."""
class NumbaTypeError(TypingError):
    """Numba-internal analogue of :class:`TypeError`."""
class NumbaAttributeError(TypingError):
    """Numba-internal analogue of :class:`AttributeError`."""
class NumbaAssertionError(TypingError):
    """Numba-internal analogue of :class:`AssertionError`."""
class NumbaNotImplementedError(TypingError):
    """Numba-internal analogue of :class:`NotImplementedError`."""
class NumbaKeyError(TypingError):
    """Numba-internal analogue of :class:`KeyError`."""
class NumbaIndexError(TypingError):
    """Numba-internal analogue of :class:`IndexError`."""
class NumbaRuntimeError(NumbaError):
    """Numba-internal analogue of :class:`RuntimeError`."""
def _format_msg(fmt, args, kwargs):
return fmt.format(*args, **kwargs)
# Filesystem prefix of this package; used below to skip locations that point
# inside numba itself rather than user code.
_numba_path = os.path.dirname(__file__)
# Location details collected by ``new_error_context`` for user-code locations.
loc_info = {}
@contextlib.contextmanager
def new_error_context(fmt_, *args, **kwargs):
    """
    A contextmanager that prepend contextual information to any exception
    raised within. If the exception type is not an instance of NumbaError,
    it will be wrapped into a InternalError. The exception class can be
    changed by providing a "errcls_" keyword argument with the exception
    constructor.

    The first argument is a message that describes the context. It can be a
    format string. If there are additional arguments, it will be used as
    ``fmt_.format(*args, **kwargs)`` to produce the final message string.
    """
    errcls = kwargs.pop('errcls_', InternalError)
    loc = kwargs.get('loc', None)
    # Only record location details for user code, not numba internals.
    if loc is not None and not loc.filename.startswith(_numba_path):
        loc_info.update(kwargs)
    try:
        yield
    except NumbaError as e:
        # Numba errors carry context; append ours and re-raise as-is.
        e.add_context(_format_msg(fmt_, args, kwargs))
        raise
    except AssertionError:
        # Let assertion error pass through for shorter traceback in debugging
        raise
    except Exception as e:
        # Behaviour depends on the error-capture style configured elsewhere
        # (use_old_style_errors / use_new_style_errors, defined off-view).
        if use_old_style_errors():
            newerr = errcls(e).add_context(_format_msg(fmt_, args, kwargs))
            if numba.core.config.FULL_TRACEBACKS:
                tb = sys.exc_info()[2]
            else:
                tb = None
            raise newerr.with_traceback(tb)
        elif use_new_style_errors():
            raise e
        else:
            msg = ("Unknown CAPTURED_ERRORS style: "
                   f"'{numba.core.config.CAPTURED_ERRORS}'.")
            assert 0, msg
# Re-export every public exception/warning type defined in this module.
__all__ += [name for (name, value) in globals().items()
            if not name.startswith('_') and isinstance(value, type)
            and issubclass(value, (Exception, Warning))]
|
seibert/numba
|
numba/core/errors.py
|
Python
|
bsd-2-clause
| 24,652
|
[
"VisIt"
] |
f9fe5545bc5f0dda9857a6de226e78b0c117fb221ed14952a54ea514908fbb54
|
#!/usr/bin/env python2
#  Copyright (C) 2016-2017(H)
#      Max Planck Institute for Polymer Research
#
#  This file is part of ESPResSo++.
#
#  ESPResSo++ is free software: you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation, either version 3 of the License, or
#  (at your option) any later version.
#
#  ESPResSo++ is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  You should have received a copy of the GNU General Public License
#  along with this program.  If not, see <http://www.gnu.org/licenses/>.
#########################################################################################
#                                                                                       #
#  ESPResSo++ Python script for an H-AdResS tetrahedral liquid simulation including KTI #
#                                                                                       #
#  KTI stands for Kirkwood Thermodynamic Integration                                    #
#########################################################################################
# NOTE: this is a Python 2 script (print statements, integer division).
import sys
import time
import espressopp
import mpi4py.MPI as MPI
import Tetracryst # preparation of tetrahedral crystal and constructions of bonds in tetrahedral liquid
from espressopp import Real3D, Int3D
from espressopp.tools import decomp
from espressopp.tools import timers

# timestep, cutoff, skin, AdResS specifications
timestep = 0.0005
rc = 4.5 # cutoff coarse-grained potential
rca = 1.122462048309373 # cutoff atomistic potential (cutoff (2^(1/6)), WCA)
skin = 0.4

# parameters for the thermostat
gamma = 2.0
temp = 1.0

# parameters for size of AdResS dimensions
ex_size = 500.0 # By choosing some random but large value here we make sure, that we have an "atomistic" region in the whole box.
# Although we do not perform an actual H-AdResS simulation we need the H-AdResS algorithms and the force calculation
# as it is performed in the atomistic region of an H-AdResS simulation.
hy_size = 10.0

# prepare tetrahedral liquid in crystal
pid, type, x, y, z, vx, vy, vz, Lx, Ly, Lz = espressopp.tools.readxyz("equilibrated_confKTI.xyz")

# table for coarse-grained potential
tabCG = "table_potential.dat"

# number of CG particles (4 atoms per tetrahedral molecule; Python 2 integer division)
num_particlesCG = len(x)/4
# number of AT particles
num_particles = len(x)

# set up the system
sys.stdout.write('Setting up simulation ...\n')
density = num_particles / (Lx * Ly * Lz)
size = (Lx, Ly, Lz)
system = espressopp.System()
system.rng = espressopp.esutil.RNG()
system.bc = espressopp.bc.OrthorhombicBC(system.rng, size)
system.skin = skin
comm = MPI.COMM_WORLD
nodeGrid = decomp.nodeGrid(comm.size,size,rc,skin)
cellGrid = decomp.cellGrid(size, nodeGrid, rc, skin)

# (H-)AdResS domain decomposition
system.storage = espressopp.storage.DomainDecompositionAdress(system, nodeGrid, cellGrid)

# prepare AT particles
allParticlesAT = []
allParticles = []
tuples = []
for pidAT in range(num_particles):
    allParticlesAT.append([pidAT, # add here these particles just temporarily!
                           Real3D(x[pidAT], y[pidAT], z[pidAT]), # position
                           Real3D(vx[pidAT], vy[pidAT], vz[pidAT]), # velocity
                           Real3D(0, 0, 0),
                           1, 1.0, 1]) # type, mass, is AT particle

# create CG particles
for pidCG in range(num_particlesCG):
    # we put CG molecule in first atom, later CG molecules will be positioned in the center
    cmp = espressopp.tools.AdressSetCG(4, pidCG, allParticlesAT)
    # Preparation of tuples (tuples define, which atoms belong to which CG molecules)
    tmptuple = [pidCG+num_particles]
    for pidAT2 in range(4):
        pid = pidCG*4+pidAT2
        tmptuple.append(pid)
    # append CG particles
    allParticles.append([pidCG+num_particles, # CG particle has to be added first!
                         Real3D(cmp[0], cmp[1], cmp[2]), # pos
                         Real3D(0, 0, 0), # vel
                         Real3D(0, 0, 0), # force
                         0, 4.0, 0]) # type, mass, is not AT particle
    # append AT particles
    for pidAT in range(4):
        pid = pidCG*4+pidAT
        allParticles.append([pid, # now the AT particles can be added
                             (allParticlesAT[pid])[1], # pos
                             (allParticlesAT[pid])[2], # vel
                             (allParticlesAT[pid])[3], # force
                             (allParticlesAT[pid])[4], # type
                             (allParticlesAT[pid])[5], # mass
                             (allParticlesAT[pid])[6]]) # is AT particle
    # append tuple to tuplelist
    tuples.append(tmptuple)

# add particles to system
system.storage.addParticles(allParticles, "id", "pos", "v", "f", "type", "mass", "adrat")
# add tuples to system
ftpl = espressopp.FixedTupleListAdress(system.storage)
ftpl.addTuples(tuples)
system.storage.setFixedTuplesAdress(ftpl)
# add bonds between AT particles
fpl = espressopp.FixedPairListAdress(system.storage, ftpl)
bonds = Tetracryst.makebonds(len(x))
fpl.addBonds(bonds)
# decompose after adding tuples and bonds
print "Added tuples and bonds, decomposing now ..."
system.storage.decompose()
print "done decomposing"

# AdResS Verlet list
vl = espressopp.VerletListAdress(system, cutoff=rc, adrcut=rc,
                                 dEx=ex_size, dHy=hy_size,
                                 adrCenter=[Lx/2, Ly/2, Lz/2])

# non-bonded potentials
# LJ capped WCA between AT and tabulated potential between CG particles
interNB = espressopp.interaction.VerletListHadressLennardJones(vl, ftpl) # Switch on KTI here!
potWCA = espressopp.interaction.LennardJones(epsilon=1.0, sigma=1.0, shift='auto', cutoff=rca)
potCG = espressopp.interaction.Tabulated(itype=3, filename=tabCG, cutoff=rc) # CG
interNB.setPotentialAT(type1=1, type2=1, potential=potWCA) # AT
interNB.setPotentialCG(type1=0, type2=0, potential=potCG) # CG
system.addInteraction(interNB)

# bonded potentials
# quartic potential between AT particles
potQuartic = espressopp.interaction.Quartic(K=75.0, r0=1.0)
interQuartic = espressopp.interaction.FixedPairListQuartic(system, fpl, potQuartic)
system.addInteraction(interQuartic)

# velocity Verlet integrator
integrator = espressopp.integrator.VelocityVerlet(system)
integrator.dt = timestep

# add AdResS extension
adress = espressopp.integrator.Adress(system, vl, ftpl, KTI = True)
integrator.addExtension(adress)

# add Langevin thermostat extension
langevin = espressopp.integrator.LangevinThermostat(system)
langevin.gamma = gamma
langevin.temperature = temp
langevin.adress = True # enable AdResS!
integrator.addExtension(langevin)

# distribute atoms and CGmolecules according to AdResS domain decomposition
espressopp.tools.AdressDecomp(system, integrator)

# system information
print ''
print 'number of AT particles =', num_particles
print 'number of CG particles =', num_particlesCG
print 'density = %.4f' % (density)
print 'rc =', rc
print 'dt =', integrator.dt
print 'skin =', system.skin
print 'NodeGrid = %s' % (nodeGrid,)
print 'CellGrid = %s' % (cellGrid,)
print ''

# analysis
temperature = espressopp.analysis.Temperature(system)
pressure = espressopp.analysis.Pressure(system)

# timer
start_time = time.clock()

# set lambdas and derivates to zero (KTI controls lambda explicitly)
for i in range(num_particles + num_particlesCG):
    system.storage.modifyParticle(i, 'lambda_adrd', 0.0)
    system.storage.modifyParticle(i, 'lambda_adr', 0.0)
system.storage.decompose()

### EQUILIBRATION ###

# equilibration parameters
EQsteps = 1000
EQintervals = 100
EQnsteps = EQsteps/EQintervals
print ''
print 'Short equilibration'
print 'Equilibration steps =', EQsteps
print ''

# print the data of the initial configuration
fmt = '%5d %8.4f %10.5f %12.3f %12.3f %12.3f %12.3f %12.3f %12.3f\n'
T = temperature.compute()
P = pressure.compute()
Ek = 0.5 * T * (3.0 * num_particles)
Ep = interNB.computeEnergy()
Eb = interQuartic.computeEnergy()
Eaa = interNB.computeEnergyAA()
Ecg = interNB.computeEnergyCG()
sys.stdout.write(' step T P etotal enonbonded ebonded ekinetic eallatom ecg \n')
sys.stdout.write(fmt % (0, T, P, Ek + Ep + Eb, Ep, Eb, Ek, Eaa, Ecg))

# do equilibration
for s in range(1, EQintervals + 1):
    integrator.run(EQnsteps)
    EQstep = EQnsteps * s
    T = temperature.compute()
    P = pressure.compute()
    Ek = 0.5 * T * (3 * num_particles)
    Ep = interNB.computeEnergy()
    Eb = interQuartic.computeEnergy()
    Eaa = interNB.computeEnergyAA()
    Ecg = interNB.computeEnergyCG()
    sys.stdout.write(fmt % (EQstep, T, P, Ek + Ep + Eb, Ep, Eb, Ek, Eaa, Ecg))
print ''
print 'Equilibration Done'
print ''

### KIRKWOOD TI ###

# TI parameters
bins = 100
steps = 100
stepsequi = 50
intervals = 10
nstepsTI = steps/intervals
lambdastep = 1.0/bins

# specify output filename
namerawFile = 'KirkwoodTI_rawdata.dat'
print ''
print 'Starting Kirkwood TI'
print ''
print 'Kirkwood TI steps =', bins
print 'Kirkwood TI stepwidth =', lambdastep
print 'Integration steps for each lambda =', steps
print 'Equilibration steps after each lamda switch =', stepsequi
print 'Intervals for taking data and printing information to screen =', intervals
print ''

# print the data of the starting configuration
fmt = '%5d %8.4f %10.5f %12.3f %12.3f %12.3f %12.3f %12.3f %12.3f\n'
T = temperature.compute()
P = pressure.compute()
Ek = 0.5 * T * (3.0 * num_particles)
Ep = interNB.computeEnergy()
Eb = interQuartic.computeEnergy()
Eaa = interNB.computeEnergyAA()
Ecg = interNB.computeEnergyCG()
sys.stdout.write(' step T P etotal enonbonded ebonded ekinetic eallatom ecg \n')
sys.stdout.write(fmt % (0, T, P, Ek + Ep + Eb, Ep, Eb, Ek, Eaa, Ecg))
print ''

# output arrays
Energydiff = []
Pressurediff = []

# Kirkwood steps: sweep lambda from 0 to 1 in `bins` increments
for i in range(bins+1):
    # change Lambda
    print 'Kirkwood step: %d' %i
    print 'Lambda: %f' %(lambdastep*i)
    for p in range(num_particles + num_particlesCG):
        system.storage.modifyParticle(p, 'lambda_adr', lambdastep*i)
    system.storage.decompose()
    # equilibration
    integrator.run(stepsequi)
    step = i * (steps+stepsequi) + stepsequi
    T = temperature.compute()
    P = pressure.compute()
    Ek = 0.5 * T * (3.0 * num_particles)
    Ep = interNB.computeEnergy()
    Eb = interQuartic.computeEnergy()
    Eaa = interNB.computeEnergyAA()
    Ecg = interNB.computeEnergyCG()
    sys.stdout.write(fmt % (step, T, P, Ek + Ep + Eb, Ep, Eb, Ek, Eaa, Ecg))
    # Kirkwood integration
    runningEdiff = 0.0
    runningP = 0.0
    for s in range(1,intervals+1):
        integrator.run(nstepsTI)
        step = i * (steps+stepsequi) + s * nstepsTI + stepsequi
        T = temperature.compute()
        P = pressure.compute()
        Ek = 0.5 * T * (3.0 * num_particles)
        Ep = interNB.computeEnergy()
        Eb = interQuartic.computeEnergy()
        Eaa = interNB.computeEnergyAA()
        Ecg = interNB.computeEnergyCG()
        sys.stdout.write(fmt % (step, T, P, Ek + Ep + Eb, Ep, Eb, Ek, Eaa, Ecg))
        # get the relevant energy and pressure differences
        runningEdiff += Ecg - Eaa
        runningP += P
    # get the averages
    runningEdiff/=intervals
    runningP/=intervals
    # append to output arrays
    Energydiff.append(runningEdiff)
    Pressurediff.append(runningP)

# print the raw output to file
print ''
print "Kirkwood TI done, printing raw data to %s\n" %namerawFile
form = '%12.8f %12.8f %12.8f\n'
rawFile = open (namerawFile, 'w')
rawFile.write('lambda V_CG-V_AA P(lambda)\n')
for i in range( bins+1 ):
    rawFile.write(form % ( lambdastep*i, Energydiff[i], Pressurediff[i] ))
rawFile.close()

# simulation information
end_time = time.clock()
sys.stdout.write('Neighbor list builds = %d\n' % vl.builds)
sys.stdout.write('Integration steps = %d\n' % integrator.step)
sys.stdout.write('CPU time = %.1f\n' % (end_time - start_time))
|
govarguz/espressopp
|
examples/adress/hadress_tetraliquid/hadress_tetraliquid_KTI/hadressKTI.py
|
Python
|
gpl-3.0
| 11,990
|
[
"CRYSTAL",
"ESPResSo"
] |
61fb2402209a0aeafd26578676ff3ad43218b5f2fe5e16a45ef86e9963d1be28
|
# Load CosmoMC format .dataset files with lensing likelihood data
# AL July 2014
# note this is not well tested with final published versions of likelihoods
# Does not handle calibration parameter
from __future__ import absolute_import
from __future__ import print_function
from matplotlib import pyplot as plt
import os
import numpy as np
import sys
from getdist import IniFile
def readTextCommentColumns(fname, cols):
    """Read named columns from a whitespace-separated text file.

    The first line must be a comment header, e.g. ``# L TT TE ...``; *cols*
    are the header names to extract.

    Returns the selected columns unpacked as separate numpy arrays.
    Raises Exception if the first line is not a comment header, and
    ValueError if a requested column name is missing.
    """
    with open(fname) as f:
        header = f.readline().strip()
    # startswith also handles an empty first line, which used to IndexError
    if not header.startswith('#'):
        raise Exception('No Comment')
    incols = header[1:].split()
    colnums = [incols.index(col) for col in cols]
    return np.loadtxt(fname, usecols=colnums, unpack=True)
def readWithHeader(fname):
    """Read a text file whose first line is a ``#``-comment header.

    Returns ``(names, data)`` where *names* is the list of header tokens and
    *data* is the full numeric array loaded with ``np.loadtxt``.
    Raises Exception if the first line is not a comment header.
    """
    with open(fname) as f:
        header = f.readline().strip()
    # startswith also handles an empty first line, which used to IndexError
    if not header.startswith('#'):
        raise Exception('No Comment')
    return header[1:].split(), np.loadtxt(fname)
class ClsArray(object):
    """Store arrays of C_l spectra.

    ``self.cls_array[i, j]`` is the zero-based array of the correlation of
    field i with field j (lower triangle populated; ``get`` handles order).
    """

    def __init__(self, filename=None, cols=None, field_names=('T', 'E', 'B', 'P')):
        # list() copy avoids the mutable-default-argument pitfall and keeps
        # the attribute a list for backward compatibility
        self.field_names = list(field_names)
        n = len(self.field_names)
        # dtype=object (np.object was removed in numpy >= 1.24)
        self.cls_array = np.zeros((n, n), dtype=object)
        self.cls_array[:, :] = None
        if filename is not None:
            self.loadFromFile(filename, cols)

    def loadFromFile(self, filename, cols=None):
        """Load spectra from *filename*.

        If *cols* is None the column names are read from the file's comment
        header; otherwise *cols* gives the column names explicitly.
        """
        if cols is None:
            cols, dat = readWithHeader(filename)
        else:
            dat = np.loadtxt(filename)
        Lix = cols.index('L')
        L = dat[:, Lix]
        # int() conversion: float slice indices raise TypeError on modern
        # numpy/python, and L values are inherently integers
        self.lmin = int(L[0])
        self.lmax = int(L[-1])
        for i, f in enumerate(self.field_names):
            for j, f2 in enumerate(self.field_names[:i + 1]):
                # accept either name order, e.g. 'TP' or 'PT'
                try:
                    ix = cols.index(f + f2)
                except ValueError:
                    try:
                        ix = cols.index(f2 + f)
                    except ValueError:
                        continue
                cls = np.zeros(self.lmax + 1)
                cls[self.lmin:self.lmax + 1] = dat[:, ix]
                self.cls_array[i, j] = cls

    def get(self, indices):
        """Return the C_l array for the (i, j) field pair, order-insensitive."""
        i, j = indices
        if j > i:
            i, j = j, i
        return self.cls_array[i, j]
class BinWindows(object):
    """Bandpower binning windows over multipole range [lmin, lmax].

    Attributes set externally before use: ``cols_in`` (theory field pairs),
    ``cols_out`` (output column index or None per entry) and
    ``binning_matrix`` with shape (nbins, len(cols_in), lmax - lmin + 1).
    """

    def __init__(self, lmin, lmax, nbins):
        self.lmin = lmin
        self.lmax = lmax
        self.nbins = nbins

    def bin(self, TheoryCls, b, cls=None):
        """Accumulate the binned theory for bin *b* into *cls* (created if
        None) and return it. ``TheoryCls`` must provide ``get(pair)``."""
        if cls is None:
            cls = np.zeros(max([x for x in self.cols_out if x is not None]) + 1)
        for i, (pair_in, ix_out) in enumerate(zip(self.cols_in, self.cols_out)):
            cl = TheoryCls.get(pair_in)
            if cl is not None and ix_out is not None:
                cls[ix_out] += np.dot(self.binning_matrix[b, i, :], cl[self.lmin:self.lmax + 1])
        return cls

    def write(self, froot, stem):
        """Write one window file per bin under ``froot + stem + '_window/'``."""
        # Bug fix: the original tested froot+stem+'_window' for existence but
        # created froot+'_window', so the directory written to below could be
        # missing; create the directory that is actually used.
        outdir = froot + stem + '_window'
        if not os.path.exists(outdir):
            os.mkdir(outdir)
        # NOTE(review): lmin/lmax are indexed per-bin here but used as
        # scalars in bin() above; likewise binning_matrix is indexed by raw L
        # rather than L - lmin -- confirm against the producing code.
        for b in range(self.nbins):
            with open(outdir + '/window%u.dat' % (b + 1), 'w') as f:
                for L in np.arange(self.lmin[b], self.lmax[b] + 1):
                    # Bug fix: flatten the window row into the format tuple;
                    # passing the array itself breaks %-formatting with
                    # multiple "%10e" fields.
                    f.write(("%5u " + "%10e" * len(self.cols_in) + "\n")
                            % ((L,) + tuple(self.binning_matrix[b, :, L])))
class DatasetLikelihood(object):
    """Gaussian likelihood for binned CMB band powers read from a .dataset file.

    Loads band powers, covariance, bin window functions and an optional
    linear correction, then evaluates chi-squared for a theory ClsArray.
    """

    def __init__(self, fname, field_names=['T', 'E', 'B', 'P']):
        # field_names: ordered field labels; 'P' denotes the lensing potential.
        self.field_names = field_names
        self.tot_fields = len(field_names)
        if '.dataset' in fname: self.loadDataset(fname)
        else: raise Exception('lensLike only supports .dataset files')

    def typeIndex(self, field):
        # Position of a field label within field_names.
        return self.field_names.index(field)

    def clString_to_fieldPair(self, cl):
        # Parse a spectrum name such as 'TE' or 'T_E' into an index pair.
        if '_' in cl: fields = cl.split('_')
        else:
            if len(cl) != 2: raise Exception('Cl_order but be CL names, 2 characters or _ separated')
            fields = [cl[0], cl[1]]
        if len(fields) != 2: raise Exception('Invalid C_l order, must have pairs of field names')
        pair = [self.typeIndex(fields[0]), self.typeIndex(fields[1])]
        # Larger index first, matching ClsArray's lower-triangle layout.
        if pair[1] > pair[0]: pair.reverse()
        return pair

    def UseString_to_theoryPairCls(self, L):
        # Map a list of spectrum names to field-index pairs.
        pairs = []
        for cl in L:
            pairs.append(self.clString_to_fieldPair(cl))
        return pairs

    def UseString_to_cols(self, L):
        # Map spectrum names to flat column indices over the used fields'
        # lower-triangle ordering; entries stay None for unused fields.
        cols = [None] * len(L)
        for i, cl in enumerate(L):
            pair = self.clString_to_fieldPair(cl)
            i1 = self.field_index[pair[0]]
            i2 = self.field_index[pair[1]]
            if i1 < 0 or i2 < 0: continue
            if i2 > i1: i1, i2 = i2, i1
            # Walk the lower triangle in canonical order to find the flat
            # index of the pair (i1, i2).
            ix = 0
            for ii in range(self.nfields):
                for jj in range(ii + 1):
                    if ii == i1 and jj == i2: cols[i] = ix
                    ix += 1
        return cols

    def readBinWindows(self, ini, file_stem):
        # Build a BinWindows object from the .dataset entries
        # <file_stem>_in_order, <file_stem>_out_order and per-bin window
        # files named by the <file_stem>_files template.
        bins = BinWindows(self.cl_lmin, self.cl_lmax, self.nbins)
        in_cl = ini.split(file_stem + '_in_order')
        out_cl = ini.split(file_stem + '_out_order')
        bins.cols_in = self.UseString_to_theoryPairCls(in_cl)
        bins.cols_out = self.UseString_to_cols(out_cl)
        norder = len(bins.cols_in)
        if norder != len(bins.cols_out):
            raise Exception('_in_order and _out_order must have same number of entries')
        bins.binning_matrix = np.zeros((self.nbins, norder, self.cl_lmax - self.cl_lmin + 1))
        windows = ini.relativeFileName(file_stem + '_files')
        for b in range(self.nbins):
            # windows is a filename template with a %u slot for the 1-based bin number.
            window = np.loadtxt(windows % (b + 1))
            Err = False
            for i, L in enumerate(window[:, 0].astype(int)):
                if self.cl_lmin <= L <= self.cl_lmax:
                    bins.binning_matrix[b, :, L - self.cl_lmin] = window[i, 1:]
                else:
                    # Only complain if an out-of-range multipole carries weight.
                    Err = Err or any(window[i, 1:] != 0)
            if Err: print('WARNING: %s %u outside cl_lmin-cl_max range: %s' % (file_stem, b, windows % (b + 1)))
        return bins

    def loadDataset(self, froot):
        # Accept either 'name' or 'name.dataset'.
        if not '.dataset' in froot: froot += '.dataset'
        ini = IniFile(froot)
        self.readIni(ini)

    def readIni(self, ini):
        """Populate all likelihood state from a parsed .dataset IniFile."""
        self.like_approx = ini.string('like_approx', 'gaussian')
        if self.like_approx != 'gaussian': raise Exception('Only gaussian implented in python so far')
        self.fields_use = ini.split('fields_use')
        index_use = [self.typeIndex(f) for f in self.fields_use]
        self.use_field = [i in index_use for i in range(len(self.field_names))]
        self.nfields = sum(self.use_field)
        if ini.hasKey('fields_required'):
            self.fields_required = ini.string('fields_required').split()
        else: self.fields_required = self.fields_use
        index_use = [self.typeIndex(f) for f in self.fields_required]
        self.required_field = [i in index_use for i in range(len(self.field_names))]
        self.binned = ini.bool('binned', True)
        if not self.binned: raise Exception('Currently only support binned')
        # field_index maps full field index -> used-field index (-1 if unused);
        # fields holds the inverse map over the used fields.
        self.field_index = np.zeros(self.tot_fields, dtype=int) - 1
        self.fields = np.zeros(self.tot_fields)
        self.field_order = []
        ix = 0
        for i in range(self.tot_fields):
            if self.use_field[i]:
                self.field_index[i] = ix
                self.fields[ix] = i
                self.field_order.append(self.field_names[i])
                ix += 1
        # NOTE(review): no //2 here, so this is twice the number of distinct
        # field pairs — confirm whether that is intended (ncl is unused below).
        self.ncl = self.nfields * (self.nfields + 1)
        self.nbins = ini.int('nbins')
        # use_min/use_max are 1-based in the .dataset file; stored 0-based.
        self.bin_min = ini.int('use_min', 1) - 1
        self.bin_max = ini.int('use_max', self.nbins) - 1
        self.nbins_used = self.bin_max - self.bin_min + 1
        self.cl_lmax = ini.int('cl_lmax')
        self.cl_lmin = ini.int('cl_lmin')
        self.phi_lmax = self.cl_lmax
        self.lmin, self.lmax, self.lav, self.bandpowers, self.Ahat = readTextCommentColumns(
            ini.relativeFileName('cl_hat_file'), ['L_min', 'L_max', 'L_av', 'PP', 'Ahat'])
        self.bins = self.readBinWindows(ini, 'bin_window')
        self.cov = np.loadtxt(ini.relativeFileName('covmat_fiducial'))
        # Invert only the sub-covariance of the bins actually used.
        cov = self.cov[self.bin_min:self.bin_max + 1, self.bin_min:self.bin_max + 1]
        self.covinv = np.linalg.inv(cov)
        if 'linear_correction_fiducial_file' in ini.params:
            # Second column of the fiducial file holds the correction values.
            self.fid_correction = np.loadtxt(ini.relativeFileName('linear_correction_fiducial_file'))[:, 1]
            self.linear_correction = self.readBinWindows(ini, 'linear_correction_bin_window')
        else:
            self.linear_correction = None

    def writeData(self, froot):
        """Dump covariance, band powers, windows and the fiducial linear
        correction as plain-text files with prefix froot."""
        np.savetxt(froot + '_cov.dat', self.cov)
        # self.saveCl(froot + '_fid_cl.dat', self.fid_cl[:, 1:], cols=['TT', 'EE', 'TE', 'PP'])
        with open(froot + '_bandpowers.dat', 'w') as f:
            f.write("#%4s %5s %5s %8s %12s %10s %7s\n" % ('bin', 'L_min', 'L_max', 'L_av', 'PP', 'Error', 'Ahat'))
            for b in range(self.nbins):
                f.write("%5u %5u %5u %8.2f %12.5e %10.3e %7.3f\n" % (b + 1, self.lmin[b], self.lmax[b], self.lav[b],
                                                                     self.bandpowers[b], np.sqrt(self.cov[b, b]), self.Ahat[b]))
        self.bins.write(froot, 'bin')
        if self.linear_correction is not None:
            self.linear_correction.write(froot, 'linear_correction_bin')
            with open(froot + '_lensing_fiducial_correction', 'w') as f:
                f.write("#%4s %12s \n" % ('bin', 'PP'))
                for b in range(self.nbins):
                    f.write("%5u %12.5e\n" % (b + 1, self.fid_correction[b]))

    def plot(self, phicl=None, ls=None):
        """Plot band powers with error bars, optionally overlaying a theory curve."""
        lbin = self.lav
        binned_phicl_err = np.zeros(self.nbins)
        for b in range(self.nbins):
            binned_phicl_err[b] = np.sqrt(self.cov[b, b])
        plt.errorbar(lbin, self.bandpowers, yerr=binned_phicl_err, xerr=[lbin - self.lmin, self.lmax - lbin], fmt='o')
        if phicl is not None:
            # A ClsArray is reduced to the spectrum at index pair [3, 3]
            # (the 'P'/'P' entry, i.e. the lensing potential).
            if isinstance(phicl, ClsArray): phicl = phicl.get([3, 3])
            if ls is None: ls = np.arange(len(phicl))
            plt.plot(ls, phicl, color='k')
            plt.xlim([2, ls[-1]])

    def chi_squared(self, ClArray):
        """Gaussian chi-squared of the theory ClArray against measured band powers."""
        binphi = np.zeros(self.nbins_used)
        for b in range(self.bin_min, self.bin_max + 1):
            band = self.bins.bin(ClArray, b)[0]
            if self.linear_correction:
                band += self.linear_correction.bin(ClArray, b) - self.fid_correction[b]
            binphi[b - self.bin_min] = band
        delta = binphi - self.bandpowers[self.bin_min:self.bin_max + 1]
        return np.dot(delta, np.dot(self.covinv, delta))
def plotAndChisq(dataset, cl_file):
    """Load a .dataset likelihood plus a theory C_l file, plot the band powers
    against the theory and print the chi-squared value."""
    like = DatasetLikelihood(dataset)
    theory = ClsArray(cl_file)
    like.plot(theory)
    print('Chi-squared: ', like.chi_squared(theory))
    plt.show()
if __name__ == "__main__":
    # plotAndChisq(r'test_data/g60_full_pp.dataset', r'test_data/testout_pp.theory_cl')
    # sys.exit()
    # Fix: narrow the bare except so unrelated errors are not swallowed;
    # only a missing argparse (Python < 2.7) should trigger the hint.
    try:
        import argparse
    except ImportError:
        print('use "module load" to load python 2.7')
        sys.exit()
    parser = argparse.ArgumentParser(description="Load .dataset and calculate likelihood")
    parser.add_argument('dataset', help='.dataset filename')
    parser.add_argument('cl_file', help='file of Cls')
    args = parser.parse_args()
    plotAndChisq(args.dataset, args.cl_file)
|
ClaudioNahmad/Servicio-Social
|
Parametros/CosmoMC/CosmoMC-master/python/CMBlikes.py
|
Python
|
gpl-3.0
| 12,103
|
[
"Gaussian"
] |
e5e3bb36087fdb988a5939ad9b09a54518cb9264c19ef35c9ef05c8c52b625cb
|
import random
from lib.keras_utils import *
from lib.utils import *
from parameters import *
from skimage.transform import ProjectiveTransform
EPS = 1e-10 # Epsilon
MIN_CP = -2. # Minimum power index of c
MAX_CP = 2. # Maximum power index of c
SCORE_THRES = 0.99 # Softmax score threshold to consider success of attacks
PROG_PRINT_STEPS = 200 # Print progress every certain steps
EARLYSTOP_STEPS = 1000 # Early stopping if no improvement for certain steps
INT_TRN = 0.00 # Degree of randomness (for perspective transform)
DELTA_BRI = 0.15 # Degree of randomness (for brightness adjust)
class OptTranLane:
    """
    This class implements a generator for adversarial examples that are robust
    to certain transformations or variations. It is a modification from
    Carlini et al. (https://arxiv.org/abs/1608.04644) and Athalye et al.
    (https://arxiv.org/abs/1707.07397)
    """

    def _setup_opt(self):
        """Used to setup optimization when c is updated"""
        # obj_func = c * loss + l2-norm(d) (+ smoothness penalty)
        self.f = self.c * self.loss + self.c_smooth * self.smooth + self.norm
        # Setup optimizer
        if self.use_bound:
            # Use Scipy optimizer with upper and lower bound [0, 1]
            # NOTE(review): self.x_in is only assigned in the
            # var_change=False branch of __init__; combining use_bound=True
            # with var_change=True would raise AttributeError here — confirm.
            # ScipyOptimizerInterface is expected to come from the star
            # imports at the top of the file.
            self.optimizer = ScipyOptimizerInterface(
                self.f, var_list=self.var_list, var_to_bounds={
                    self.x_in: (0, 1)},
                method="L-BFGS-B")
        else:
            # Use learning rate decay
            global_step = tf.Variable(0, trainable=False)
            if self.decay:
                lr = tf.train.inverse_time_decay(
                    self.lr, global_step, 50, 0.01, staircase=True)
            else:
                lr = self.lr
            # Use Adam optimizer
            self.optimizer = tf.train.AdamOptimizer(
                learning_rate=lr, beta1=0.9, beta2=0.999, epsilon=1e-08)
            self.opt = self.optimizer.minimize(
                self.f, var_list=self.var_list, global_step=global_step)

    def __init__(self, model, target=True, c=1, lr=0.01, init_scl=0.1,
                 use_bound=False, loss_op=0, k=5, var_change=True, p_norm="1",
                 l=0, use_mask=True, decay=True, batch_size=BATCH_SIZE,
                 sp_size=None, rnd_tran=INT_TRN, rnd_bri=DELTA_BRI, c_smooth=0):
        """
        Initialize the optimizer. Default values of the parameters are
        recommended and decided specifically for attacking traffic sign
        recognizer trained on GTSRB dataset.
        Parameters
        ----------
        model : Keras model
            Target model to attack
        target : (optional) bool (default: True)
            True if performing targeted attack; False, otherwise.
        c : (optional) float (default: 1)
            Constant balancing the objective function (f) between norm
            of perturbation and loss (f = c * loss + norm). Larger c
            means stronger attack but also more "visible" (stronger
            perturbation).
        lr : (optional) float (default: 0.01)
            Learning rate of optimizer
        init_scl : (optional) float (default: 0.1)
            Standard deviation of Gaussian used to initialize objective
            variable
        use_bound : (optional) bool (default: False)
            If True, optimizer with bounding box [0, 1] will be used.
            Otherwise, Adam optimizer is used.
        loss_op : (optional) int (default: 0)
            Option for loss function to optimize over.
            loss_op = 0: Carlini's l2-attack
            loss_op = 1: cross-entropy loss
        k : (optional) float (default: 5)
            "Confidence threshold" used with loss_op = 0. Used to
            control strength of attack. The higher the k the stronger
            the attack.
        var_change : (optional) bool (default: True)
            If True, objective variable will be changed according to
            Carlini et al. (which also gets rid of the need to use
            any bounding) Otherwise, optimize directly on perturbation.
        p_norm : (optional) str (default: "1")
            Norm used to regularize the perturbation: "1", "2" or "inf".
        l : (optional) float (default: 0)
            Lower clamp applied to the norm term (norm = max(norm, l)).
        use_mask : (optional) bool (default: True)
            if True, perturbation will be masked before applying to
            the target sign. Mask must be specified when calling
            optimize() and optimize_search().
        decay : (optional) bool (default: True)
            If True, use inverse-time learning-rate decay with Adam.
        batch_size : (optional) int (default: BATCH_SIZE)
            Define number of transformed images to use
        sp_size : (optional) np.array, shape=(batch_size, 2) (default: 600)
            Specify upsampling size for each transformed sample.
        rnd_tran : (optional) float (default: INT_TRN)
            Degree of randomness for perspective transformation
        rnd_bri : (optional) float (default: DELTA_BRI)
            Degree of randomness for brightness adjustment
        c_smooth : (optional) float (default: 0)
            An experimental param to force the optimization to look for
            smoother perturbation
        """
        self.model = model
        self.target = target
        self.c = c
        self.lr = lr
        self.use_bound = use_bound
        self.loss_op = loss_op
        self.k = k
        self.use_mask = use_mask
        self.decay = decay
        self.batch_size = batch_size
        self.var_change = var_change
        self.init_scl = init_scl
        self.rnd_tran = rnd_tran
        self.c_smooth = c_smooth
        # Initialize variables
        # NOTE(review): despite the docstring saying "Gaussian", the
        # initial value is drawn from a uniform distribution here.
        init_val = tf.random_uniform(INPUT_SHAPE, minval=-init_scl,
                                     maxval=init_scl, dtype=tf.float32)
        self.x = K.placeholder(name='x', dtype='float32', shape=INPUT_SHAPE)
        self.y = K.placeholder(name='y', dtype='float32',
                               shape=(1, OUTPUT_DIM))
        if self.use_mask:
            self.m = K.placeholder(
                name='m', dtype='float32', shape=INPUT_SHAPE)
        # If change of variable is specified
        if var_change:
            # Optimize on w instead of d
            self.w = tf.Variable(initial_value=init_val, trainable=True,
                                 dtype=tf.float32)
            # tanh maps w into (-1, 1); scaling keeps x_full within [0, 1]
            # (up to EPS), removing the need for explicit clipping.
            x_full = (0.5 + EPS) * (tf.tanh(self.w) + 1)
            self.d = x_full - self.x
            if self.use_mask:
                self.d = tf.multiply(self.d, self.m)
            self.x_d = self.x + self.d
            self.var_list = [self.w]
        else:
            # Optimize directly on d (perturbation)
            d = tf.Variable(initial_value=init_val, trainable=True,
                            dtype=tf.float32)
            if self.use_mask:
                self.d = tf.multiply(d, self.m)
            else:
                self.d = d
            self.x_in = self.x + self.d
            # Require clipping - projection to feasible region
            self.x_d = tf.clip_by_value(self.x_in, 0, 1)
            self.var_list = [d]
        # Get x_in by transforming x_d by given transformation matrices
        # (8 parameters per image: flattened projective transform).
        self.M = K.placeholder(name='M', dtype='float32',
                               shape=(self.batch_size, 8))
        x_repeat = tf.tile(self.x_d, [self.batch_size, 1, 1, 1])
        self.x_tran = tf.contrib.image.transform(x_repeat, self.M,
                                                 interpolation='BILINEAR')
        # Randomly adjust brightness
        # NOTE(review): the brightness offsets are sampled once with NumPy at
        # graph-build time and stored in a non-trainable Variable, so they are
        # fixed for the lifetime of this object.
        b = np.multiply((2 * np.random.rand(batch_size, 1) - 1) * rnd_bri,
                        np.ones(shape=(batch_size, N_FEATURE)))
        b = b.reshape((batch_size,) + IMG_SHAPE)
        delta = tf.Variable(initial_value=b, trainable=False, dtype=tf.float32)
        self.x_b = tf.clip_by_value(self.x_tran + delta, 0, 1)

        # Upsample and downsample
        def resize(x):
            # x is a (image, target_size) pair supplied by tf.map_fn below.
            tmp = tf.image.resize_images(
                x[0], x[1], method=tf.image.ResizeMethod.BILINEAR)
            return tf.image.resize_images(
                tmp, [HEIGHT, WIDTH], method=tf.image.ResizeMethod.BILINEAR)

        # Set upsampling size to be 600x600 if not specified
        if sp_size is None:
            sp_size = np.zeros((batch_size, 2)) + 600
        up_size = tf.constant(sp_size, dtype=tf.int32)
        # Use map_fn to do random resampling for each image
        self.x_rs = tf.map_fn(resize, (self.x_b, up_size), dtype=tf.float32)
        model_output = self.model(self.x_rs)
        # NOTE(review): mean squared error is used regardless of loss_op;
        # the loss_op=0/1 options described in the docstring are only
        # distinguished in optimize()'s bookkeeping — confirm intended.
        loss_all = tf.losses.mean_squared_error(self.y, model_output)
        self.loss = tf.reduce_sum(loss_all)
        self.loss /= self.batch_size
        # Regularize perturbation with specified norm
        if p_norm == "2":
            norm = tf.norm(self.d, ord='euclidean')
        elif p_norm == "1":
            norm = tf.norm(self.d, ord=1)
        elif p_norm == "inf":
            norm = tf.norm(self.d, ord=np.inf)
        else:
            raise ValueError("Invalid norm_op")
        # Find difference between each pixel and the ones next to it. Penalize
        # this difference to make the perturbation look smoother
        d_x = tf.concat([self.d[:, WIDTH - 1:, :],
                         self.d[:, :WIDTH - 1, :]], axis=1)
        d_y = tf.concat([self.d[HEIGHT - 1:, :, :],
                         self.d[:HEIGHT - 1, :, :]], axis=0)
        smooth_x = tf.reduce_sum(tf.square(self.d - d_x))
        smooth_y = tf.reduce_sum(tf.square(self.d - d_y))
        self.smooth = smooth_x + smooth_y
        # Encourage norm to be larger than some value
        self.norm = tf.maximum(norm, l)
        self._setup_opt()

    def _get_rand_transform_matrix(self, image_size, d, batch_size):
        """Build batch_size flattened projective-transform rows (8 params each)
        with corners randomly jittered by up to +/- d pixels; the first entry
        is always the identity transform."""
        M = np.zeros((batch_size, 8))
        for i in range(batch_size):
            tl_top = random.uniform(-d, d)  # Top left corner, top
            tl_left = random.uniform(-d, d)  # Top left corner, left
            bl_bottom = random.uniform(-d, d)  # Bot left corner, bot
            bl_left = random.uniform(-d, d)  # Bot left corner, left
            tr_top = random.uniform(-d, d)  # Top right corner, top
            tr_right = random.uniform(-d, d)  # Top right corner, right
            br_bottom = random.uniform(-d, d)  # Bot right corner, bot
            br_right = random.uniform(-d, d)  # Bot right corner, right
            transform = ProjectiveTransform()
            if i == 0:
                # Keep one untransformed copy in every batch.
                transform.estimate(np.array((
                    (0, 0),
                    (0, image_size),
                    (image_size, image_size),
                    (image_size, 0)
                )), np.array((
                    (0, 0),
                    (0, image_size),
                    (image_size, image_size),
                    (image_size, 0)
                )))
            else:
                transform.estimate(np.array((
                    (tl_left, tl_top),
                    (bl_left, image_size - bl_bottom),
                    (image_size - br_right, image_size - br_bottom),
                    (image_size - tr_right, tr_top)
                )), np.array((
                    (0, 0),
                    (0, image_size),
                    (image_size, image_size),
                    (image_size, 0)
                )))
            # Drop the last element of the 3x3 homography (fixed scale).
            M[i] = transform.params.flatten()[:8]
        return M

    def optimize(self, x, y, n_step=1000, prog=True, mask=None):
        """
        Run optimization attack, produce adversarial example from a batch of
        images transformed from a single sample, x.
        Parameters
        ----------
        x : np.array
            Original benign sample
        y : np.array
            One-hot encoded target label if <target> was set to True or
            one-hot encoded true label, otherwise.
        n_step : (optional) int
            Number of steps to run optimization
        prog : (optional) bool
            True if progress should be printed
        mask : (optional) np.array of 0 or 1, shape=(n_sample, height, width)
            Mask to restrict gradient update on valid pixels
        Returns
        -------
        x_adv : np.array, shape=INPUT_SHAPE
            Output adversarial example created from x
        """
        with tf.Session() as sess:
            # Create inputs to optimization
            x_ = np.copy(x).reshape(INPUT_SHAPE)
            y_ = np.copy(y).reshape((1, OUTPUT_DIM))
            # Generate a batch of transformed images
            M_ = self._get_rand_transform_matrix(
                WIDTH, np.floor(WIDTH * self.rnd_tran), self.batch_size)
            # Include mask in feed_dict if mask is used
            if self.use_mask:
                # Repeat mask for all channels
                m_ = np.repeat(
                    mask[np.newaxis, :, :, np.newaxis], N_CHANNEL, axis=3)
                feed_dict = {self.x: x_, self.y: y_, self.M: M_, self.m: m_,
                             K.learning_phase(): False}
            else:
                feed_dict = {self.x: x_, self.y: y_, self.M: M_,
                             K.learning_phase(): False}
            # Initialize variables and load weights
            # (global init would clobber trained weights, so reload them;
            # WEIGTHS_PATH is spelled as defined in parameters.py)
            sess.run(tf.global_variables_initializer())
            self.model.load_weights(WEIGTHS_PATH)
            if self.var_change:
                # Initialize w = arctanh( 2(x + noise) - 1 )
                init_rand = np.random.normal(
                    -self.init_scl, self.init_scl, size=INPUT_SHAPE)
                # Clip values to remove numerical error atanh(1) or atanh(-1)
                tanhw = np.clip((x_ + init_rand) * 2 - 1, -1 + EPS, 1 - EPS)
                self.w.load(np.arctanh(tanhw))
            # Set up some variables for early stopping
            min_norm = float("inf")
            min_d = None
            earlystop_count = 0
            # Start optimization
            for step in range(n_step):
                if self.use_bound:
                    self.optimizer.minimize(sess, feed_dict=feed_dict)
                else:
                    sess.run(self.opt, feed_dict=feed_dict)
                # return sess.run(self.d_x, feed_dict=feed_dict)
                # Keep track of "best" solution
                if self.loss_op == 0:
                    norm = sess.run(self.norm, feed_dict=feed_dict)
                    loss = sess.run(self.loss, feed_dict=feed_dict)
                    # Save working adversarial example with smallest norm
                    # (loss < -0.95k is treated as a successful attack)
                    if loss < -0.95 * self.k:
                        if norm < min_norm:
                            min_norm = norm
                            min_d = sess.run(self.d, feed_dict=feed_dict)
                            # Reset early stopping counter
                            earlystop_count = 0
                        else:
                            earlystop_count += 1
                        # Early stop if no improvement
                        if earlystop_count > EARLYSTOP_STEPS:
                            print(step, min_norm)
                            break
                # Print progress
                if (step % PROG_PRINT_STEPS == 0) and prog:
                    f = sess.run(self.f, feed_dict=feed_dict)
                    norm = sess.run(self.norm, feed_dict=feed_dict)
                    loss = sess.run(self.loss, feed_dict=feed_dict)
                    smooth = sess.run(self.smooth, feed_dict=feed_dict)
                    print(("Step: {}, norm={:.3f}, loss={:.3f}, smooth={:.3f},"
                           " obj={:.3f}").format(step, norm, loss, smooth, f))
            # Prefer the best early-stopped perturbation; otherwise fall back
            # to whatever the final optimization state produced.
            if min_d is not None:
                x_adv = (x_ + min_d).reshape(IMG_SHAPE)
                return x_adv, min_norm
            else:
                d = sess.run(self.d, feed_dict=feed_dict)
                norm = sess.run(self.norm, feed_dict=feed_dict)
                x_adv = (x_ + d).reshape(IMG_SHAPE)
                return x_adv, norm

    def optimize_search(self, x, y, n_step=1000, search_step=10, prog=True,
                        mask=None):
        """
        Run optimization attack, produce adversarial example from a batch of
        images transformed from a single sample, x. Does binary search on
        log_10(c) to find optimal value of c.
        Parameters
        ----------
        x : np.array
            Original benign sample
        y : np.array, shape=(OUTPUT_DIM,)
            One-hot encoded target label if <target> was set to True or
            one-hot encoded true label, otherwise.
        n_step : (optional) int
            Number of steps to run optimization
        search_step : (optional) int
            Number of steps to search on c
        prog : (optional) bool
            True if progress should be printed
        mask : (optional) np.array of 0 or 1, shape=(n_sample, height, width)
            Mask to restrict gradient update on valid pixels
        Returns
        -------
        x_adv_suc : np.array, shape=INPUT_SHAPE
            Successful adversarial example created from x. None if fail.
        norm_suc : float
            Perturbation magnitude of x_adv_suc. None if fail.
        """
        # Declare min-max of search line [1e-2, 1e2] for c = 1e(cp)
        cp_lo = MIN_CP
        cp_hi = MAX_CP
        x_adv_suc = None
        norm_suc = float("inf")
        # NOTE(review): `time` must be provided by the star imports above.
        start_time = time.time()
        # Binary search on cp
        for c_step in range(search_step):
            # Update c (rebuild the objective/optimizer with the new value)
            cp = (cp_lo + cp_hi) / 2
            self.c = 10 ** cp
            self._setup_opt()
            # Run optimization with new c
            x_adv, norm = self.optimize(
                x, y, n_step=n_step, prog=False, mask=mask)
            # Evaluate result: softmax score of the (target or true) class
            y_pred = self.model.predict(x_adv.reshape(INPUT_SHAPE))[0]
            score = softmax(y_pred)[np.argmax(y)]
            if self.target:
                if score > SCORE_THRES:
                    # Attack succeeded, decrease cp to lower norm
                    cp_hi = cp
                    # Only save adv example if norm becomes smaller
                    if norm < norm_suc:
                        x_adv_suc = np.copy(x_adv)
                        norm_suc = norm
                else:
                    # Attack failed, increase cp for stronger attack
                    cp_lo = cp
            else:
                if score > 1 - SCORE_THRES:
                    # Attack failed, increase cp for stronger attack
                    cp_lo = cp
                else:
                    # Attack succeeded, decrease cp to lower norm
                    cp_hi = cp
                    # Only save adv example if norm becomes smaller
                    if norm < norm_suc:
                        x_adv_suc = np.copy(x_adv)
                        norm_suc = norm
            if prog:
                print("c_Step: {}, c={:.4f}, score={:.3f}, norm={:.3f}".format(
                    c_step, self.c, score, norm))
        print("Finished in {:.2f}s".format(time.time() - start_time))
        return x_adv_suc, norm_suc
|
chawins/aml
|
lib/OptTranLane.py
|
Python
|
mit
| 19,306
|
[
"Gaussian"
] |
192a500cb1813c9c98180c4639f61bcd916942ce2626335c255c40d707bb24af
|
# -----------------------------------------------------------------------------
# Copyright (c) 2015 Ralph Hempel <rhempel@hempeldesigngroup.com>
# Copyright (c) 2015 Anton Vanhoucke <antonvh@gmail.com>
# Copyright (c) 2015 Denis Demidov <dennis.demidov@gmail.com>
# Copyright (c) 2015 Eric Pascual <eric@pobot.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# -----------------------------------------------------------------------------
import sys
import os
import re
from time import sleep
from ev3dev2 import is_micropython
# The ev3dev2 API relies on Python 3.4+ language/stdlib behavior.
if sys.version_info < (3, 4):
    raise SystemError('Must be using Python 3.4 or higher')

# shlex/subprocess are unavailable under MicroPython, where the Sound class
# falls back to os.system instead of Popen pipelines.
if not is_micropython():
    import shlex
    from subprocess import Popen, PIPE
def _make_scales(notes):
    """ Utility function used by Sound class for building the note frequencies table """
    table = {}
    for names, freq in notes:
        # Each entry may list aliases like 'C#4/Db4'; all map to one rounded frequency.
        rounded = round(freq)
        for alias in names.split('/'):
            table[alias.upper()] = rounded
    return table
def get_command_processes(command):
    """
    :param string command: a string of command(s) to run that may include pipes
    :return: a list of Popen objects
    """
    # Splitting on '|' yields the single command itself when no pipe is
    # present, so both cases share one loop; each stage's stdin is wired
    # to the previous stage's stdout.
    processes = []
    for part in command.split("|"):
        upstream = processes[-1].stdout if processes else None
        processes.append(Popen(shlex.split(part), stdin=upstream, stdout=PIPE, stderr=PIPE))
    return processes
class Sound(object):
    """
    Support beep, play wav files, or convert text to speech.
    Examples::
        from ev3dev2.sound import Sound
        spkr = Sound()
        # Play 'bark.wav':
        spkr.play_file('bark.wav')
        # Introduce yourself:
        spkr.speak('Hello, I am Robot')
        # Play a small song
        spkr.play_song((
            ('D4', 'e3'),
            ('D4', 'e3'),
            ('D4', 'e3'),
            ('G4', 'h'),
            ('D5', 'h')
        ))
    In order to mimic EV3-G API parameters, durations used in methods
    exposed as EV3-G blocks for sound related operations are expressed
    as a float number of seconds.
    """

    # Cached ALSA mixer channel name; resolved lazily by _get_channel().
    channel = None

    # play_types
    PLAY_WAIT_FOR_COMPLETE = 0  #: Play the sound and block until it is complete
    PLAY_NO_WAIT_FOR_COMPLETE = 1  #: Start playing the sound but return immediately
    PLAY_LOOP = 2  #: Never return; start the sound immediately after it completes, until the program is killed
    PLAY_TYPES = (PLAY_WAIT_FOR_COMPLETE, PLAY_NO_WAIT_FOR_COMPLETE, PLAY_LOOP)
def _validate_play_type(self, play_type):
    # Reject unsupported play_type values early with a readable message.
    allowed = ','.join(str(t) for t in self.PLAY_TYPES)
    assert play_type in self.PLAY_TYPES, \
        "Invalid play_type %s, must be one of %s" % (play_type, allowed)
def _audio_command(self, command, play_type):
    """Run an audio shell command according to play_type.

    :param string command: shell command (may contain pipes) producing the sound
    :param play_type: one of the ``Sound.PLAY_*`` constants
    :return: the last ``subprocess.Popen`` of the pipeline when
        ``PLAY_NO_WAIT_FOR_COMPLETE`` is used under regular Python; ``None`` otherwise
    """
    if is_micropython():
        # MicroPython has no subprocess module; fall back to os.system.
        if play_type == Sound.PLAY_WAIT_FOR_COMPLETE:
            os.system(command)
        elif play_type == Sound.PLAY_NO_WAIT_FOR_COMPLETE:
            os.system('{} &'.format(command))
        elif play_type == Sound.PLAY_LOOP:
            while True:
                os.system(command)
        else:
            # Fix: the message previously used '%' with no conversion
            # specifier, which raised TypeError instead of this Exception.
            raise Exception("invalid play_type %s" % play_type)
        return None
    else:
        with open(os.devnull, 'w'):
            if play_type == Sound.PLAY_WAIT_FOR_COMPLETE:
                processes = get_command_processes(command)
                processes[-1].communicate()
                processes[-1].wait()
                return None
            elif play_type == Sound.PLAY_NO_WAIT_FOR_COMPLETE:
                processes = get_command_processes(command)
                return processes[-1]
            elif play_type == Sound.PLAY_LOOP:
                # Deliberately never returns; restart playback forever.
                while True:
                    processes = get_command_processes(command)
                    processes[-1].communicate()
                    processes[-1].wait()
            else:
                # Fix: same missing-specifier bug as above.
                raise Exception("invalid play_type %s" % play_type)
def beep(self, args='', play_type=PLAY_WAIT_FOR_COMPLETE):
    """
    Call beep command with the provided arguments (if any).
    See `beep man page`_ and google `linux beep music`_ for inspiration.
    :param string args: Any additional arguments to be passed to ``beep`` (see the `beep man page`_ for details)
    :param play_type: The behavior of ``beep`` once playback has been initiated
    :type play_type: ``Sound.PLAY_WAIT_FOR_COMPLETE`` or ``Sound.PLAY_NO_WAIT_FOR_COMPLETE``
    :return: When python3 is used and ``Sound.PLAY_NO_WAIT_FOR_COMPLETE`` is specified, returns the
        spawn subprocess from ``subprocess.Popen``; ``None`` otherwise
    .. _`beep man page`: https://linux.die.net/man/1/beep
    .. _`linux beep music`: https://www.google.com/search?q=linux+beep+music
    """
    command = "/usr/bin/beep %s" % args
    return self._audio_command(command, play_type)
def tone(self, *args, play_type=PLAY_WAIT_FOR_COMPLETE):
    """
    .. rubric:: tone(tone_sequence)
    Play a tone sequence, given as a list of
    ``(frequency_hz, duration_ms, delay_ms)`` tuples; the delay is the pause
    between this tone and the next one. See :py:meth:`play_song` for a more
    musician-friendly interface using conventional note names and durations.
    .. rubric:: tone(frequency, duration)
    Play a single tone of the given frequency (Hz) and duration (ms).
    :param play_type: The behavior of ``tone`` once playback has been initiated
    :type play_type: ``Sound.PLAY_WAIT_FOR_COMPLETE`` or ``Sound.PLAY_NO_WAIT_FOR_COMPLETE``
    :return: When python3 is used and ``Sound.PLAY_NO_WAIT_FOR_COMPLETE`` is specified, returns the
        spawn subprocess from ``subprocess.Popen``; ``None`` otherwise
    """
    def build_args(frequency=None, duration=None, delay=None):
        # Translate one tuple into the beep command's -f/-l/-D options.
        pieces = []
        if frequency is not None:
            pieces.append('-f %s ' % frequency)
        if duration is not None:
            pieces.append('-l %s ' % duration)
        if delay is not None:
            pieces.append('-D %s ' % delay)
        return ''.join(pieces)

    def play_sequence(sequence):
        # beep chains multiple tones with the -n separator.
        joined = ' -n '.join(build_args(*entry) for entry in sequence)
        return self.beep(joined, play_type=play_type)

    if len(args) == 1:
        return play_sequence(args[0])
    if len(args) == 2:
        return play_sequence([(args[0], args[1])])
    raise Exception("Unsupported number of parameters in Sound.tone(): expected 1 or 2, got " + str(len(args)))
def play_tone(self, frequency, duration, delay=0.0, volume=100, play_type=PLAY_WAIT_FOR_COMPLETE):
    """ Play a single tone, specified by its frequency, duration, volume and final delay.
    :param int frequency: the tone frequency, in Hertz
    :param float duration: Tone duration, in seconds
    :param float delay: Delay after tone, in seconds (can be useful when chaining calls to ``play_tone``)
    :param int volume: The play volume, in percent of maximum volume
    :param play_type: The behavior of ``play_tone`` once playback has been initiated
    :type play_type: ``Sound.PLAY_WAIT_FOR_COMPLETE``, ``Sound.PLAY_NO_WAIT_FOR_COMPLETE`` or ``Sound.PLAY_LOOP``
    :return: When python3 is used and ``Sound.PLAY_NO_WAIT_FOR_COMPLETE`` is specified, returns
        the spawn subprocess from the underlying beep command; ``None`` otherwise
    :raises ValueError: if invalid parameter
    """
    self._validate_play_type(play_type)
    if duration <= 0:
        raise ValueError('invalid duration (%s)' % duration)
    if delay < 0:
        raise ValueError('invalid delay (%s)' % delay)
    if not 0 < volume <= 100:
        raise ValueError('invalid volume (%s)' % volume)
    self.set_volume(volume)
    # tone() expects milliseconds; the public API takes seconds (EV3-G style).
    duration_ms = int(duration * 1000)
    delay_ms = int(delay * 1000)
    # Fix: the result of tone() was previously discarded, so the documented
    # return value for PLAY_NO_WAIT_FOR_COMPLETE never reached the caller.
    return self.tone([(frequency, duration_ms, delay_ms)], play_type=play_type)
def play_note(self, note, duration, volume=100, play_type=PLAY_WAIT_FOR_COMPLETE):
    """ Plays a note, given by its name as defined in ``_NOTE_FREQUENCIES``.
    :param string note: The note symbol with its octave number
    :param float duration: Tone duration, in seconds
    :param int volume: The play volume, in percent of maximum volume
    :param play_type: The behavior of ``play_note`` once playback has been initiated
    :type play_type: ``Sound.PLAY_WAIT_FOR_COMPLETE``, ``Sound.PLAY_NO_WAIT_FOR_COMPLETE`` or ``Sound.PLAY_LOOP``
    :return: When python3 is used and ``Sound.PLAY_NO_WAIT_FOR_COMPLETE`` is specified, returns
        the PID of the underlying beep command; ``None`` otherwise
    :raises ValueError: is invalid parameter (note, duration,...)
    """
    self._validate_play_type(play_type)
    # Fix: the old form .get(note.upper(), self._NOTE_FREQUENCIES[note])
    # evaluated the fallback lookup eagerly, so a note not present verbatim
    # (e.g. lowercase 'a4') raised KeyError before the uppercase lookup
    # could succeed. Look up lazily: uppercase first, verbatim as fallback.
    try:
        freq = self._NOTE_FREQUENCIES.get(note.upper())
        if freq is None:
            freq = self._NOTE_FREQUENCIES[note]
    except KeyError:
        raise ValueError('invalid note (%s)' % note)
    if duration <= 0:
        raise ValueError('invalid duration (%s)' % duration)
    if not 0 < volume <= 100:
        raise ValueError('invalid volume (%s)' % volume)
    return self.play_tone(freq, duration=duration, volume=volume, play_type=play_type)
def play_file(self, wav_file, volume=100, play_type=PLAY_WAIT_FOR_COMPLETE):
    """ Play a sound file (wav format) at a given volume. The EV3 audio subsystem will work best if
    the file is encoded as 16-bit, mono, 22050Hz.
    :param string wav_file: The sound file path
    :param int volume: The play volume, in percent of maximum volume
    :param play_type: The behavior of ``play_file`` once playback has been initiated
    :type play_type: ``Sound.PLAY_WAIT_FOR_COMPLETE``, ``Sound.PLAY_NO_WAIT_FOR_COMPLETE`` or ``Sound.PLAY_LOOP``
    :return: When python3 is used and ``Sound.PLAY_NO_WAIT_FOR_COMPLETE`` is specified, returns the
        spawn subprocess from ``subprocess.Popen``; ``None`` otherwise
    """
    # Validate inputs in the same order as before so callers see the same error first.
    if not 0 < volume <= 100:
        raise ValueError('invalid volume (%s)' % volume)
    if not wav_file.endswith(".wav"):
        raise ValueError('invalid sound file (%s), only .wav files are supported' % wav_file)
    if not os.path.exists(wav_file):
        raise ValueError("%s does not exist" % wav_file)
    self._validate_play_type(play_type)
    self.set_volume(volume)
    command = '/usr/bin/aplay -q "%s"' % wav_file
    return self._audio_command(command, play_type)
def speak(self, text, espeak_opts='-a 200 -s 130', volume=100, play_type=PLAY_WAIT_FOR_COMPLETE):
    """ Speak the given text aloud via the external ``espeak`` command.

    :param string text: the text to speak
    :param string espeak_opts: ``espeak`` command options (advanced usage)
    :param int volume: playback volume, in percent of maximum volume
    :param play_type: how ``speak`` behaves once playback has started
    :type play_type: ``Sound.PLAY_WAIT_FOR_COMPLETE``, ``Sound.PLAY_NO_WAIT_FOR_COMPLETE`` or ``Sound.PLAY_LOOP``
    :return: When python3 is used and ``Sound.PLAY_NO_WAIT_FOR_COMPLETE`` is specified, returns the
        spawn subprocess from ``subprocess.Popen``; ``None`` otherwise
    """
    self._validate_play_type(play_type)
    self.set_volume(volume)
    # Pipe espeak's wav output straight into aplay.
    espeak_cmd = "/usr/bin/espeak --stdout %s '%s' | /usr/bin/aplay -q" % (espeak_opts, text)
    return self._audio_command(espeak_cmd, play_type)
def _get_channel(self):
"""
:return: The detected sound channel
:rtype: string
"""
if self.channel is None:
# Get default channel as the first one that pops up in
# 'amixer scontrols' output, which contains strings in the
# following format:
#
# Simple mixer control 'Master',0
# Simple mixer control 'Capture',0
out = os.popen('/usr/bin/amixer scontrols').read()
m = re.search(r"'([^']+)'", out)
if m:
self.channel = m.group(1)
else:
self.channel = 'Playback'
return self.channel
def set_volume(self, pct, channel=None):
    """
    Sets the sound volume to the given percentage [0-100] by calling
    ``amixer -q set <channel> <pct>%``.

    If the channel is not specified, it tries to determine the default one
    by running ``amixer scontrols``. If that fails as well, it uses the
    ``Playback`` channel, as that is the only channel on the EV3.
    """
    target = self._get_channel() if channel is None else channel
    os.system('/usr/bin/amixer -q set {0} {1:d}%'.format(target, pct))
def get_volume(self, channel=None):
    """
    Gets the current sound volume by parsing the output of
    ``amixer get <channel>``.

    If the channel is not specified, it tries to determine the default one
    by running ``amixer scontrols``. If that fails as well, it uses the
    ``Playback`` channel, as that is the only channel on the EV3.

    :param channel: the mixer channel name, or ``None`` to auto-detect
    :return: the current volume as an integer percentage [0-100]
    :raises Exception: if the ``amixer`` output cannot be parsed
    """
    if channel is None:
        channel = self._get_channel()
    # BUGFIX: os.popen() takes a shell command *string*, not an argv list.
    # Passing a list raised TypeError, making this method always fail.
    out = os.popen('/usr/bin/amixer get {}'.format(channel)).read()
    m = re.search(r'\[(\d+)%\]', out)
    if m:
        return int(m.group(1))
    else:
        raise Exception('Failed to parse output of ``amixer get {}``'.format(channel))
def play_song(self, song, tempo=120, delay=0.05):
    """ Plays a song provided as a list of tuples containing the note name and its
    value using music conventional notation instead of numerical values for frequency
    and duration.

    It supports symbolic notes (e.g. ``A4``, ``D#3``, ``Gb5``) and durations (e.g. ``q``, ``h``).
    You can also specify rests by using ``R`` instead of note pitch.

    For an exhaustive list of accepted note symbols and values, have a look at the ``_NOTE_FREQUENCIES``
    and ``_NOTE_VALUES`` private dictionaries in the source code.

    The value can be suffixed by modifiers:

    - a *divider* introduced by a ``/`` to obtain triplets for instance
      (e.g. ``q/3`` for a triplet of eighth notes)
    - a *multiplier* introduced by ``*`` (e.g. ``*1.5`` is a dotted note).

    Shortcuts exist for common modifiers:

    - ``3`` produces a triplet member note. For instance ``e3`` gives a triplet of eighth notes,
      i.e. 3 eighth notes in the duration of a single quarter. You must ensure that 3 triplets
      notes are defined in sequence to match the count, otherwise the result will not be the
      expected one.
    - ``.`` produces a dotted note, i.e. which duration is one and a half the base one. Double dots
      are not currently supported.

    Example::

        >>> # A long time ago in a galaxy far,
        >>> # far away...
        >>> from ev3dev2.sound import Sound
        >>> spkr = Sound()
        >>> spkr.play_song((
        >>>     ('D4', 'e3'),      # intro anacrouse
        >>>     ('D4', 'e3'),
        >>>     ('D4', 'e3'),
        >>>     ('G4', 'h'),       # meas 1
        >>>     ('D5', 'h'),
        >>>     ('C5', 'e3'),      # meas 2
        >>>     ('B4', 'e3'),
        >>>     ('A4', 'e3'),
        >>>     ('G5', 'h'),
        >>>     ('D5', 'q'),
        >>>     ('C5', 'e3'),      # meas 3
        >>>     ('B4', 'e3'),
        >>>     ('A4', 'e3'),
        >>>     ('G5', 'h'),
        >>>     ('D5', 'q'),
        >>>     ('C5', 'e3'),      # meas 4
        >>>     ('B4', 'e3'),
        >>>     ('C5', 'e3'),
        >>>     ('A4', 'h.'),
        >>> ))

    .. important::

        Only 4/4 signature songs are supported with respect to note durations.

    :param iterable[tuple(string,string)] song: the song
    :param int tempo: the song tempo, given in quarters per minute
    :param float delay: delay between notes (in seconds)

    :return: When python3 is used the spawn subprocess from ``subprocess.Popen`` is returned; ``None`` otherwise

    :raises ValueError: if invalid note in song or invalid play parameters
    """
    if tempo <= 0:
        raise ValueError('invalid tempo (%s)' % tempo)
    if delay < 0:
        raise ValueError('invalid delay (%s)' % delay)

    delay_ms = int(delay * 1000)
    meas_duration_ms = 60000 / tempo * 4  # we only support 4/4 bars, hence "* 4"

    for (note, value) in song:
        value = value.lower()

        if '/' in value:
            base, factor = value.split('/')
            # BUGFIX: '/' is documented as a *divider* (e.g. 'q/3' is a
            # triplet member = quarter / 3, matching the 'e3' shortcut's
            # 2/3 factor). The previous code multiplied by the factor.
            factor = 1.0 / float(factor)
        elif '*' in value:
            base, factor = value.split('*')
            factor = float(factor)
        elif value.endswith('.'):
            base = value[:-1]
            factor = 1.5
        elif value.endswith('3'):
            base = value[:-1]
            # BUGFIX: use float literals — under Python 2, 2 / 3 is integer
            # division and float(2 / 3) evaluated to 0.0 (zero duration).
            factor = 2.0 / 3.0
        else:
            base = value
            factor = 1.0

        try:
            duration_ms = meas_duration_ms * self._NOTE_VALUES[base] * factor
        except KeyError:
            raise ValueError('invalid note (%s)' % base)

        if note == "R":
            # A rest: just wait for the duration (plus inter-note delay).
            sleep(duration_ms / 1000 + delay)
        else:
            freq = self._NOTE_FREQUENCIES[note.upper()]
            self.beep('-f %d -l %d -D %d' % (freq, duration_ms, delay_ms))
#: Note frequencies.
#:
#: This dictionary gives the rounded frequency of a note specified by its
#: standard US abbreviation and its octave number (e.g. ``C3``).
#: Alterations use the ``#`` and ``b`` symbols, respectively for
#: *sharp* and *flat*, between the note code and the octave number (e.g. ``D#4``, ``Gb5``).
#:
#: Entries written as 'X#n/Ybn' are enharmonic pairs; _make_scales (defined
#: elsewhere in this module) expands each such entry into one key per symbol.
_NOTE_FREQUENCIES = _make_scales((
    ('C0', 16.35),
    ('C#0/Db0', 17.32),
    ('D0', 18.35),
    ('D#0/Eb0', 19.45),  # expanded in one entry per symbol by _make_scales
    ('E0', 20.60),
    ('F0', 21.83),
    ('F#0/Gb0', 23.12),
    ('G0', 24.50),
    ('G#0/Ab0', 25.96),
    ('A0', 27.50),
    ('A#0/Bb0', 29.14),
    ('B0', 30.87),
    ('C1', 32.70),
    ('C#1/Db1', 34.65),
    ('D1', 36.71),
    ('D#1/Eb1', 38.89),
    ('E1', 41.20),
    ('F1', 43.65),
    ('F#1/Gb1', 46.25),
    ('G1', 49.00),
    ('G#1/Ab1', 51.91),
    ('A1', 55.00),
    ('A#1/Bb1', 58.27),
    ('B1', 61.74),
    ('C2', 65.41),
    ('C#2/Db2', 69.30),
    ('D2', 73.42),
    ('D#2/Eb2', 77.78),
    ('E2', 82.41),
    ('F2', 87.31),
    ('F#2/Gb2', 92.50),
    ('G2', 98.00),
    ('G#2/Ab2', 103.83),
    ('A2', 110.00),
    ('A#2/Bb2', 116.54),
    ('B2', 123.47),
    ('C3', 130.81),
    ('C#3/Db3', 138.59),
    ('D3', 146.83),
    ('D#3/Eb3', 155.56),
    ('E3', 164.81),
    ('F3', 174.61),
    ('F#3/Gb3', 185.00),
    ('G3', 196.00),
    ('G#3/Ab3', 207.65),
    ('A3', 220.00),
    ('A#3/Bb3', 233.08),
    ('B3', 246.94),
    ('C4', 261.63),
    ('C#4/Db4', 277.18),
    ('D4', 293.66),
    ('D#4/Eb4', 311.13),
    ('E4', 329.63),
    ('F4', 349.23),
    ('F#4/Gb4', 369.99),
    ('G4', 392.00),
    ('G#4/Ab4', 415.30),
    ('A4', 440.00),
    ('A#4/Bb4', 466.16),
    ('B4', 493.88),
    ('C5', 523.25),
    ('C#5/Db5', 554.37),
    ('D5', 587.33),
    ('D#5/Eb5', 622.25),
    ('E5', 659.25),
    ('F5', 698.46),
    ('F#5/Gb5', 739.99),
    ('G5', 783.99),
    ('G#5/Ab5', 830.61),
    ('A5', 880.00),
    ('A#5/Bb5', 932.33),
    ('B5', 987.77),
    ('C6', 1046.50),
    ('C#6/Db6', 1108.73),
    ('D6', 1174.66),
    ('D#6/Eb6', 1244.51),
    ('E6', 1318.51),
    ('F6', 1396.91),
    ('F#6/Gb6', 1479.98),
    ('G6', 1567.98),
    ('G#6/Ab6', 1661.22),
    ('A6', 1760.00),
    ('A#6/Bb6', 1864.66),
    ('B6', 1975.53),
    ('C7', 2093.00),
    ('C#7/Db7', 2217.46),
    ('D7', 2349.32),
    ('D#7/Eb7', 2489.02),
    ('E7', 2637.02),
    ('F7', 2793.83),
    ('F#7/Gb7', 2959.96),
    ('G7', 3135.96),
    ('G#7/Ab7', 3322.44),
    ('A7', 3520.00),
    ('A#7/Bb7', 3729.31),
    ('B7', 3951.07),
    ('C8', 4186.01),
    ('C#8/Db8', 4434.92),
    ('D8', 4698.63),
    ('D#8/Eb8', 4978.03),
    ('E8', 5274.04),
    ('F8', 5587.65),
    ('F#8/Gb8', 5919.91),
    ('G8', 6271.93),
    ('G#8/Ab8', 6644.88),
    ('A8', 7040.00),
    ('A#8/Bb8', 7458.62),
    ('B8', 7902.13)))
#: Common note values.
#:
#: See https://en.wikipedia.org/wiki/Note_value
#:
#: This dictionary provides the multiplier to be applied to the whole note duration
#: to obtain subdivisions, given the corresponding symbolic identifier:
#:
#:  =  ===============================
#:  w  whole note (UK: semibreve)
#:  h  half note (UK: minim)
#:  q  quarter note (UK: crotchet)
#:  e  eighth note (UK: quaver)
#:  s  sixteenth note (UK: semiquaver)
#:  =  ===============================
#:
#:
#: Triplets can be obtained by dividing the corresponding reference by 3.
#: For instance, the note value of an eighth triplet will be ``NOTE_VALUE['e'] / 3``.
#: It is simpler however to use the ``3`` modifier of notes, as supported by the
#: :py:meth:`Sound.play_song` method.
_NOTE_VALUES = {
    'w': 1.,
    'h': 1. / 2,
    'q': 1. / 4,
    'e': 1. / 8,
    's': 1. / 16,
}
|
dwalton76/ev3dev-lang-python
|
ev3dev2/sound.py
|
Python
|
mit
| 25,650
|
[
"Galaxy"
] |
9183ec98ca53282b2fc5a7a8281559966898bef8af474a450c7db2be62fd907b
|
import vtk
class Scene:
    """Thin wrapper around a VTK render window with a trackball-camera interactor.

    NOTE(review): this class mixes the VTK<=5 ``SetInput`` API with the VTK 6+
    ``SetInputConnection``/``SetSourceConnection`` API; it presumably only runs
    against a VTK build that still provides ``SetInput`` — confirm the target
    VTK version before porting.
    """

    def __init__(self):
        render_window = vtk.vtkRenderWindow()
        self.renderer = vtk.vtkRenderer()
        render_window.AddRenderer(self.renderer)
        self.interactor = vtk.vtkRenderWindowInteractor()
        self.interactor.SetRenderWindow(render_window)
        camera = vtk.vtkInteractorStyleTrackballCamera()
        camera.SetCurrentRenderer(self.renderer)
        self.interactor.SetInteractorStyle(camera)

    def _add_actor(self, mapper, color):
        """Wrap *mapper* in a flat-colored actor and add it to the renderer.

        Extracted to remove the triplicated mapper->actor->renderer boilerplate
        in render_object.
        """
        mapper.ScalarVisibilityOff()
        actor = vtk.vtkActor()
        actor.SetMapper(mapper)
        actor.GetProperty().SetColor(color)
        self.renderer.AddActor(actor)

    def render_object(self, nodes, edges, radii, color):
        """Add a node/edge network to the scene as wires, node spheres, and tubes.

        :param nodes: iterable of (x, y, z) point coordinates
        :param edges: iterable of (a, b) point-index pairs
        :param radii: per-node radii (iterable) or a single scalar applied to all nodes
        :param color: (r, g, b) tuple in [0, 1]
        """
        polydata = vtk.vtkPolyData()

        points = vtk.vtkPoints()
        for x, y, z in nodes:
            points.InsertNextPoint(x, y, z)
        polydata.SetPoints(points)

        lines = vtk.vtkCellArray()
        for a, b in edges:
            id_list = vtk.vtkIdList()
            id_list.InsertNextId(a)
            id_list.InsertNextId(b)
            lines.InsertNextCell(id_list)
        polydata.SetLines(lines)

        data = vtk.vtkFloatArray()
        try:
            iter(radii)
        except TypeError:
            # Scalar radius: replicate it once per node.
            radii = [radii] * len(nodes)
        for r in radii:
            data.InsertNextValue(r)
        polydata.GetPointData().SetScalars(data)

        # wires
        mapper = vtk.vtkPolyDataMapper()
        mapper.SetInput(polydata)
        self._add_actor(mapper, color)

        # spheres glyphed at each node
        sphere_source = vtk.vtkSphereSource()
        sphere_filter = vtk.vtkGlyph3D()
        sphere_filter.SetSourceConnection(sphere_source.GetOutputPort())
        sphere_filter.SetInput(polydata)
        sphere_filter.GeneratePointIdsOn()
        sphere_filter.Update()
        mapper = vtk.vtkPolyDataMapper()
        mapper.SetInputConnection(sphere_filter.GetOutputPort())
        self._add_actor(mapper, color)

        # tubes along the edges, radius driven by the point scalars
        tube_filter = vtk.vtkTubeFilter()
        tube_filter.SetInput(polydata)
        tube_filter.SetVaryRadiusToVaryRadiusByAbsoluteScalar()
        tube_filter.SetNumberOfSides(10)
        mapper = vtk.vtkPolyDataMapper()
        mapper.SetInputConnection(tube_filter.GetOutputPort())
        self._add_actor(mapper, color)

    def play(self):
        """Start the interactor's event loop (blocks until the window closes)."""
        self.interactor.Start()
def visualize(solid_nodes, solid_edges, pore_centers, pore_radii):
    """Render the solid network in red and the pores in blue, then interact."""
    scene = Scene()
    layers = (
        (solid_nodes, solid_edges, 1, (1, 0, 0)),   # solid wireframe, red
        (pore_centers, [], pore_radii, (0, 0, 1)),  # pore spheres, blue
    )
    for nodes, edges, radii, color in layers:
        scene.render_object(nodes, edges, radii, color)
    scene.play()
if __name__ == '__main__':
    # Smoke test: two solid nodes joined by one edge, plus a single pore of radius 5.
    visualize([(0,0,0),(1,1,1)], [(0,1)], [(20,20,20)], [5])
|
RodericDay/CIF-characterize
|
vtk_tools.py
|
Python
|
mit
| 2,925
|
[
"VTK"
] |
1534a1c114b0613a6e53339cac4fa948d95eafde0354a4a6e8fff746cc640ad3
|
import logging
from cStringIO import StringIO
from math import exp
from lxml import etree
from path import path # NOTE (THK): Only used for detecting presence of syllabus
import requests
from datetime import datetime
import dateutil.parser
from lazy import lazy
from xmodule.seq_module import SequenceDescriptor, SequenceModule
from xmodule.graders import grader_from_conf
from xmodule.tabs import CourseTabList
import json
from xblock.fields import Scope, List, String, Dict, Boolean, Integer
from .fields import Date
from django.utils.timezone import UTC
# Module-level logger for this file.
log = logging.getLogger(__name__)

# Make '_' a no-op so we can scrape strings
_ = lambda text: text

# Far-future placeholder start date, used for courses with no explicit start.
DEFAULT_START_DATE = datetime(2030, 1, 1, tzinfo=UTC())
class StringOrDate(Date):
    """A field that holds either a parseable date/time or an arbitrary string."""

    def from_json(self, value):
        """
        Parse an optional metadata key containing a time or a string:
        if present, assume it's a string if it doesn't parse.
        """
        try:
            result = super(StringOrDate, self).from_json(value)
        except ValueError:
            return value
        if result is None:
            return value
        else:
            return result

    def to_json(self, value):
        """
        Convert a time struct or string to a string.

        Falls back to returning the value unchanged if it is not serializable
        as a date.
        """
        try:
            result = super(StringOrDate, self).to_json(value)
        # BUGFIX: narrowed from a bare ``except:``, which also swallowed
        # SystemExit and KeyboardInterrupt.
        except Exception:
            return value
        if result is None:
            return value
        else:
            return result
# Shared lxml parser: DTD loading/validation disabled, comments and
# whitespace-only text nodes stripped.
edx_xml_parser = etree.XMLParser(dtd_validation=False, load_dtd=False,
                                 remove_comments=True, remove_blank_text=True)

# Module-level TOC cache, mapping toc_url -> (parsed_toc, fetch_timestamp).
_cached_toc = {}
class Textbook(object):
    """A course textbook whose table of contents lives at ``<book_url>toc.xml``."""

    def __init__(self, title, book_url):
        self.title = title
        self.book_url = book_url

    @lazy
    def start_page(self):
        # First page is the 'page' attribute of the first TOC entry.
        return int(self.table_of_contents[0].attrib['page'])

    @lazy
    def end_page(self):
        # The last page should be the last element in the table of contents,
        # but it may be nested. So recurse all the way down the last element
        last_el = self.table_of_contents[-1]
        while last_el.getchildren():
            last_el = last_el[-1]
        return int(last_el.attrib['page'])

    @lazy
    def table_of_contents(self):
        """
        Accesses the textbook's table of contents (default name "toc.xml") at the URL self.book_url

        Returns XML tree representation of the table of contents

        :raises Exception: if the TOC cannot be fetched or parsed
        """
        toc_url = self.book_url + 'toc.xml'

        # cdodge: I've added this caching of TOC because in Mongo-backed instances (but not Filesystem stores)
        # course modules have a very short lifespan and are constantly being created and torn down.
        # Since this module in the __init__() method does a synchronous call to AWS to get the TOC
        # this is causing a big performance problem. So let's be a bit smarter about this and cache
        # each fetch and store in-mem for 10 minutes.
        # NOTE: I have to get this onto sandbox ASAP as we're having runtime failures. I'd like to swing back and
        # rewrite to use the traditional Django in-memory cache.
        try:
            # see if we already fetched this
            if toc_url in _cached_toc:
                (table_of_contents, timestamp) = _cached_toc[toc_url]
                # BUGFIX: UTC is a tzinfo *class* (the file instantiates it as
                # UTC() elsewhere); passing the class to datetime.now() raised
                # TypeError, which the except below silently swallowed, so the
                # cache never actually hit.
                age = datetime.now(UTC()) - timestamp
                # BUGFIX: expire every 10 minutes — use total_seconds(), since
                # timedelta.seconds ignores whole days and would treat a
                # days-old entry as fresh.
                if age.total_seconds() < 600:
                    return table_of_contents
        except Exception:
            # Cache probing is best-effort; fall through to a fresh fetch.
            pass

        # Get the table of contents from S3
        log.info("Retrieving textbook table of contents from %s" % toc_url)
        try:
            r = requests.get(toc_url)
        except Exception as err:
            msg = 'Error %s: Unable to retrieve textbook table of contents at %s' % (err, toc_url)
            log.error(msg)
            raise Exception(msg)

        # TOC is XML. Parse it
        try:
            table_of_contents = etree.fromstring(r.text)
        except Exception as err:
            msg = 'Error %s: Unable to parse XML for textbook table of contents at %s' % (err, toc_url)
            log.error(msg)
            raise Exception(msg)

        # BUGFIX: the cache was read above but never populated, so every call
        # re-fetched the TOC. Store the parsed tree with its fetch time.
        _cached_toc[toc_url] = (table_of_contents, datetime.now(UTC()))

        return table_of_contents

    def __eq__(self, other):
        return (self.title == other.title and
                self.book_url == other.book_url)

    def __ne__(self, other):
        return not self == other
class TextbookList(List):
    """XBlock List field holding ``Textbook`` objects, serialized as (title, url) pairs."""

    def from_json(self, values):
        """Deserialize (title, book_url) pairs into Textbook objects, skipping failures."""
        textbooks = []
        for title, book_url in values:
            try:
                textbooks.append(Textbook(title, book_url))
            # BUGFIX: narrowed from a bare ``except:`` so SystemExit and
            # KeyboardInterrupt are no longer swallowed.
            except Exception:
                # If we can't get to S3 (e.g. on a train with no internet), don't break
                # the rest of the courseware.
                log.exception("Couldn't load textbook ({0}, {1})".format(title, book_url))
                continue
        return textbooks

    def to_json(self, values):
        """Serialize Textbook objects (or already-serialized tuples) to (title, url) pairs."""
        json_data = []
        for val in values:
            if isinstance(val, Textbook):
                json_data.append((val.title, val.book_url))
            elif isinstance(val, tuple):
                json_data.append(val)
            else:
                # Unknown entry types are silently dropped.
                continue
        return json_data
class CourseFields(object):
    """Declarative XBlock field definitions for course-level settings.

    Each attribute is an XBlock field (``String``, ``Boolean``, ``Dict``, ...)
    describing one course setting: its display name, author-facing help text,
    default value, and storage scope (``Scope.settings`` vs ``Scope.content``).
    """

    lti_passports = List(
        display_name=_("LTI Passports"),
        help=_("Enter the passports for course LTI tools in the following format: \"id\":\"client_key:client_secret\"."),
        scope=Scope.settings
    )
    textbooks = TextbookList(help="List of pairs of (title, url) for textbooks used in this course",
                             default=[], scope=Scope.content)
    wiki_slug = String(help="Slug that points to the wiki for this course", scope=Scope.content)
    enrollment_start = Date(help="Date that enrollment for this class is opened", scope=Scope.settings)
    enrollment_end = Date(help="Date that enrollment for this class is closed", scope=Scope.settings)
    start = Date(help="Start time when this module is visible",
                 default=DEFAULT_START_DATE,
                 scope=Scope.settings)
    end = Date(help="Date that this class ends", scope=Scope.settings)
    advertised_start = String(
        display_name=_("Course Advertised Start Date"),
        help=_("Enter the date you want to advertise as the course start date, if this date is different from the set start date. To advertise the set start date, enter null."),
        scope=Scope.settings
    )
    # Default grading policy: assignment-type graders plus grade cutoffs.
    grading_policy = Dict(help="Grading policy definition for this class",
                          default={"GRADER": [
                              {
                                  "type": "Homework",
                                  "min_count": 12,
                                  "drop_count": 2,
                                  "short_label": "HW",
                                  "weight": 0.15
                              },
                              {
                                  "type": "Lab",
                                  "min_count": 12,
                                  "drop_count": 2,
                                  "weight": 0.15
                              },
                              {
                                  "type": "Midterm Exam",
                                  "short_label": "Midterm",
                                  "min_count": 1,
                                  "drop_count": 0,
                                  "weight": 0.3
                              },
                              {
                                  "type": "Final Exam",
                                  "short_label": "Final",
                                  "min_count": 1,
                                  "drop_count": 0,
                                  "weight": 0.4
                              }
                          ],
                              "GRADE_CUTOFFS": {
                                  "Pass": 0.5
                              }},
                          scope=Scope.content)
    show_calculator = Boolean(
        display_name=_("Show Calculator"),
        help=_("Enter true or false. When true, students can see the calculator in the course."),
        default=False,
        scope=Scope.settings
    )
    display_name = String(
        help=_("Enter the name of the course as it should appear in the edX.org course list."),
        default="Empty",
        display_name=_("Course Display Name"),
        scope=Scope.settings
    )
    # nicky added here: locally-added fields for course categorization.
    course_kinds = String(
        help=_('Input the kinds of the course'),
        default="",
        display_name=_("The kinds of the course"),
        scope=Scope.settings
    )
    course_level = String(
        help=_('the level of the course.1 is the lowest grade,2 is the middle grade,3 is the highest grade'),
        default="",
        display_name=_("The level of the Course"),
        scope=Scope.settings
    )
    course_edit_method = String(
        display_name=_("Course Editor"),
        help=_("Enter the method by which this course is edited (\"XML\" or \"Studio\")."),
        default="Studio",
        scope=Scope.settings,
        deprecated=True  # Deprecated because someone would not edit this value within Studio.
    )
    show_chat = Boolean(
        display_name=_("Show Chat Widget"),
        help=_("Enter true or false. When true, students can see the chat widget in the course."),
        default=False,
        scope=Scope.settings
    )
    tabs = CourseTabList(help="List of tabs to enable in this course", scope=Scope.settings, default=[])
    end_of_course_survey_url = String(
        display_name=_("Course Survey URL"),
        help=_("Enter the URL for the end-of-course survey. If your course does not have a survey, enter null."),
        scope=Scope.settings
    )
    discussion_blackouts = List(
        display_name=_("Discussion Blackout Dates"),
        help=_("Enter pairs of dates between which students cannot post to discussion forums, formatted as \"YYYY-MM-DD-YYYY-MM-DD\". To specify times as well as dates, format the pairs as \"YYYY-MM-DDTHH:MM-YYYY-MM-DDTHH:MM\" (be sure to include the \"T\" between the date and time)."),
        scope=Scope.settings
    )
    discussion_topics = Dict(
        display_name=_("Discussion Topic Mapping"),
        help=_("Enter discussion categories in the following format: \"CategoryName\": {\"id\": \"i4x-InstitutionName-CourseNumber-course-CourseRun\"}. For example, one discussion category may be \"Lydian Mode\": {\"id\": \"i4x-UniversityX-MUS101-course-2014_T1\"}."),
        scope=Scope.settings
    )
    discussion_sort_alpha = Boolean(
        display_name=_("Discussion Sorting Alphabetical"),
        scope=Scope.settings, default=False,
        help=_("Enter true or false. If true, discussion categories and subcategories are sorted alphabetically. If false, they are sorted chronologically.")
    )
    announcement = Date(
        display_name=_("Course Announcement Date"),
        help=_("Enter the date to announce your course."),
        scope=Scope.settings
    )
    cohort_config = Dict(
        display_name=_("Cohort Configuration"),
        help=_("Cohorts are not currently supported by edX."),
        scope=Scope.settings
    )
    is_new = Boolean(
        display_name=_("Course Is New"),
        help=_("Enter true or false. If true, the course appears in the list of new courses on edx.org, and a New! badge temporarily appears next to the course image."),
        scope=Scope.settings
    )
    no_grade = Boolean(
        display_name=_("Course Not Graded"),
        help=_("Enter true or false. If true, the course will not be graded."),
        default=False,
        scope=Scope.settings
    )
    disable_progress_graph = Boolean(
        display_name=_("Disable Progress Graph"),
        help=_("Enter true or false. If true, students cannot view the progress graph."),
        default=False,
        scope=Scope.settings
    )
    pdf_textbooks = List(
        display_name=_("PDF Textbooks"),
        help=_("List of dictionaries containing pdf_textbook configuration"), scope=Scope.settings
    )
    html_textbooks = List(
        display_name=_("HTML Textbooks"),
        help=_("For HTML textbooks that appear as separate tabs in the courseware, enter the name of the tab (usually the name of the book) as well as the URLs and titles of all the chapters in the book."),
        scope=Scope.settings
    )
    remote_gradebook = Dict(
        display_name=_("Remote Gradebook"),
        help=_("Enter the remote gradebook mapping. Only use this setting when REMOTE_GRADEBOOK_URL has been specified."),
        scope=Scope.settings
    )
    allow_anonymous = Boolean(
        display_name=_("Allow Anonymous Discussion Posts"),
        help=_("Enter true or false. If true, students can create discussion posts that are anonymous to all users."),
        scope=Scope.settings, default=True
    )
    allow_anonymous_to_peers = Boolean(
        display_name=_("Allow Anonymous Discussion Posts to Peers"),
        help=_("Enter true or false. If true, students can create discussion posts that are anonymous to other students. This setting does not make posts anonymous to course staff."),
        scope=Scope.settings, default=False
    )
    advanced_modules = List(
        display_name=_("Advanced Module List"),
        help=_("Enter the names of the advanced components to use in your course."),
        scope=Scope.settings
    )
    # XBlock flag: this block type may contain child blocks.
    has_children = True
    # Studio onboarding checklists shown to course authors; each entry is a
    # checklist with a short description and a list of actionable items.
    checklists = List(scope=Scope.settings,
                      default=[
                          {"short_description": _("Getting Started With Studio"),
                           "items": [{"short_description": _("Add Course Team Members"),
                                      "long_description": _("Grant your collaborators permission to edit your course so you can work together."),
                                      "is_checked": False,
                                      "action_url": "ManageUsers",
                                      "action_text": _("Edit Course Team"),
                                      "action_external": False},
                                     {"short_description": _("Set Important Dates for Your Course"),
                                      "long_description": _("Establish your course's student enrollment and launch dates on the Schedule and Details page."),
                                      "is_checked": False,
                                      "action_url": "SettingsDetails",
                                      "action_text": _("Edit Course Details & Schedule"),
                                      "action_external": False},
                                     {"short_description": _("Draft Your Course's Grading Policy"),
                                      "long_description": _("Set up your assignment types and grading policy even if you haven't created all your assignments."),
                                      "is_checked": False,
                                      "action_url": "SettingsGrading",
                                      "action_text": _("Edit Grading Settings"),
                                      "action_external": False},
                                     {"short_description": _("Explore the Other Studio Checklists"),
                                      "long_description": _("Discover other available course authoring tools, and find help when you need it."),
                                      "is_checked": False,
                                      "action_url": "",
                                      "action_text": "",
                                      "action_external": False}]},
                          {"short_description": _("Draft a Rough Course Outline"),
                           "items": [{"short_description": _("Create Your First Section and Subsection"),
                                      "long_description": _("Use your course outline to build your first Section and Subsection."),
                                      "is_checked": False,
                                      "action_url": "CourseOutline",
                                      "action_text": _("Edit Course Outline"),
                                      "action_external": False},
                                     {"short_description": _("Set Section Release Dates"),
                                      "long_description": _("Specify the release dates for each Section in your course. Sections become visible to students on their release dates."),
                                      "is_checked": False,
                                      "action_url": "CourseOutline",
                                      "action_text": _("Edit Course Outline"),
                                      "action_external": False},
                                     {"short_description": _("Designate a Subsection as Graded"),
                                      "long_description": _("Set a Subsection to be graded as a specific assignment type. Assignments within graded Subsections count toward a student's final grade."),
                                      "is_checked": False,
                                      "action_url": "CourseOutline",
                                      "action_text": _("Edit Course Outline"),
                                      "action_external": False},
                                     {"short_description": _("Reordering Course Content"),
                                      "long_description": _("Use drag and drop to reorder the content in your course."),
                                      "is_checked": False,
                                      "action_url": "CourseOutline",
                                      "action_text": _("Edit Course Outline"),
                                      "action_external": False},
                                     {"short_description": _("Renaming Sections"),
                                      "long_description": _("Rename Sections by clicking the Section name from the Course Outline."),
                                      "is_checked": False,
                                      "action_url": "CourseOutline",
                                      "action_text": _("Edit Course Outline"),
                                      "action_external": False},
                                     {"short_description": _("Deleting Course Content"),
                                      "long_description": _("Delete Sections, Subsections, or Units you don't need anymore. Be careful, as there is no Undo function."),
                                      "is_checked": False,
                                      "action_url": "CourseOutline",
                                      "action_text": _("Edit Course Outline"),
                                      "action_external": False},
                                     {"short_description": _("Add an Instructor-Only Section to Your Outline"),
                                      "long_description": _("Some course authors find using a section for unsorted, in-progress work useful. To do this, create a section and set the release date to the distant future."),
                                      "is_checked": False,
                                      "action_url": "CourseOutline",
                                      "action_text": _("Edit Course Outline"),
                                      "action_external": False}]},
                          {"short_description": _("Explore edX's Support Tools"),
                           "items": [{"short_description": _("Explore the Studio Help Forum"),
                                      "long_description": _("Access the Studio Help forum from the menu that appears when you click your user name in the top right corner of Studio."),
                                      "is_checked": False,
                                      "action_url": "http://help.edge.edx.org/",
                                      "action_text": _("Visit Studio Help"),
                                      "action_external": True},
                                     {"short_description": _("Enroll in edX 101"),
                                      "long_description": _("Register for edX 101, edX's primer for course creation."),
                                      "is_checked": False,
                                      "action_url": "https://edge.edx.org/courses/edX/edX101/How_to_Create_an_edX_Course/about",
                                      "action_text": _("Register for edX 101"),
                                      "action_external": True},
                                     {"short_description": _("Download the Studio Documentation"),
                                      "long_description": _("Download the searchable Studio reference documentation in PDF form."),
                                      "is_checked": False,
                                      "action_url": "http://files.edx.org/Getting_Started_with_Studio.pdf",
                                      "action_text": _("Download Documentation"),
                                      "action_external": True}]},
                          {"short_description": _("Draft Your Course About Page"),
                           "items": [{"short_description": _("Draft a Course Description"),
                                      "long_description": _("Courses on edX have an About page that includes a course video, description, and more. Draft the text students will read before deciding to enroll in your course."),
                                      "is_checked": False,
                                      "action_url": "SettingsDetails",
                                      "action_text": _("Edit Course Schedule & Details"),
                                      "action_external": False},
                                     {"short_description": _("Add Staff Bios"),
                                      "long_description": _("Showing prospective students who their instructor will be is helpful. Include staff bios on the course About page."),
                                      "is_checked": False,
                                      "action_url": "SettingsDetails",
                                      "action_text": _("Edit Course Schedule & Details"),
                                      "action_external": False},
                                     {"short_description": _("Add Course FAQs"),
                                      "long_description": _("Include a short list of frequently asked questions about your course."),
                                      "is_checked": False,
                                      "action_url": "SettingsDetails",
                                      "action_text": _("Edit Course Schedule & Details"),
                                      "action_external": False},
                                     {"short_description": _("Add Course Prerequisites"),
                                      "long_description": _("Let students know what knowledge and/or skills they should have before they enroll in your course."),
                                      "is_checked": False,
                                      "action_url": "SettingsDetails",
                                      "action_text": _("Edit Course Schedule & Details"),
                                      "action_external": False}]}
                      ])
    info_sidebar_name = String(
        display_name=_("Course Info Sidebar Name"),
        help=_("Enter the heading that you want students to see above your course handouts on the Course Info page. Your course handouts appear in the right panel of the page."),
        scope=Scope.settings, default='Course Handouts')
    show_timezone = Boolean(
        help="True if timezones should be shown on dates in the courseware. Deprecated in favor of due_date_display_format.",
        scope=Scope.settings, default=True
    )
    due_date_display_format = String(
        display_name=_("Due Date Display Format"),
        help=_("Enter the format due dates are displayed in. Due dates must be in MM-DD-YYYY, DD-MM-YYYY, YYYY-MM-DD, or YYYY-DD-MM format."),
        scope=Scope.settings, default=None
    )
    enrollment_domain = String(
        display_name=_("External Login Domain"),
        help=_("Enter the external login method students can use for the course."),
        scope=Scope.settings
    )
    certificates_show_before_end = Boolean(
        display_name=_("Certificates Downloadable Before End"),
        help=_("Enter true or false. If true, students can download certificates before the course ends, if they've met certificate requirements."),
        scope=Scope.settings,
        default=False,
        deprecated=True
    )
    certificates_display_behavior = String(
        display_name=_("Certificates Display Behavior"),
        help=_("Has three possible states: 'end', 'early_with_info', 'early_no_info'. 'end' is the default behavior, where certificates will only appear after a course has ended. 'early_with_info' will display all certificate information before a course has ended. 'early_no_info' will hide all certificate information unless a student has earned a certificate."),
        scope=Scope.settings,
        default="end"
    )
    course_image = String(
        display_name=_("Course About Page Image"),
        help=_("Edit the name of the course image file. You must upload this file on the Files & Uploads page. You can also set the course image on the Settings & Details page."),
        scope=Scope.settings,
        # Ensure that courses imported from XML keep their image
        default="images_course_image.jpg"
    )

    ## Course level Certificate Name overrides.
    cert_name_short = String(
        help=_("Between quotation marks, enter the short name of the course to use on the certificate that students receive when they complete the course."),
        display_name=_("Certificate Name (Short)"),
        scope=Scope.settings,
        default=""
    )
    cert_name_long = String(
        help=_("Between quotation marks, enter the long name of the course to use on the certificate that students receive when they complete the course."),
        display_name=_("Certificate Name (Long)"),
        scope=Scope.settings,
        default=""
    )

    # An extra property is used rather than the wiki_slug/number because
    # there are courses that change the number for different runs. This allows
    # courses to share the same css_class across runs even if they have
    # different numbers.
    #
    # TODO get rid of this as soon as possible or potentially build in a robust
    # way to add in course-specific styling. There needs to be a discussion
    # about the right way to do this, but arjun will address this ASAP. Also
    # note that the courseware template needs to change when this is removed.
    css_class = String(
        display_name=_("CSS Class for Course Reruns"),
        help=_("Allows courses to share the same css class across runs even if they have different numbers."),
        scope=Scope.settings, default="",
        deprecated=True
    )

    # TODO: This is a quick kludge to allow CS50 (and other courses) to
    # specify their own discussion forums as external links by specifying a
    # "discussion_link" in their policy JSON file. This should later get
    # folded in with Syllabus, Course Info, and additional Custom tabs in a
    # more sensible framework later.
    discussion_link = String(
        display_name=_("Discussion Forum External Link"),
        help=_("Allows specification of an external link to replace discussion forums."),
        scope=Scope.settings,
        deprecated=True
    )

    # TODO: same as above, intended to let internal CS50 hide the progress tab
    # until we get grade integration set up.
    # Explicit comparison to True because we always want to return a bool.
    hide_progress_tab = Boolean(
        display_name=_("Hide Progress Tab"),
        help=_("Allows hiding of the progress tab."),
        scope=Scope.settings,
        deprecated=True
    )
    display_organization = String(
        display_name=_("Course Organization Display String"),
        help=_("Enter the course organization that you want to appear in the courseware. This setting overrides the organization that you entered when you created the course. To use the organization that you entered when you created the course, enter null."),
        scope=Scope.settings
    )
    display_coursenumber = String(
        display_name=_("Course Number Display String"),
        help=_("Enter the course number that you want to appear in the courseware. This setting overrides the course number that you entered when you created the course. To use the course number that you entered when you created the course, enter null."),
        scope=Scope.settings
    )
    max_student_enrollments_allowed = Integer(
        display_name=_("Course Maximum Student Enrollment"),
        help=_("Enter the maximum number of students that can enroll in the course. To allow an unlimited number of students, enter null."),
        scope=Scope.settings
    )
    allow_public_wiki_access = Boolean(display_name=_("Allow Public Wiki Access"),
                                       help=_("Enter true or false. If true, edX users can view the course wiki even if they're not enrolled in the course."),
                                       default=False,
                                       scope=Scope.settings)
    invitation_only = Boolean(display_name=_("Invitation Only"),
                              help="Whether to restrict enrollment to invitation by the course staff.",
                              default=False,
                              scope=Scope.settings)
class CourseDescriptor(CourseFields, SequenceDescriptor):
module_class = SequenceModule
def __init__(self, *args, **kwargs):
    """
    Expects the same arguments as XModuleDescriptor.__init__
    """
    super(CourseDescriptor, self).__init__(*args, **kwargs)
    _ = self.runtime.service(self, "i18n").ugettext

    # Default the wiki slug to the course name when none was provided.
    if self.wiki_slug is None:
        self.wiki_slug = self.location.course

    if self.due_date_display_format is None and self.show_timezone is False:
        # For existing courses with show_timezone set to False (and no due_date_display_format specified),
        # set the due_date_display_format to what would have been shown previously (with no timezone).
        # Then remove show_timezone so that if the user clears out the due_date_display_format,
        # they get the default date display.
        self.due_date_display_format = "DATE_TIME"
        delattr(self, 'show_timezone')

    # NOTE: relies on the modulestore to call set_grading_policy() right after
    # init. (Modulestore is in charge of figuring out where to load the policy from)

    # NOTE (THK): This is a last-minute addition for Fall 2012 launch to dynamically
    # disable the syllabus content for courses that do not provide a syllabus
    if self.system.resources_fs is None:
        self.syllabus_present = False
    else:
        self.syllabus_present = self.system.resources_fs.exists(path('syllabus'))

    # Processed-grading-policy cache; populated by set_grading_policy().
    self._grading_policy = {}
    self.set_grading_policy(self.grading_policy)

    # Guarantee at least one discussion topic ('General') exists.
    if self.discussion_topics == {}:
        self.discussion_topics = {_('General'): {'id': self.location.html_id()}}

    # Build the default tab list when no tabs were configured.
    if not getattr(self, "tabs", []):
        CourseTabList.initialize_default(self)
def set_grading_policy(self, course_policy):
    """
    The JSON object can have the keys GRADER and GRADE_CUTOFFS. If either is
    missing, it reverts to the default.
    """
    if course_policy is None:
        course_policy = {}

    # Load the global settings as a dictionary
    grading_policy = self.grading_policy
    # BOY DO I HATE THIS grading_policy CODE ACROBATICS YET HERE I ADD MORE (dhm)--this fixes things persisted w/
    # defective grading policy values (but not None)
    if 'GRADER' not in grading_policy:
        grading_policy['GRADER'] = CourseFields.grading_policy.default['GRADER']
    if 'GRADE_CUTOFFS' not in grading_policy:
        grading_policy['GRADE_CUTOFFS'] = CourseFields.grading_policy.default['GRADE_CUTOFFS']

    # Override any global settings with the course settings
    grading_policy.update(course_policy)

    # Here is where we should parse any configurations, so that we can fail early
    # Use setters so that side effecting to .definitions works
    self.raw_grader = grading_policy['GRADER']  # used for cms access
    self.grade_cutoffs = grading_policy['GRADE_CUTOFFS']
@classmethod
def read_grading_policy(cls, paths, system):
    """Load a grading policy from the specified paths, in order, if it exists."""
    # Fall back to an empty JSON object when no policy file can be read.
    policy_str = '{}'
    for policy_path in paths:
        if not system.resources_fs.exists(policy_path):
            continue
        log.debug("Loading grading policy from {0}".format(policy_path))
        try:
            with system.resources_fs.open(policy_path) as grading_policy_file:
                policy_str = grading_policy_file.read()
        except IOError:
            log.warning("Unable to load course settings file from '{0}'".format(policy_path))
        else:
            # Successfully read this file, so stop looking at backups.
            break
    return policy_str
@classmethod
def from_xml(cls, xml_data, system, id_generator):
    # Build the descriptor normally, then locate and apply the grading policy.
    instance = super(CourseDescriptor, cls).from_xml(xml_data, system, id_generator)

    # bleh, have to parse the XML here to just pull out the url_name attribute
    # I don't think it's stored anywhere in the instance.
    course_file = StringIO(xml_data.encode('ascii', 'ignore'))
    xml_obj = etree.parse(course_file, parser=edx_xml_parser).getroot()

    policy_dir = None
    url_name = xml_obj.get('url_name', xml_obj.get('slug'))
    if url_name:
        policy_dir = 'policies/' + url_name

    # Try to load grading policy
    paths = ['grading_policy.json']
    if policy_dir:
        # The run-specific policy file takes precedence over the default.
        paths = [policy_dir + '/grading_policy.json'] + paths

    try:
        policy = json.loads(cls.read_grading_policy(paths, system))
    except ValueError:
        system.error_tracker("Unable to decode grading policy as json")
        policy = {}

    # now set the current instance. set_grading_policy() will apply some inheritance rules
    instance.set_grading_policy(policy)

    return instance
@classmethod
def definition_from_xml(cls, xml_object, system):
    """Strip textbook/wiki metadata out of the XML, then delegate to the parent parser."""
    # Collect every <textbook> element as a (title, book_url) pair, removing
    # each element so the parent parser does not see it.
    textbooks = []
    for textbook_elem in xml_object.findall("textbook"):
        textbooks.append((textbook_elem.get('title'), textbook_elem.get('book_url')))
        xml_object.remove(textbook_elem)

    # Extract the optional <wiki slug="..."/> element, if present.
    wiki_slug = None
    wiki_elem = xml_object.find("wiki")
    if wiki_elem is not None:
        wiki_slug = wiki_elem.attrib.get("slug", default=None)
        xml_object.remove(wiki_elem)

    definition, children = super(CourseDescriptor, cls).definition_from_xml(xml_object, system)
    definition['textbooks'] = textbooks
    definition['wiki_slug'] = wiki_slug

    return definition, children
def definition_to_xml(self, resource_fs):
    """Serialize the definition to XML, re-adding <textbook> and <wiki> elements.

    Inverse of definition_from_xml, which reads one <textbook> element per
    textbook plus a single optional <wiki> element.
    """
    xml_object = super(CourseDescriptor, self).definition_to_xml(resource_fs)

    if len(self.textbooks) > 0:
        # Bug fix: the original created a single etree.Element before the
        # loop and overwrote its attributes on every iteration, so only the
        # last textbook's title/book_url survived.  Emit one <textbook>
        # element per entry so definition_from_xml can round-trip them all.
        for textbook in self.textbooks:
            textbook_xml_object = etree.Element('textbook')
            textbook_xml_object.set('title', textbook.title)
            textbook_xml_object.set('book_url', textbook.book_url)
            xml_object.append(textbook_xml_object)

    if self.wiki_slug is not None:
        wiki_xml_object = etree.Element('wiki')
        wiki_xml_object.set('slug', self.wiki_slug)
        xml_object.append(wiki_xml_object)

    return xml_object
def has_ended(self):
    """
    Return True when an end date is set and the current time is past it;
    False when no end date exists or it is still in the future.
    """
    end = self.end
    return end is not None and datetime.now(UTC()) > end

def may_certify(self):
    """
    Return True if it is acceptable to show the student a certificate download link.
    """
    early = (self.certificates_display_behavior in ('early_with_info', 'early_no_info')
             or self.certificates_show_before_end)
    if early:
        return True
    return self.has_ended()

def has_started(self):
    """Return True once the course start time has passed."""
    return datetime.now(UTC()) > self.start
@property
def grader(self):
    # Build the processed grader object from the raw policy dict on demand.
    return grader_from_conf(self.raw_grader)

@property
def raw_grader(self):
    # force the caching of the xblock value so that it can detect the change
    # pylint: disable=pointless-statement
    self.grading_policy['GRADER']
    return self._grading_policy['RAW_GRADER']

@raw_grader.setter
def raw_grader(self, value):
    # NOTE WELL: this change will not update the processed graders. If we need that, this needs to call grader_from_conf
    # Write-through: update both the processed cache and the persisted field.
    self._grading_policy['RAW_GRADER'] = value
    self.grading_policy['GRADER'] = value

@property
def grade_cutoffs(self):
    # Read from the processed cache, not the persisted field.
    return self._grading_policy['GRADE_CUTOFFS']

@grade_cutoffs.setter
def grade_cutoffs(self, value):
    self._grading_policy['GRADE_CUTOFFS'] = value

    # XBlock fields don't update after mutation
    # so copy the dict, mutate the copy, and reassign the whole field.
    policy = self.grading_policy
    policy['GRADE_CUTOFFS'] = value
    self.grading_policy = policy

@property
def lowest_passing_grade(self):
    # Smallest cutoff value across all configured grade letters.
    return min(self._grading_policy['GRADE_CUTOFFS'].values())
@property
def is_cohorted(self):
    """
    Return whether the course is cohorted.
    """
    cohort_settings = self.cohort_config
    return False if cohort_settings is None else bool(cohort_settings.get("cohorted"))

@property
def auto_cohort(self):
    """
    Return whether the course is auto-cohorted.
    """
    return self.is_cohorted and bool(self.cohort_config.get("auto_cohort", False))

@property
def auto_cohort_groups(self):
    """
    Return the list of groups to put students into. Returns [] if not
    specified. Returns specified list even if is_cohorted and/or auto_cohort are
    false.
    """
    cohort_settings = self.cohort_config
    return [] if cohort_settings is None else cohort_settings.get("auto_cohort_groups", [])

@property
def top_level_discussion_topic_ids(self):
    """
    Return list of topic ids defined in course policy.
    """
    return [topic["id"] for topic in self.discussion_topics.values()]

@property
def cohorted_discussions(self):
    """
    Return the set of discussions that is explicitly cohorted. It may be
    the empty set. Note that all inline discussions are automatically
    cohorted based on the course's is_cohorted setting.
    """
    cohort_settings = self.cohort_config
    if cohort_settings is None:
        return set()
    return set(cohort_settings.get("cohorted_discussions", []))
@property
def is_newish(self):
    """
    Returns if the course has been flagged as new. If
    there is no flag, return a heuristic value considering the
    announcement and the start dates.
    """
    flag = self.is_new
    if flag is None:
        # Use a heuristic if the course has not been flagged
        announcement, start, now = self._sorting_dates()
        if announcement and (now - announcement).days < 30:
            # The course has been announced for less that month
            return True
        elif (now - start).days < 1:
            # The course has not started yet
            return True
        else:
            return False
    elif isinstance(flag, basestring):
        # The flag may be persisted as a string; accept common truthy spellings.
        return flag.lower() in ['true', 'yes', 'y']
    else:
        return bool(flag)
@property
def sorting_score(self):
    """
    Returns a number that can be used to sort the courses according
    the how "new" they are. The "newness" score is computed using a
    heuristic that takes into account the announcement and
    (advertized) start dates of the course if available.

    The lower the number the "newer" the course.
    """
    # Make courses that have an announcement date have a lower
    # score than courses that don't; older courses should have a
    # higher score.
    announcement, start, now = self._sorting_dates()
    scale = 300.0  # about a year
    if announcement:
        # Announced: negative score that decays toward 0 as the
        # announcement ages.
        days = (now - announcement).days
        score = -exp(-days / scale)
    else:
        # Unannounced: positive score that grows as the start date ages.
        days = (now - start).days
        score = exp(days / scale)

    return score
def _sorting_dates(self):
    """
    Return (announcement, start, now) datetimes used to compute the
    is_new flag and the sorting_score.
    """
    # Bug fix: the original had a pointless guarded self-assignment here
    # (`if announcement is not None: announcement = announcement`); it was
    # dead code and has been removed.
    announcement = self.announcement

    try:
        # advertised_start is free-form text; attempt to parse it as a date
        # and normalize naive results to UTC.
        start = dateutil.parser.parse(self.advertised_start)
        if start.tzinfo is None:
            start = start.replace(tzinfo=UTC())
    except (ValueError, AttributeError):
        # Unparseable (or None): fall back to the real start date.
        start = self.start

    now = datetime.now(UTC())

    return announcement, start, now
@lazy
def grading_context(self):
    """
    This returns a dictionary with keys necessary for quickly grading
    a student. They are used by grades.grade()

    The grading context has two keys:
    graded_sections - This contains the sections that are graded, as
        well as all possible children modules that can affect the
        grading. This allows some sections to be skipped if the student
        hasn't seen any part of it.
        The format is a dictionary keyed by section-type. The values are
        arrays of dictionaries containing
            "section_descriptor" : The section descriptor
            "xmoduledescriptors" : An array of xmoduledescriptors that
                could possibly be in the section, for any student

    all_descriptors - This contains a list of all xmodules that can
        effect grading a student. This is used to efficiently fetch
        all the xmodule state for a FieldDataCache without walking
        the descriptor tree again.
    """
    all_descriptors = []
    graded_sections = {}

    def yield_descriptor_descendents(module_descriptor):
        # Depth-first traversal over every descendant of module_descriptor.
        for child in module_descriptor.get_children():
            yield child
            for module_descriptor in yield_descriptor_descendents(child):
                yield module_descriptor

    # Children of the course are chapters (c); their children are sections (s).
    for c in self.get_children():
        for s in c.get_children():
            if s.graded:
                xmoduledescriptors = list(yield_descriptor_descendents(s))
                xmoduledescriptors.append(s)

                # The xmoduledescriptors included here are only the ones that have scores.
                section_description = {
                    'section_descriptor': s,
                    'xmoduledescriptors': filter(lambda child: child.has_score, xmoduledescriptors)
                }

                # Group graded sections by their assignment format (e.g. 'Homework').
                section_format = s.format if s.format is not None else ''
                graded_sections[section_format] = graded_sections.get(section_format, []) + [section_description]

                all_descriptors.extend(xmoduledescriptors)
                all_descriptors.append(s)

    return {'graded_sections': graded_sections,
            'all_descriptors': all_descriptors, }
@staticmethod
def make_id(org, course, url_name):
    """Join org, course, and url_name into the legacy slash-separated course id."""
    return '/'.join((org, course, url_name))

@property
def id(self):
    """Return the course_id for this course"""
    return self.location.course_key
@property
def start_date_text(self):
    """
    Returns the desired text corresponding the course's start date. Prefers .advertised_start,
    then falls back to .start
    """
    i18n = self.runtime.service(self, "i18n")
    _ = i18n.ugettext
    strftime = i18n.strftime

    def try_parse_iso_8601(text):
        # advertised_start may be an ISO-8601 date or free-form text
        # (e.g. "fall 2013"); free-form text is title-cased and shown as-is.
        try:
            result = Date().from_json(text)
            if result is None:
                result = text.title()
            else:
                result = strftime(result, "SHORT_DATE")
        except ValueError:
            result = text.title()

        return result

    if isinstance(self.advertised_start, basestring):
        return try_parse_iso_8601(self.advertised_start)
    elif self.start_date_is_still_default:
        # Translators: TBD stands for 'To Be Determined' and is used when a course
        # does not yet have an announced start date.
        return _('TBD')
    else:
        when = self.advertised_start or self.start
        return strftime(when, "SHORT_DATE")
@property
def start_date_is_still_default(self):
    """
    True when neither an advertised start has been provided nor the .start
    field changed from its class default.
    """
    return self.advertised_start is None and self.start == CourseFields.start.default

@property
def end_date_text(self):
    """
    Return the course end date formatted as a short string, or an empty
    string when the course has no end date set.
    """
    if self.end is None:
        return ''
    strftime = self.runtime.service(self, "i18n").strftime
    return strftime(self.end, "SHORT_DATE")
@property
def forum_posts_allowed(self):
    """
    Return False while the current time falls inside any configured
    discussion blackout period; True otherwise.  On any error parsing the
    blackout config the error is logged and posting stays enabled
    (best-effort behavior preserved).
    """
    date_proxy = Date()
    try:
        blackout_periods = [(date_proxy.from_json(start),
                             date_proxy.from_json(end))
                            for start, end
                            in self.discussion_blackouts]
        now = datetime.now(UTC())
        for start, end in blackout_periods:
            if start <= now <= end:
                return False
    except Exception:  # pylint: disable=broad-except
        # Bug fix: this was a bare `except:`, which also swallowed
        # SystemExit and KeyboardInterrupt.  Keep the deliberate
        # swallow-and-allow behavior for real errors only.
        log.exception("Error parsing discussion_blackouts for course {0}".format(self.id))

    return True
@property
def number(self):
    """The catalog course number, taken from the course location."""
    return self.location.course

@property
def display_number_with_default(self):
    """
    Return a display course number if it has been specified, otherwise return the 'course' that is in the location
    """
    return self.display_coursenumber if self.display_coursenumber else self.number

@property
def org(self):
    """The organization, taken from the course location."""
    return self.location.org

@property
def display_org_with_default(self):
    """
    Return a display organization if it has been specified, otherwise return the 'org' that is in the location
    """
    return self.display_organization if self.display_organization else self.org

@property
def get_course_kinds(self):
    """Return the configured course kinds, or '' when unset."""
    return self.course_kinds if self.course_kinds else ""

@property
def get_course_level(self):
    """Return the configured course level, or '' when unset."""
    return self.course_level if self.course_level else ""
|
nicky-ji/edx-nicky
|
common/lib/xmodule/xmodule/course_module.py
|
Python
|
agpl-3.0
| 48,311
|
[
"VisIt"
] |
6e6fe6ffee3c0e33950ca46b25ce04002824b82116518bc396dae83abd07c612
|
# freeseer - vga/presentation capture software
#
# Copyright (C) 2011, 2013 Free and Open Source Software Learning Centre
# http://fosslc.org
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# For support, questions, suggestions or any other inquiries, visit:
# http://github.com/Freeseer/freeseer/
'''
Video Preview
-------------
An output plugin which provides a video window to preview the video that
is being recorded in real time.
@author: Thanh Ha
'''
# GStreamer
import pygst
pygst.require("0.10")
import gst
# PyQT
from PyQt4.QtCore import SIGNAL
# Freeseer
from freeseer.framework.plugin import IOutput
from freeseer.framework.config import Config, options
# .freeseer-plugin custom
import widget
# Leaky Queue
LEAKY_VALUES = ["no", "upstream", "downstream"]
class VideoPreviewConfig(Config):
    """Configuration class for VideoPreview plugin."""
    # Video Preview variables
    # GStreamer sink element name used to render the preview window.
    previewsink = options.StringOption("autovideosink")
    # Queue leakiness setting; one of LEAKY_VALUES ("no"/"upstream"/"downstream").
    leakyqueue = options.ChoiceOption(LEAKY_VALUES, "no")
class VideoPreview(IOutput):
    """Output plugin that shows a live preview window of the video being recorded."""
    name = "Video Preview"
    os = ["linux", "linux2", "win32", "cygwin", "darwin"]
    type = IOutput.VIDEO
    recordto = IOutput.OTHER
    CONFIG_CLASS = VideoPreviewConfig

    def get_output_bin(self, audio=False, video=True, metadata=None):
        """Build the GStreamer bin: queue -> ffmpegcolorspace -> preview sink."""
        # Idiom fix: renamed local `bin` -> `output_bin` to stop shadowing the
        # `bin` builtin.
        output_bin = gst.Bin()

        # Leaky queue necessary to work with rtmp streaming
        videoqueue = gst.element_factory_make("queue", "videoqueue")
        videoqueue.set_property("leaky", self.config.leakyqueue)
        output_bin.add(videoqueue)

        cspace = gst.element_factory_make("ffmpegcolorspace", "cspace")
        output_bin.add(cspace)

        videosink = gst.element_factory_make(self.config.previewsink, "videosink")
        output_bin.add(videosink)

        # Setup ghost pad so the bin exposes the queue's sink pad externally.
        pad = videoqueue.get_pad("sink")
        ghostpad = gst.GhostPad("sink", pad)
        output_bin.add_pad(ghostpad)

        # Link Elements
        videoqueue.link(cspace)
        cspace.link(videosink)

        return output_bin

    def get_widget(self):
        """Lazily create and return the plugin's configuration widget."""
        if self.widget is None:
            self.widget = widget.ConfigWidget()
            self.widget.leakyQueueComboBox.addItems(LEAKY_VALUES)
        return self.widget

    def __enable_connections(self):
        # Wire the combo-box change signals to the config setters.
        self.widget.connect(self.widget.previewComboBox, SIGNAL('currentIndexChanged(const QString&)'), self.set_previewsink)
        self.widget.connect(self.widget.leakyQueueComboBox, SIGNAL('currentIndexChanged(const QString&)'), self.set_leakyqueue)

    def widget_load_config(self, plugman):
        """Populate the widget from the saved config, then connect its signals."""
        self.load_config(plugman)

        previewIndex = self.widget.previewComboBox.findText(self.config.previewsink)
        self.widget.previewComboBox.setCurrentIndex(previewIndex)
        leakyQueueIndex = self.widget.leakyQueueComboBox.findText(self.config.leakyqueue)
        self.widget.leakyQueueComboBox.setCurrentIndex(leakyQueueIndex)

        # Finally enable connections
        self.__enable_connections()

    def set_previewsink(self, previewsink):
        """Persist the selected preview sink element name."""
        self.config.previewsink = previewsink
        self.config.save()

    def set_leakyqueue(self, value):
        """Persist the selected queue leakiness value."""
        self.config.leakyqueue = value
        self.config.save()

    ###
    ### Translations
    ###

    def retranslate(self):
        self.widget.previewLabel.setText(self.gui.app.translate('plugin-videopreview', 'Preview'))
        self.widget.leakyQueueLabel.setText(self.gui.app.translate('plugin-videopreview', 'Leaky Queue'))
|
Freeseer/freeseer
|
src/freeseer/plugins/output/videopreview/__init__.py
|
Python
|
gpl-3.0
| 4,047
|
[
"VisIt"
] |
6fffd2184f490ec39a5993eb2d6388186478c5777c020064759590f9fda39c9c
|
#!/usr/bin/env python
# GOAL: measure completeness of 24um scans
#
# PROCEDURE:
# - run sextractor on image to create segmentation map
# - place artificial sources in areas where there is not existing source
# - rerun sextractor to detect artificial images
#
#
#
from pylab import *
# run sextractor
# find positions of real galaxies
# place artificial galaxies on image
# run sextractor
# detect galaxies
def readsexfile(file):
    """Scan a sextractor output file, skipping '#' comment lines.

    Bug fix: the original opened the file and never closed it; a context
    manager now guarantees the handle is released.

    NOTE(review): the loop body only skips comment lines and discards data
    lines -- parsing looks unfinished upstream.  The no-op behavior (returns
    None) is deliberately preserved.
    """
    with open(file, 'r') as infile:
        for line in infile:
            if line.find('#') > -1:  # skip lines with '#' in them
                continue
def completeness():#measure completeness on final image
file='mosaic_extract_final.tbl'
input=open(file,'r')
xgal=[]#positions of previous detections with snr > 3
ygal=[]
fap4gal=[]
for line in input:
if line.find('#') > -1: #skip lines with '#' in them
continue
if line.find('\\') > -1: #skip lines with '#' in them
continue
if line.find('|') > -1: #skip lines with '#' in them
continue
t=line.split()
xgal.append(float(t[8]))
ygal.append(float(t[10]))
fap4gal.append(float(t[28]))
input.close()
xgal=N.array(xgal,'f')
ygal=N.array(ygal,'f')
fsimall=[]
matchflagsimall=[]
f2all=[]
f3all=[]
f4all=[]
deblendsimall=[]
snrsimall=[]
myminmag=24.75
mymaxmag=27.4
myfmin=10.**((25.-mymaxmag)/2.5)#ZP=25
myfmax=10.**((25.-myminmag)/2.5)#ZP=25
#below is loop to create image w/artificial sources, extract, and compare
for k in range(100):
createflag=1.#create image w/artificial sources?
detectflag=1.#detect sources in image?
if createflag > 0.1:
xnoise=[]
ynoise=[]
infile=open('noisecoords.dat','r')#read in list of coordinates corresponding to positions where no real source exists. These are generated by spitzergetnoise.py.
for line in infile:
t=line.split()
xnoise.append(float(t[0]))
ynoise.append(float(t[1]))
infile.close()
nstar=10
xsim=N.zeros(nstar,'d')
ysim=N.zeros(nstar,'d')
msim=N.zeros(nstar,'d')
outfile=open('stars.coords.dat','w')
for i in range(nstar):
#j=int(round(1.*len(xnoise)*random.uniform(0,1)))
#xsim[i]=xnoise[j]
#ysim[i]=ynoise[j]
j=0
for j in range(10000):
xt=int(round(random.uniform(5.,125.)))
yt=int(round(random.uniform(5.,140.)))
d=pylab.sqrt((xt-xgal)**2+(yt-ygal)**2)#make sure sim galaxies are not near real galaxies
if min(d) > -1.:
d2=pylab.sqrt((xt-xsim)**2+(yt-ysim)**2)#make sure sim points are not on top of each other
if min(d2) > 5.:
print i,'got a good point after ',j,' tries',xt,yt
break
j=j+1
xsim[i]=xt
ysim[i]=yt
k=random.uniform(myfmin,myfmax)
msim[i]=25.-2.5*pylab.log10(k)
#print k,msim[i]
s='%8.2f %8.2f %8.2f \n' % (xsim[i],ysim[i],msim[i])
outfile.write(s)
outfile.close()
os.system('rm mosaic-completeness.fits')
#iraf.mkobjects(input='mosaic_minus_median_extract.fits',output='mosaic-completeness.fits',objects='stars.coords.dat',radius=1.13,magzero=25.,background=0.,gain=5.,rdnoise=0.,poisson='no')#don't convolve w/PRF
#os.system('cp ../cal/MIPS24_PRF_HD_center.fits .')#convolve star w/SSC PRF
os.system('cp ../cal/mips24_prf_mosaic_2.45_4x.fits .')#convolve star w/SSC PRF
iraf.mkobjects(input='mosaic_minus_median_extract.fits',output='mosaic-completeness.fits',objects='stars.coords.dat',radius=14,star='mips24_prf_mosaic_2.45_4x.fits',magzero=25.,background=0.,gain=5.,rdnoise=0.,poisson='no')
os.system('ls *.fits')
os.system('pwd')
iraf.display('mosaic_minus_median_extract.fits',1,contrast=0.01)
iraf.display('mosaic-completeness.fits',2,contrast=0.01)
iraf.tvmark(1,'stars.coords.dat')
iraf.tvmark(2,'stars.coords.dat')
fsim=10.**((25.-msim)/2.5)#ZP=25
if createflag < .1:#read in positions and magnitudes of artdata sources
xsim=[]
ysim=[]
msim=[]
infile=open('stars.coords.dat','r')
for line in infile:
if line.find('#') > -1:
continue
t=line.split()
xsim.append(float(t[0]))
ysim.append(float(t[1]))
msim.append(float(t[2]))
infile.close()
xsim=N.array(xsim,'f')
ysim=N.array(ysim,'f')
msim=N.array(msim,'f')
fsim=10.**((25.-msim)/2.5)#ZP=25
if detectflag > 0.1:#now run detection on mosaic-completeness.fits
if SqDegS > 0.1:
combinepath=bcdpath
else:
combinepath=bcdpath+'pbcd/Combine/'
os.chdir(combinepath)
print combinepath
#os.system('apex_1frame.pl -n apex_1frame_MIPS24_step2.nl -i output_apex_step2/mosaic-completeness.fits')
#combinepath=bcdpath+'pbcd/Combine/output_apex_step2'
if SqDegS > 0.1:
s='cp /Users/rfinn/clusters/spitzer/apex_1frame_step2all_400.nl '+bcdpath+'cdf/'
os.system(s)
os.system('apex_1frame.pl -n apex_1frame_step2all_400.nl -i output_apex_step2/mosaic-completeness.fits')
combinepath=bcdpath+'output_apex_step2/'
else:
os.system('apex_1frame.pl -n apex_1frame_step2all.nl -i apex_1frame_step2/mosaic-completeness.fits')
combinepath=bcdpath+'pbcd/Combine/apex_1frame_step2'
os.chdir(combinepath)
print combinepath
file='mosaic-completeness_extract_raw.tbl'
input=open(file,'r')
ngal=0
for line in input:
if line.find('Conversion') > -1:
t=line.split('=')
convfactor=float(t[1])#conversion from ADU to uJy
#aperr=aveaperr*convfactor #convert noise in ADU to uJy using conv factor from apex
print "Conversion Factor = ",convfactor
#print "aveaperr = ",aveaperr
#print "aperr = ",aperr
continue
if line.find('#') > -1: #skip lines with '#' in them
continue
if line.find('\\') > -1: #skip lines with '#' in them
continue
if line.find('|') > -1: #skip lines with '#' in them
continue
ngal=ngal+1
input.close()
id24 = N.zeros(ngal,'f')
imagex24 = N.zeros(ngal,'f')
imagey24 = N.zeros(ngal,'f')
ra24 = N.zeros(ngal,'f')
dec24 = N.zeros(ngal,'f')
f24 = N.zeros(ngal,'d')#flux
errf24 = N.zeros(ngal,'d')
fap1 = N.zeros(ngal,'d')#flux in aperture 1 (1,1.5,2,2.6,3,3.5,4,4.5,5.,5.5) pixels
fap2 = N.zeros(ngal,'d')#flux
fap3 = N.zeros(ngal,'d')#flux
fap4 = N.zeros(ngal,'d')#flux in ap 4 - this is one w/ap cor of 1.67 (Calzetti et al 2007)
fap5 = N.zeros(ngal,'d')#flux
fap6 = N.zeros(ngal,'d')#flux
fap7 = N.zeros(ngal,'d')#flux
fap8 = N.zeros(ngal,'d')#flux
fap9 = N.zeros(ngal,'d')#flux
fap10 = N.zeros(ngal,'d')#flux
snr24 = N.zeros(ngal,'d')#SNR calculated by mopex
deblend = N.zeros(ngal,'f')#SNR calculated by mopex
input=open(file,'r')
i=0
output=open('xy24raw.dat','w')
for line in input:
if line.find('#') > -1: #skip lines with '#' in them
continue
if line.find('\\') > -1: #skip lines with '#' in them
continue
if line.find('|') > -1: #skip lines with '#' in them
continue
t=line.split()
#print "length of t = ",len(t)
#print (t[8]),(t[10]),(t[13]),(t[14]),(t[18]),(t[2]),(t[23]),(t[24]),(t[25]),(t[26]),(t[27]),(t[28]),(t[29]),(t[30]),(t[31]),(t[32])
(imagex24[i],imagey24[i],f24[i],errf24[i],snr24[i],deblend[i],fap1[i],fap2[i],fap3[i],fap4[i],fap5[i],fap6[i],fap7[i],fap8[i],fap9[i],fap10[i])=(float(t[8]),float(t[10]),float(t[13]),float(t[14]),float(t[18]),float(t[2]),float(t[25]),float(t[26]),float(t[27]),float(t[28]),float(t[29]),float(t[30]),float(t[31]),float(t[32]),float(t[33]),float(t[34]))
s='%6.2f %6.2f \n'%(imagex24[i],imagey24[i])
output.write(s)
i=i+1
input.close()#44 -> 43
output.close()
iraf.tvmark(1,'xy24raw.dat',color=204,radi=2)
iraf.tvmark(2,'xy24raw.dat',color=204,radi=2)
delta=1.#max number of pixels for a match
#get rid of objects that were detected in original image. Otherwise, matching will think any object near a sim galaxy is the sim galaxy. A faint galaxy placed on type of a pre-existing bright galaxy will be detected.
newgalflag=N.ones(len(imagex24),'i')
for i in range(len(imagex24)):
(imatch, matchflag,nmatch)=findnearest(imagex24[i],imagey24[i],xgal,ygal,delta)
if matchflag > 0.:
dflux=abs(fap4gal[imatch] - fap4[i])/fap4[i]
if dflux < .1:#position of real galaxy, flux difference less than 10% -> not a new galaxy
newgalflag[i] = 0
#keep only galaxies that are new
imagex24 = N.compress(newgalflag,imagex24)
imagey24 = N.compress(newgalflag,imagey24)
fap1 = N.compress(newgalflag,fap1)
fap2 = N.compress(newgalflag,fap2)
fap3 = N.compress(newgalflag,fap3)
fap4 = N.compress(newgalflag,fap4)
fap5 = N.compress(newgalflag,fap5)
fap6 = N.compress(newgalflag,fap6)
fap7 = N.compress(newgalflag,fap7)
fap8 = N.compress(newgalflag,fap8)
fap9 = N.compress(newgalflag,fap9)
fap10 =N.compress(newgalflag,fap10)
snr24 =N.compress(newgalflag,snr24)
deblend = N.compress(newgalflag,deblend)
delta=2.#max number of pixels for a match
matchflagsim=N.zeros(len(xsim),'i')
fmeas1=N.zeros(len(xsim),'f')
fmeas2=N.zeros(len(xsim),'f')
fmeas3=N.zeros(len(xsim),'f')
fmeas4=N.zeros(len(xsim),'f')
fmeas5=N.zeros(len(xsim),'f')
fmeas6=N.zeros(len(xsim),'f')
fmeas7=N.zeros(len(xsim),'f')
fmeas8=N.zeros(len(xsim),'f')
fmeas9=N.zeros(len(xsim),'f')
fmeas10=N.zeros(len(xsim),'f')
fmeas24=N.zeros(len(xsim),'f')
deblendsim=N.zeros(len(xsim),'f')
snrsim=N.zeros(len(xsim),'f')
for i in range(len(xsim)):
(imatch, matchflag,nmatch)=findnearest(xsim[i],ysim[i],imagex24,imagey24,delta)
matchflagsim[i]=matchflag
if matchflag > .1:
fmeas1[i]=fap1[int(imatch)]
fmeas2[i]=fap2[int(imatch)]
fmeas3[i]=fap3[int(imatch)]
fmeas4[i]=fap4[int(imatch)]
fmeas5[i]=fap5[int(imatch)]
fmeas6[i]=fap6[int(imatch)]
fmeas7[i]=fap7[int(imatch)]
fmeas8[i]=fap8[int(imatch)]
fmeas9[i]=fap9[int(imatch)]
fmeas10[i]=fap10[int(imatch)]
fmeas24[i]=f24[int(imatch)]
deblendsim[i]=deblend[int(imatch)]
snrsim[i]=snr24[int(imatch)]
fsimall=fsimall+list(fsim)
matchflagsimall=matchflagsimall+list(matchflagsim)
f2all=f2all+list(fmeas2)
f3all=f3all+list(fmeas3)
f4all=f4all+list(fmeas4)
deblendsimall=deblendsimall+list(deblendsim)
snrsimall=snrsimall+list(snrsim)
fsim=N.array(fsimall,'f')
matchflagsim=N.array(matchflagsimall,'f')
fmeas2=N.array(f2all,'f')
fmeas3=N.array(f3all,'f')
fmeas4=N.array(f4all,'f')
deblendsim=N.array(deblendsimall,'f')
snrsim=N.array(snrsimall,'f')
#make plots using all realizations
pylab.cla()
pylab.clf()
fsim=fsim*convfactor
fs=pylab.compress((matchflagsim > 0.1) & (deblendsim < 1.5),fsim)
#f1=pylab.compress((matchflagsim > 0.1) & (deblendsim < 1.5),fmeas1)
f2=pylab.compress((matchflagsim > 0.1) & (deblendsim < 1.5),fmeas2)
f3=pylab.compress((matchflagsim > 0.1) & (deblendsim < 1.5),fmeas3)
f4=pylab.compress((matchflagsim > 0.1) & (deblendsim < 1.5),fmeas4)
#f242=pylab.compress((matchflagsim > 0.1) & (deblendsim < 1.5),fmeas24)
r4=pylab.median(fs/f4)
r3=pylab.median(fs/f3)
r2=pylab.median(fs/f2)
print "average ratios ap 4",pylab.average(fs/f4),r4,pylab.std((fs/f4)/pylab.average(fs/f2))
print "average ratios ap 3",pylab.average(fs/f3),pylab.median(fs/f3),pylab.std((fs/f3)/pylab.average(fs/f3))
print "average ratios ap 2",pylab.average(fs/f2),pylab.median(fs/f2),pylab.std((fs/f2)/pylab.average(fs/f2))
s='f4 w/apcor = %3.2f(%4.2f)'%(r4,pylab.average(abs(fs-f4*r4)/fs))
pylab.plot(fs,f4*r4,'b.',label=s)
pylab.plot(fs,f4,'bo',label='f4')
s='f3 w/apcor = %3.2f(%4.2f)'%(r3,pylab.average(abs(fs-f3*r3)/fs))
pylab.plot(fs,f3*r3,'g.',label=s)
pylab.plot(fs,f3,'go',label='f3')
s='f2 w/apcor = %3.2f(%4.2f)'%(r2,pylab.average(abs(fs-f2*r2)/fs))
pylab.plot(fs,f2*r2,'r.',label=s)
pylab.plot(fs,f2,'ro',label='f2')
#pylab.plot(fs,f1,'co',label='f1')
#pylab.plot(fs,f242,'k.',label='f24')
pylab.legend(loc='best')
x=N.arange(0.,max(fs),10.)
y=x
pylab.plot(x,y,'k-')
#y=2.*x
#pylab.plot(x,y,'k--')
#y=3.*x
#pylab.plot(x,y,'k--')
#y=4.*x
#pylab.plot(x,y,'k--')
#y=5.*x
#pylab.plot(x,y,'k--')
pylab.xlabel('F(24) Input')
pylab.ylabel('F(24) measured')
#pylab.axis([0.,50.,0.,50.])
s=str(prefix)+'fluxcomp.eps'
pylab.savefig(s)
pylab.cla()
pylab.clf()
nbins=20
fmin=10.#min(fsim)
fmax=max(fsim)
df=5.#(fmax-fmin)/(1.*nbins)
bins=N.arange(fmin,(fmax+df),df)
(xbin,ybin,ybinerr)=mystuff.completeness(bins,fsim,matchflagsim)
s=str(prefix)+'FracComplvsFlux.dat'
outdat=open(s,'w')
print "Completeness vs Input Flux"
for i in range(len(xbin)):
print i, xbin[i],ybin[i],ybinerr[i]
t='%8.2f %8.4f %8.4f\n'%(xbin[i],ybin[i],ybinerr[i])
outdat.write(t)
outdat.close()
#for i in range(len(fsim)):
#if snrsim[i] > 3.:
# print i, fsim[i],matchflagsim[i],deblendsim[i],abs(fsim[i]-fmeas4[i]*1.67)/fsim[i],snrsim[i]
#(xbin,ybin2,ybin2err)=mystuff.scipyhist2(bins,fmeas4)
#pylab.plot(xbin,ybin,'bo')
#pylab.plot(xbin,ybin2,'ro')
#s=str(prefix)+'NDetectvsFlux.eps'
#pylab.savefig(s)
pylab.cla()
pylab.clf()
pylab.plot(xbin,ybin,'ko')
pylab.errorbar(xbin,ybin,yerr=ybinerr,fmt=None,ecolor='k')
s=str(prefix)+'FracComplvsFlux.eps'
pylab.axhline(y=1.0,ls='-')
pylab.axhline(y=.8,ls='--')
pylab.axvline(x=80.0,ls=':',color='b')
pylab.xlabel('Input Flux (uJy)')
pylab.ylabel('Completeness')
pylab.axis([0.,max(xbin)+df,-.05,1.05])
pylab.savefig(s)
|
rfinn/LCS
|
paper1code/LCScompleteness24.py
|
Python
|
gpl-3.0
| 14,141
|
[
"Galaxy"
] |
06dfc505b566eaec47baaea44f751000f4d61a05632405497891264059525f7b
|
import tomviz.operators
class LabelObjectAttributes(tomviz.operators.CancelableOperator):
    # Cancelable operator: progress_func below returns self.canceled so the
    # ITK computation can abort early when the user cancels.

    def transform_scalars(self, dataset):
        """Computes attributes of labeled objects: surface area, volume, and
        surface-area-to-volume ratio, returned as a spreadsheet dataset.
        """
        # Initial progress
        self.progress.value = 0
        self.progress.maximum = 100

        # Approximate percentage of work completed after each step in the
        # transform
        STEP_PCT = [10, 20, 80, 90, 100]

        try:
            import vtk
            from tomviz import itkutils
            from tomviz import utils
        except Exception as exc:
            print("Could not import necessary module(s)")
            raise exc

        returnValues = None

        # Label maps must have an integral scalar type; reject float images.
        scalarType = dataset.GetScalarType()
        if scalarType == vtk.VTK_FLOAT or scalarType == vtk.VTK_DOUBLE:
            raise Exception(
                "Label Object Attributes works only on \
                images with integral types.")

        try:
            self.progress.value = STEP_PCT[0]
            self.progress.message = "Computing label object attributes"

            # Set up arrays to hold the shape attribute data
            def progress_func(fraction):
                # Map ITK's [0, 1] fraction into the STEP_PCT[1]..STEP_PCT[2]
                # band; returning self.canceled lets the worker stop early.
                self.progress.value = \
                    int(fraction * (STEP_PCT[2] - STEP_PCT[1]) + STEP_PCT[1])
                return self.canceled

            shape_label_map = itkutils. \
                get_label_object_attributes(dataset, progress_func)

            # None signals the computation was canceled or failed upstream.
            if shape_label_map is None:
                return returnValues

            num_label_objects = shape_label_map.GetNumberOfLabelObjects()

            column_names = ['SurfaceArea', 'Volume', 'SurfaceAreaToVolumeRatio']
            import numpy as np
            # num_label_objects rows, 3 columns
            table = np.zeros((num_label_objects, len(column_names)))

            self.progress.message = "Computing attribute values"
            for i in range(0, num_label_objects):
                label_object = shape_label_map.GetNthLabelObject(i)
                surface_area = label_object.GetPerimeter()
                table[i, 0] = surface_area
                volume = label_object.GetPhysicalSize()
                table[i, 1] = volume
                # NOTE(review): assumes volume is non-zero for every label
                # object -- confirm upstream guarantees this.
                table[i, 2] = surface_area / volume

            self.progress.value = STEP_PCT[3]
            self.progress.message = "Creating spreadsheet of results"

            # Create a spreadsheet data set from table data
            spreadsheet = utils.make_spreadsheet(column_names, table)

            # Set up dictionary to return operator results
            returnValues = {}
            returnValues["component_statistics"] = spreadsheet
            self.progress.value = STEP_PCT[4]
        except Exception as exc:
            print("Problem encountered while running %s" %
                  self.__class__.__name__)
            raise exc

        return returnValues
|
cryos/tomviz
|
tomviz/python/LabelObjectAttributes.py
|
Python
|
bsd-3-clause
| 2,976
|
[
"VTK"
] |
b8744b1a97f516e6ed2788429948b32c83ddc168f69e3cd6418332a6b8d5ce55
|
import llvm
import llvm.core
import sys
from functools import reduce
from llvmpy import api
from collections import defaultdict, namedtuple
from .Analysis import Analysis
from .Function import Function
from .Subtask import Subtask
from .AtomicBasicBlock import S, E, ControlFlowEdge
import logging
from .common import Node, Edge, EdgeType, NodeType
class LLVMPYAnalysis(Analysis):
    """ Generate an ABB graph from LLVM intermediate code (.ll) """
    def __init__(self, files, mergedoutput):
        # files: paths of the .ll modules to link and analyse.
        # mergedoutput: writable file object receiving the transformed module.
        super(LLVMPYAnalysis, self).__init__()
        self.files = files
        self.outputfile = mergedoutput
    def requires(self):
        # The OIL system description must have been parsed first.
        return ["read-oil"]
    # Alias under which this analysis pass is registered.
    pass_alias = "llvmpy"
    def do(self):
        """ Link all input modules together and produce an ABB graph representation """
        if len(self.files) > 10:
            # Avoid flooding the log with a long file list.
            logging.info("reading %d .ll files", len(self.files))
        else:
            logging.info("reading %s", self.files)
        self.source_module = self.__combine_source_modules(self.files)
        # Ensure every call instruction sits in its own basic block.
        self.__split_basic_blocks_at_calls()
        # Prepend a magic kickoff() call to every real (sub)task entry.
        self.__add_kickoff_to_subtask_entries()
        # Build a dict with key: function, value: list of BBs
        self.__functions = self.__transform()
        self.__setupCFG()
        self.__add_basic_blocks()
        # Write out source_module
        print(self.source_module, file=self.outputfile)
    @property
    def functions(self):
        """ Returns a dictionary having a function as key, and a list of BBs as value """
        return self.__functions
    def get_source(self):
        """ Returns the LLVMPY source module object """
        return self.source_module
    def __transform(self):
        """ Wrap each llvmpy basic block into our own BB representation """
        # bbid is globally unique across all functions of the module.
        bbid = 0
        funcs = defaultdict(list)
        for function in self.source_module.functions:
            for bb in function.basic_blocks:
                mbb = BB(bb, bbid)
                bbid += 1
                funcs[function].append(mbb)
        return funcs
    def __setupCFG(self):
        """ Build the CFG using the llvmpy basic blocks """
        for fname, bbs in self.functions.items():
            blocks = {}
            # Index blocks by their textual IR; the assert enforces that
            # the text uniquely identifies a block within the function.
            for basic_block in bbs:
                program_code = str(basic_block.llvmbb)
                assert not program_code in blocks, "Duplicate block contents %s"% program_code
                blocks[program_code] = basic_block
            # Successors are raw llvmpy blocks; map them back to our BB
            # wrappers via their textual representation.
            for basic_block in bbs:
                for successor in basic_block.get_successors():
                    targetbb = blocks[str(successor)]
                    basic_block.add_cfg_edge(targetbb, E.basicblock_level)
    def __combine_source_modules(self, files):
        """ Combine and link all source modules to a single module """
        source_modules = []
        # Load all source modules
        # NOTE(review): the open() handle is never closed explicitly.
        for filename in files:
            source_modules.append(llvm.core.Module.from_assembly(open(filename)))
        # Link them all together
        for idx in range(1, len(source_modules)):
            source_modules[0].link_in(source_modules[idx])
        return source_modules[0]
    def __add_kickoff_to_subtask_entries(self):
        """ Add a call to kickoff at each entry of a user (sub)task """
        int_ty = llvm.core.Type.int(32) # llvmpy was picky on generating 'void'
        func_ty = llvm.core.Type.function(int_ty, [int_ty])
        kickoff = llvm.core.Function.new(self.source_module, func_ty, "kickoff")
        arg = llvm.core.Constant.int(int_ty, 0)
        # Traverse list of subtasks from the systemdescription
        # NOTE(review): self.system_graph is presumably supplied by the
        # Analysis base class / framework -- confirm.
        for st in self.system_graph.subtasks:
            if st.is_real_thread(): # omit Alarmhandler. These are generated later.
                # Get corresponding llvmpy function object
                func = self.source_module.get_function_named(st.function_name)
                # Add magic call to kickoff at the beginning
                entrybb = func.entry_basic_block
                # Build a call to magic kickoff routine
                bldr = llvm.core.Builder.new(entrybb)
                bldr.position_at_beginning(entrybb)
                bldr.call(kickoff, [arg], "kickoff_call_%s" % func.name)
    def __split_basic_blocks_at_calls(self):
        """ Splits up the basic blocks at function calls """
        # Each call instruction ends up isolated: a split is made before the
        # call itself, and (since wasCall stays set) before the instruction
        # that follows it.  wasCall is reset only by a non-call instruction.
        wasCall = False
        for function in self.source_module.functions:
            splitCounter = 0
            for bb in function.basic_blocks:
                instList = bb.instructions
                bb = bb._ptr
                while instList:
                    inst = instList[0]
                    del instList[0]
                    if type(inst) == llvm.core.CallOrInvokeInstruction:
                        wasCall = True
                    if wasCall:
                        bb = api.llvm.BasicBlock.splitBasicBlock(bb,inst._ptr, "%s_split_%d" % (function.name,splitCounter))
                        splitCounter += 1
                    if not type(inst) == llvm.core.CallOrInvokeInstruction:
                        wasCall = False
        #print("Transformed:\n", str(self.source_module))
    def __add_basic_blocks(self):
        """ Populate the system graph with Functions and ABBs from the IR """
        graph = self.system_graph
        # Gather functions
        for llvmfunc,llvmbbs in self.functions.items():
            function = graph.find(Function, llvmfunc.name)
            if function == None:
                # Not existing yet, just add it...
                function = Function(llvmfunc.name)
                graph._functions[llvmfunc.name] = function
            # Add llvm function object
            function.set_llvm_function(llvmfunc)
            # Add ABBs
            for bb in llvmbbs:
                abb = graph.new_abb([bb])
                function.add_atomic_basic_block(abb)
                # and set entry abb
                if bb.llvmbb is llvmfunc.entry_basic_block:
                    function.set_entry_abb(abb)
                # If the flag dOSEK_IGNORE_INTERRUPT_SYSCALLS
                # is set, we demote all interrupt control system
                # calls to computation blocks.
                if "dOSEK_IGNORE_INTERRUPT_SYSCALLS" in graph.conf:
                    if bb.syscall in [S.DisableAllInterrupts, S.EnableAllInterrupts,
                                      S.SuspendOSInterrupts, S.ResumeOSInterrupts,
                                      S.SuspendAllInterrupts, S.ResumeAllInterrupts]:
                        bb.syscall = S.computation
                # make it a syscall and add arguments
                if bb.is_syscall():
                    abb.make_it_a_syscall(bb.get_syscall(), bb.get_syscall_arguments())
                    # Rename syscall in llvm IR, appending ABB id
                    bb.rename_syscall(abb, self.get_source())
        # Add all implicit intra function control flow graphs
        for func in graph.functions:
            for abb in func.abbs:
                exit_bb = abb.get_exit_bb()
                if not exit_bb:
                    #logging.info("llvmpy_analysis, intra function CFG -> skipping: %s", abb.dump())
                    continue
                nextbbs = exit_bb.get_outgoing_nodes(E.basicblock_level)
                for bb in nextbbs:
                    nextabb = bb.get_parent_ABB()
                    abb.add_cfg_edge(nextabb, E.function_level)
            # Remove Dangling Blocks that have no incoming blocks
            # edges, but aren't the entry block. It seems llvm does
            # generate such blocks.
            for abb in func.abbs:
                if len(abb.get_incoming_nodes(E.function_level)) == 0 \
                   and abb != func.entry_abb:
                    func.remove_abb(abb)
        # Find all return blocks for functions
        for function in graph.functions:
            ret_abbs = []
            for abb in function.abbs:
                if len(abb.get_outgoing_edges(E.function_level)) == 0:
                    ret_abbs.append(abb)
            if len(ret_abbs) == 0:
                logging.info("Endless loop in %s", function)
            elif len(ret_abbs) > 1:
                # Add an artificial exit block
                abb = graph.new_abb()
                function.add_atomic_basic_block(abb)
                for ret in ret_abbs:
                    ret.add_cfg_edge(abb, E.function_level)
                function.set_exit_abb(abb)
            else:
                function.set_exit_abb(ret_abbs[0])
            if isinstance(function, Subtask) and function.conf.is_isr:
                if not function.exit_abb or not function.exit_abb.isA(S.iret):
                    # All ISR function get an additional iret block
                    iret = graph.new_abb()
                    function.add_atomic_basic_block(iret)
                    iret.make_it_a_syscall(S.iret, [function])
                    function.exit_abb.add_cfg_edge(iret, E.function_level)
                    function.set_exit_abb(iret)
        # Gather all called Functions in the ABBs, this has to be done, after all ABBs are present
        for abb in graph.abbs:
            called_funcs = set()
            # Visit all BBs and gather all called Functions
            for bb in abb.get_basic_blocks():
                if bb.calls_function():
                    callee = graph.find(Function, bb.calledFunc.name)
                    if callee:
                        called_funcs.add(callee)
                        abb.called_functions.add(callee)
            # Populate function level set of called functions, needed in ABBMergePass
            abb.function.called_functions.update(called_funcs)
class BB(Node):
    """ Our own BasicBlock representation, hiding the ugly llvmpy bindings """
    def __init__(self, llvmbb, bb_id):
        Node.__init__(self, ControlFlowEdge, "BB%d" %(bb_id), color="yellow")
        # The wrapped llvmpy basic-block object.
        self.llvmbb = llvmbb
        # Successor llvmpy blocks, resolved once from the terminator.
        self.successors = self.__find_successors()
        # System-call classification; defaults to a plain computation block.
        self.syscall = S.fromString("create_computation_type_as_default")
        self.syscallarguments = []
        # Call instruction and called function contained in this block, if
        # any; both are filled in by __find_syscall() below.
        self.callInst = None
        self.calledFunc = None
        self.__find_syscall()
        # Enclosing atomic basic block; set later via set_parent_ABB().
        self.parent_ABB = None
    def __find_successors(self):
        """ Collect the successor blocks from this block's terminator """
        successors = []
        terminator = self.get_terminator()
        if terminator:
            succnum = terminator.getNumSuccessors()
            for i in range(succnum):
                succ = terminator.getSuccessor(i)
                successors.append(succ)
        return successors
    def __extract_event_operand(self, argument):
        """ Statically resolve an event-mask argument.

            Handles integer constants, loads of named globals, and OR
            combinations thereof; anything else aborts. """
        if hasattr(argument, "z_ext_value"):
            # If an integer is given
            return [argument.z_ext_value]
        elif argument.opcode == llvm.core.OPCODE_LOAD:
            x = argument.operands[0].name
            return [x]
        elif argument.opcode == llvm.core.OPCODE_OR:
            arg0 = self.__extract_event_operand(argument.operands[0])
            arg1 = self.__extract_event_operand(argument.operands[1])
            return arg0 + arg1
        assert False, "We cannot extract the Event Mask statically"
    def __extract_system_object_operand(self, argument):
        """ Map a system-object argument to its name.

            Already-resolved list/tuple/str values pass through unchanged;
            loads of named globals yield the global's name; otherwise None. """
        if type(argument) in (list, tuple, str):
            return argument
        if hasattr(argument, "opcode") and argument.opcode == llvm.core.OPCODE_LOAD:
            x = argument.operands[0].name
            return x
        return None
    def __find_syscall(self):
        """ Extract System Call if present in this basic block """
        for inst in self.instructions:
            if type(inst) == llvm.core.CallOrInvokeInstruction:
                calledfunc = inst.called_function
                if calledfunc:
                    self.syscall = S.fromString(calledfunc.name) # Store type
                    self.calledFunc = calledfunc # save called LLVMpy function object
                    self.callInst = inst # save calling LLVMpy instruction
                    if self.is_syscall():
                        # Copy the List (drop the last operand: the callee)
                        args = list(inst.operands[0:-1])
                        # Extract the Event arguments
                        if self.syscall in (S.WaitEvent, S.ClearEvent, S.GetEvent):
                            args[0] = self.__extract_event_operand(args[0])
                        if self.syscall in (S.SetEvent,):
                            args[1] = self.__extract_event_operand(args[1])
                        for op in args:
                            opstring = self.__extract_system_object_operand(op)
                            self.syscallarguments.append(opstring)
                    """ Attention: Here we stop at the first found call!
                        This is ok, as we split up every call into a single BB """
                    return
    def rename_syscall(self, abb, source_module):
        """ Rename syscall to a unique name, appending ABB#.
            This has to be done after ABB generation.
        """
        assert not abb == None
        if self.syscall.name == 'StartOS': # StartOS is not renamed
            return
        newname = 'OSEKOS_' + self.syscall.name + '__ABB' + str(abb.id())
        # Create a fresh declaration with the same signature and redirect
        # the call instruction to it.
        newfun = llvm.core.Function.new(source_module, self.calledFunc._ptr.getFunctionType(), newname)
        self.callInst.called_function = newfun
    def set_parent_ABB(self, parent):
        """ Set parent Atomic Basic Block which includes this basic block """
        self.parent_ABB = parent
    def get_parent_ABB(self):
        """ Returns the Atomic Basic Blocks in which this basic block resides """
        return self.parent_ABB
    def get_call_instruction(self):
        """ Returns the call instruction of this block, or None """
        return self.callInst
    def get_called_function(self):
        """ Returns the llvmpy function called from this block, or None """
        return self.calledFunc
    def calls_function(self):
        """ True if this basic block contains a function call """
        return not self.calledFunc == None
    def is_syscall(self):
        """ Is this basic block actually a system call? """
        return self.syscall.isRealSyscall()
    def get_syscall(self):
        """ Well, returns the system call """
        assert self.is_syscall()
        return self.syscall
    def get_syscall_arguments(self):
        """ Returns a list of system call arguments """
        assert self.is_syscall()
        return self.syscallarguments
    def get_successors(self):
        """ Return a list of succeeding basic blocks """
        return self.successors
    def get_parent(self):
        """ Get the parent llvm basic block """
        return self.llvmbb._ptr.getParent()
    def get_terminator(self):
        """ Get the terminator instruction of the wrapped block """
        return self.llvmbb._ptr.getTerminator()
    def split_basic_block(self, instruction, newlabel):
        """ Split basic block at the given instruction, adding the label newlabel """
        return api.llvm.BasicBlock.splitBasicBlock(self.llvmbb._ptr, instruction._ptr, newlabel)
    @property
    def instructions(self):
        """ Returns the instructions of the corresponding basic block """
        return self.llvmbb.instructions
    def __str__(self):
        """ The string representation of the basic block in llvm IR syntax """
        return str(self.llvmbb)
|
danceos/dosek
|
generator/analysis/LLVMPYAnalysis.py
|
Python
|
lgpl-3.0
| 15,053
|
[
"VisIt"
] |
c7c23ae63e35c727c27be2cf9428e28e334fc7ad77a1d0ac30c58cc7a406a850
|
#
# This file is part of jetflows.
#
# Copyright (C) 2014, Henry O. Jacobs (hoj201@gmail.com), Stefan Sommer (sommer@di.ku.dk)
# https://github.com/nefan/jetflows.git
#
# jetflows is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# jetflows is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with jetflows. If not, see <http://www.gnu.org/licenses/>.
#
"""
Wrapper for gaussian.py
"""
import numpy as np
import gaussian
N = None
DIM = None
SIGMA = None
def Gaussian_monomial(x, n):
    """Return (x/SIGMA)**n * exp(-0.5*(x/SIGMA)**2).

    The Gaussian factor is spread across the n-th power so large
    monomial values are damped before exponentiation.
    """
    scaled = x / SIGMA
    factor = scaled * np.exp(-(0.5 / n) * scaled ** 2)
    return factor ** n
def diff_1D_Gaussian_cpp(x, k, SIGMA, parallel=False):
    """Evaluate the k-th derivative of a 1-D Gaussian via the C++ backend.

    The backend works on a flat buffer, so the input is flattened and the
    result reshaped back to x's original shape.
    """
    original_shape = x.shape
    result = np.zeros(x.size)
    gaussian.diff_1D_Gaussian_parallel_cpp(x.flatten(), result, k, SIGMA, parallel)
    return result.reshape(original_shape)
def diff_1D_Gaussian( x , k ):
    """Return the k-th derivative (0 <= k <= 6) of the 1-D Gaussian
    exp(-0.5*(x/SIGMA)**2), evaluated elementwise on x.

    Uses Gaussian_monomial for the (x/SIGMA)**n * G terms; the visible
    coefficient pattern (x, x**2-1, x**3-3x, ...) matches the
    Hermite-polynomial expansion of Gaussian derivatives.

    Raises:
        ValueError: if k is outside the supported range 0..6.
    """
    G = np.exp( -0.5 * (x / SIGMA)**2 )
    if k == 0:
        return G
    elif k==1:
        return -1.*Gaussian_monomial(x,1) / (SIGMA)
    elif k==2:
        return ( Gaussian_monomial(x,2) - G ) / (SIGMA**2)
    elif k==3:
        return -1.*( Gaussian_monomial(x,3) - 3.*Gaussian_monomial(x,1)) / (SIGMA**3)
    elif k==4:
        return (Gaussian_monomial(x,4) - 6.*Gaussian_monomial(x,2) + 3.*G ) / (SIGMA**4)
    elif k==5:
        return (-1.*(Gaussian_monomial(x,5) - 10.*Gaussian_monomial(x,3) + 15.*Gaussian_monomial(x,1) ))/(SIGMA**5)
    elif k==6:
        return (Gaussian_monomial(x,6) - 15.*Gaussian_monomial(x,4) + 45.*Gaussian_monomial(x,2) -15.*G)/(SIGMA**6)
    else:
        # Previously this printed a message and returned the string
        # 'error', which silently poisoned downstream arithmetic.
        raise ValueError('diff_1D_Gaussian: unsupported derivative order k=%d' % k)
def derivatives_of_Gaussians( p1 , p2, parallel=False ):
    """Evaluate a Gaussian kernel and its first six derivative tensors on
    all pairs of points from p1 and p2.

    p1, p2   : arrays of shape (N_p1, DIM) and (N_p2, DIM); relies on the
               module globals DIM and SIGMA being set by the caller.
    parallel : forwarded to the C++ backend (diff_1D_Gaussian_cpp).

    Returns (G, DG, D2G, D3G, D4G, D5G, D6G), where the k-th derivative
    tensor has shape (N_p1, N_p2, DIM, ..., DIM) with k trailing DIM axes.
    Each mixed partial derivative factorises into a product of 1-D Gaussian
    derivatives; the multi-index `alpha` tracks the derivative order per
    coordinate axis.
    """
    N_p1 = p1.shape[0]
    N_p2 = p2.shape[0]
    # Pairwise coordinate differences and squared distances.
    r_sq = np.zeros( [ N_p1 , N_p2 ] )
    dx = np.zeros( [N_p1,N_p2,DIM] )
    for a in range(0,DIM):
        dx[:,:,a] = np.outer( p1[:,a] , np.ones(N_p2) ) - np.outer( np.ones(N_p1), p2[:,a] )
        r_sq[:,:] = dx[:,:,a]**2 + r_sq[:,:]
    G = np.exp( - r_sq / (2.*SIGMA**2) )
    # Derivative tensors start at 1 and accumulate multiplicative factors.
    DG = np.ones( [N_p1,N_p2,DIM] )
    D2G = np.ones( [N_p1,N_p2,DIM,DIM] )
    D3G = np.ones( [N_p1,N_p2,DIM,DIM,DIM] )
    D4G = np.ones( [N_p1,N_p2,DIM,DIM,DIM,DIM] )
    D5G = np.ones( [N_p1,N_p2,DIM,DIM,DIM,DIM,DIM] )
    D6G = np.ones( [N_p1,N_p2,DIM,DIM,DIM,DIM,DIM,DIM] )
    # alpha[b] = how many times we differentiate along axis b.
    alpha = np.int_(np.zeros(DIM))
    #one derivative
    for a in range(0,DIM):
        alpha[a] = 1
        for b in range(0,DIM):
            #DG[:,:,a] = DG[:,:,a]*diff_1D_Gaussian( dx[:,:,b] , alpha[b] )
            DG[:,:,a] = DG[:,:,a]*diff_1D_Gaussian_cpp( dx[:,:,b] , alpha[b], SIGMA, parallel )
        alpha[a] = 0
    #two derivatives
    for a in range(0,DIM):
        alpha[a] = 1
        for b in range(0,DIM):
            alpha[b] = alpha[b] + 1
            for c in range(0,DIM):
                #D2G[:,:,a,b] = D2G[:,:,a,b]*diff_1D_Gaussian( dx[:,:,c] , alpha[c] )
                D2G[:,:,a,b] = D2G[:,:,a,b]*diff_1D_Gaussian_cpp( dx[:,:,c] , alpha[c], SIGMA, parallel )
            alpha[b] = alpha[b] - 1
        alpha[a] = 0
    #three derivatives
    for a in range(0,DIM):
        alpha[a] = 1
        for b in range(0,DIM):
            alpha[b] = alpha[b] + 1
            for c in range(0,DIM):
                alpha[c] = alpha[c] + 1
                for d in range(0,DIM):
                    #D3G[:,:,a,b,c] = D3G[:,:,a,b,c]*diff_1D_Gaussian( dx[:,:,d] , alpha[d] )
                    D3G[:,:,a,b,c] = D3G[:,:,a,b,c]*diff_1D_Gaussian_cpp( dx[:,:,d] , alpha[d], SIGMA, parallel )
                alpha[c] = alpha[c] - 1
            alpha[b] = alpha[b] - 1
        alpha[a] = 0
    #four derivatives
    for a in range(0,DIM):
        alpha[a] = 1
        for b in range(0,DIM):
            alpha[b] = alpha[b] + 1
            for c in range(0,DIM):
                alpha[c] = alpha[c] + 1
                for d in range(0,DIM):
                    alpha[d] = alpha[d] + 1
                    for e in range(0,DIM):
                        #D4G[:,:,a,b,c,d] = D4G[:,:,a,b,c,d]*diff_1D_Gaussian( dx[:,:,e] , alpha[e] )
                        D4G[:,:,a,b,c,d] = D4G[:,:,a,b,c,d]*diff_1D_Gaussian_cpp( dx[:,:,e] , alpha[e], SIGMA, parallel )
                    alpha[d] = alpha[d] - 1
                alpha[c] = alpha[c] - 1
            alpha[b] = alpha[b] - 1
        alpha[a] = 0
    #five derivatives
    for a in range(0,DIM):
        alpha[a] = 1
        for b in range(0,DIM):
            alpha[b] = alpha[b] + 1
            for c in range(0,DIM):
                alpha[c] = alpha[c] + 1
                for d in range(0,DIM):
                    alpha[d] = alpha[d] + 1
                    for e in range(0,DIM):
                        alpha[e] = alpha[e] + 1
                        for f in range(0,DIM):
                            #D5G[:,:,a,b,c,d,e] = D5G[:,:,a,b,c,d,e]*diff_1D_Gaussian( dx[:,:,f] , alpha[f] )
                            D5G[:,:,a,b,c,d,e] = D5G[:,:,a,b,c,d,e]*diff_1D_Gaussian_cpp( dx[:,:,f] , alpha[f], SIGMA, parallel )
                        alpha[e] = alpha[e] - 1
                    alpha[d] = alpha[d] - 1
                alpha[c] = alpha[c] - 1
            alpha[b] = alpha[b] - 1
        alpha[a] = 0
    #six derivatives
    for a in range(0,DIM):
        alpha[a] = 1
        for b in range(0,DIM):
            alpha[b] = alpha[b] + 1
            for c in range(0,DIM):
                alpha[c] = alpha[c] + 1
                for d in range(0,DIM):
                    alpha[d] = alpha[d] + 1
                    for e in range(0,DIM):
                        alpha[e] = alpha[e] + 1
                        for f in range(0,DIM):
                            alpha[f] = alpha[f] + 1
                            for g in range(0,DIM):
                                #D6G[:,:,a,b,c,d,e,f] = D6G[:,:,a,b,c,d,e,f]*diff_1D_Gaussian( dx[:,:,g] , alpha[g] )
                                D6G[:,:,a,b,c,d,e,f] = D6G[:,:,a,b,c,d,e,f]*diff_1D_Gaussian_cpp( dx[:,:,g] , alpha[g], SIGMA, parallel )
                            alpha[f] = alpha[f] - 1
                        alpha[e] = alpha[e] - 1
                    alpha[d] = alpha[d] - 1
                alpha[c] = alpha[c] - 1
            alpha[b] = alpha[b] - 1
        alpha[a] = 0
    return G, DG, D2G, D3G, D4G, D5G , D6G
|
stefansommer/dpca
|
code/landmarks/kernels/pyGaussian.py
|
Python
|
gpl-3.0
| 6,717
|
[
"Gaussian"
] |
bcf134b807f23f0c034718c871d5098202a328773ce837fe90cc4a8f723dcf4a
|
# -*- coding: utf-8 -*-
"""
Create an initial ATP Profile for an ECs Mesh and write it out as .vtp.
"""
import os
import sys
# Run in current directory.
os.chdir(os.path.dirname(os.path.abspath(__file__)))
# Import path for the GenerateATPMap script.
importPath = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../util'))
if not importPath in sys.path:
    sys.path.insert(1, importPath)
del importPath
import GenerateATPMapV2
# This is for the c8064 mesh.
# Input centreline geometry and EC mesh; output (debug) ATP map files.
GenerateATPMapV2.centrelineFile = "c8064Centreline.vtk"
GenerateATPMapV2.meshFile = "quadMeshFullECc8064.vtp"
GenerateATPMapV2.debugAtpFile = "quadMeshFullATPV2c8064.vtp"
GenerateATPMapV2.atpFile = "quadMeshFullATPc8064.vtp"
# Mesh layout parameters for the c8064 case.
GenerateATPMapV2.numBranches = 3
GenerateATPMapV2.numQuads = 8064
GenerateATPMapV2.numAxialQuads = 64
GenerateATPMapV2.numECsPerCol = 4
# ATP profile parameters; their precise semantics are defined by
# GenerateATPMapV2 -- NOTE(review): confirm units of atpGradient there.
GenerateATPMapV2.atpGradient = 3.3
GenerateATPMapV2.atpMin = 0.1
GenerateATPMapV2.atpMax = 1.0
def main():
    # Delegate the actual ATP mesh construction to the shared module.
    GenerateATPMapV2.buildATPMesh()
if __name__ == '__main__':
    # Python 2 script (print statements).
    print "Starting", os.path.basename(__file__)
    main()
    print "Exiting", os.path.basename(__file__)
|
BlueFern/DBiharMesher
|
meshes/c8064/Generate8064ATPMapV2.py
|
Python
|
gpl-2.0
| 1,119
|
[
"VTK"
] |
510f786a8cfa96a7c492df38e9b3a79241d59e7eaf9bfe82b2163c351fd8c5c5
|
#
# ----------------------------------------------------------------------------------------------------
#
# Copyright (c) 2016, 2016, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2 only, as
# published by the Free Software Foundation.
#
# This code is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# version 2 for more details (a copy is included in the LICENSE file that
# accompanied this code).
#
# You should have received a copy of the GNU General Public License version
# 2 along with this work; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
# or visit www.oracle.com if you need additional information or have any
# questions.
#
# ----------------------------------------------------------------------------------------------------
import mx
from argparse import ArgumentParser
_microbench_executor = None
def set_microbenchmark_executor(ex):
    """Install *ex* as the process-wide microbenchmark executor.

    May be called at most once.  Raises AssertionError on a second call
    (same exception type as the previous assert-based check).
    """
    global _microbench_executor
    # Explicit raise instead of `assert`: asserts are stripped under
    # `python -O`, which would silently allow overriding the executor.
    if _microbench_executor is not None:
        raise AssertionError('cannot override microbenchmark executor twice')
    _microbench_executor = ex
def get_microbenchmark_executor():
    """Return the active executor, lazily installing the default
    MicrobenchExecutor on first use."""
    executor = _microbench_executor
    if not executor:
        set_microbenchmark_executor(MicrobenchExecutor())
        executor = _microbench_executor
    return executor
class MicrobenchExecutor(object):
    """Drives JMH microbenchmark runs for mx.

    Subclasses can hook into option registration and VM-argument
    translation via the overridable methods at the bottom.
    """

    def microbench(self, args):
        """run JMH microbenchmark projects"""
        parser = ArgumentParser(prog='mx microbench', description=microbench.__doc__,
                                usage="%(prog)s [command options|VM options] [-- [JMH options]]")
        parser.add_argument('--jar', help='Explicitly specify micro-benchmark location')
        self.add_arguments(parser)
        known_args, args = parser.parse_known_args(args)

        vmArgs, jmhArgs = mx.extract_VM_args(args, useDoubleDash=True)
        vmArgs = self.parseVmArgs(vmArgs)

        # Scan the JMH arguments for '-f0' (or '-f 0'), which disables forking.
        forking = True
        for idx, flag in enumerate(jmhArgs):
            if not flag.startswith('-f'):
                continue
            if flag == '-f' and idx + 1 < len(jmhArgs):
                flag += jmhArgs[idx + 1]
            try:
                if int(flag[2:]) == 0:
                    forking = False
            except ValueError:
                pass

        if known_args.jar:
            # use the specified jar
            args = ['-jar', known_args.jar]
            if not forking:
                args += vmArgs
        else:
            # find all projects with a direct JMH dependency
            jmhProjects = [p.name for p in mx.projects_opt_limit_to_suites()
                           if 'JMH' in [x.name for x in p.deps]]
            # execute JMH runner
            args = ['-cp', mx.classpath(jmhProjects)]
            if not forking:
                args += vmArgs
            args += ['org.openjdk.jmh.Main']

        if forking:
            def quoted(s):
                # Protect arguments containing spaces when re-joined.
                return '"' + s + '"' if " " in s else s
            joined = ' '.join(quoted(a) for a in self.parseForkedVmArgs(vmArgs))
            args += ['--jvmArgsPrepend', joined]

        self.run_java(args + jmhArgs)

    def add_arguments(self, parser):
        """Hook: register extra command-line options; no-op by default."""
        pass

    def run_java(self, args):
        """Hook: launch the JVM with the assembled arguments."""
        mx.run_java(args)

    def parseVmArgs(self, vmArgs):
        """Hook: translate VM arguments before use; identity by default."""
        return vmArgs

    def parseForkedVmArgs(self, vmArgs):
        """Hook: translate VM arguments for forked JVMs; identity by default."""
        return vmArgs
def microbench(args):
    # Delegate to the process-wide executor.  Deliberately left without a
    # docstring: MicrobenchExecutor.microbench passes microbench.__doc__
    # to ArgumentParser as its description, so adding one would change
    # the help output.
    executor = get_microbenchmark_executor()
    executor.microbench(args)
|
olpaw/mx
|
mx_microbench.py
|
Python
|
gpl-2.0
| 3,958
|
[
"VisIt"
] |
759b8b57e60646b83f6446212b4f4ccc86a2d262e0c1d370e2d8212d25a1870c
|
#
# Copyright 2014, 2020 James Kermode (Warwick U.)
# 2019 James Brixey (Warwick U.)
# 2015 Punit Patel (Warwick U.)
# 2014 Lars Pastewka (U. Freiburg)
#
# matscipy - Materials science with Python at the atomic-scale
# https://github.com/libAtoms/matscipy
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import itertools
import functools
import numpy as np
from numpy.linalg import norm, inv
def gcd(a, b):
    """Calculate the greatest common divisor of a and b"""
    # Euclid's algorithm.  The sign of the result follows Python's modulo
    # semantics, so it may be negative for negative inputs; callers that
    # need a positive divisor take abs() of the result.
    while b != 0:
        a, b = b, a % b
    return a
class MillerIndex(np.ndarray):
    """
    Representation of a three or four index Miller direction or plane

    A :class:`MillerIndex` can be constructed from a vector or parsed from a string::

        x = MillerIndex('-211')
        y = MillerIndex('111', type='plane')
        z = x.cross(y)
        print(x) # prints "[-211]"
        print(y) # prints "(111)", note round brackets denoting a plane
        print(z.latex())
        assert(angle_between(x,y) == pi/2.)
        assert(angle_between(y,z) == pi/2.)
        assert(angle_between(x,z) == pi/2.)
    """

    # Make numpy prefer MillerIndex in mixed-operand binary operations.
    __array_priority__ = 101.0

    # Bracket styles conventionally used for each kind of index.
    brackets = {'direction': '[]',
                'direction_family': '<>',
                'plane': '()',
                'plane_family': '{}'}

    all_brackets = list(itertools.chain(*brackets.values()))

    def __new__(cls, v=None, type='direction'):
        """Create a MillerIndex from a string or a length-3/4 sequence.

        Raises ValueError for missing or wrongly-sized input.
        """
        if v is None:
            # Previously this fell through to len(None) and raised a
            # confusing TypeError; fail early with the documented error.
            raise ValueError('%s input v should be of length 3 or 4' % cls.__name__)
        if isinstance(v, str):
            v = MillerIndex.parse(v)
        if len(v) == 3 or len(v) == 4:
            self = np.ndarray.__new__(cls, len(v))
            self[:] = v
        else:
            raise ValueError('%s input v should be of length 3 or 4' % cls.__name__)
        self.type = type
        self.simplify()
        return self

    @staticmethod
    def _gcd(a, b):
        """Euclidean gcd; operates on the (float) components of the index."""
        while b:
            a, b = b, a % b
        return a

    def __array_finalize__(self, obj):
        # Invoked by numpy for views and copies: propagate the index type,
        # defaulting to 'direction' when the source has none.
        if obj is None:
            return
        self.type = getattr(obj, 'type', 'direction')

    def __repr__(self):
        return ('%s(['+'%d'*len(self)+'])') % ((self.__class__.__name__,) + tuple(self))

    def __str__(self):
        bopen, bclose = MillerIndex.brackets[self.type]
        return (bopen+'%d'*len(self)+bclose) % tuple(self)

    def latex(self):
        """
        Format this :class:`MillerIndex` as a LaTeX string
        """
        s = '$'
        bopen, bclose = MillerIndex.brackets[self.type]
        s += bopen
        for component in self:
            if component < 0:
                s += r'\bar{%d}' % abs(component)
            else:
                s += '%d' % component
        s += bclose
        s += '$'
        return s

    @classmethod
    def parse(cls, s):
        r"""
        Parse a Miller index string

        Negative indices can be denoted by:

        1. leading minus sign, e.g. ``[11-2]``
        2. trailing ``b`` (for 'bar'), e.g. ``112b``
        3. LaTeX ``\bar{}``, e.g. ``[11\bar{2}]`` (which renders as :math:`[11\bar{2}]` in LaTeX)

        Leading or trailing brackets of various kinds are ignored.
        i.e. ``[001]``, ``{001}``, ``(001)``, ``[001]``, ``<001>``, ``001`` are all equivalent.

        Returns an array of components (i,j,k) or (h,k,i,l)
        """
        if not isinstance(s, str):
            raise TypeError("Can't parse from %r of type %r" % (s, type(s)))
        orig_s = s
        # Normalise: \bar{ becomes a minus sign; all bracket chars vanish.
        for (a, b) in [(r'\bar{', '-')] + [(b, '') for b in MillerIndex.all_brackets]:
            s = s.replace(a, b)
        L = list(s)
        components = np.array([1, 1, 1, 1])  # space for up to 4 elements
        i = 3  # parse backwards from end of string
        while L:
            if i < 0:
                raise ValueError('Cannot parse Miller index from string "%s", too many components found' % orig_s)
            c = L.pop()
            if c == '-':
                # The minus sign precedes the digit it negates, which was
                # already consumed (hence components[i+1]).
                if i == 3:
                    raise ValueError('Miller index string "%s" cannot end with a minus sign' % orig_s)
                components[i+1] *= -1
            elif c == 'b':
                components[i] *= -1
            elif c in ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9']:
                components[i] *= int(c)
                i -= 1
            else:
                raise ValueError('Unexpected character "%s" in miller index string "%s"' % (c, orig_s))
        # (An unreachable `self.simplify()` that used to follow this chain
        # was removed: every branch returns or raises, and `self` does not
        # exist inside a classmethod.)
        if i == 0:
            return components[1:]
        elif i == -1:
            return components
        else:
            raise ValueError('Cannot parse Miller index from string %s, too few components found' % orig_s)

    def simplify(self):
        """
        Simplify in place by dividing through by the greatest common divisor
        """
        d = abs(functools.reduce(MillerIndex._gcd, self))
        # The all-zero index has gcd 0; leave it untouched instead of
        # producing NaNs via a division by zero.
        if d != 0:
            self[:] /= d

    def simplified(self):
        """Return a simplified copy, leaving self unchanged."""
        copy = self.copy()
        copy.simplify()
        return copy

    def norm(self):
        """Euclidean norm of the components."""
        return np.linalg.norm(self)

    def normalised(self):
        """Return the unit vector of the 3-index form as a plain ndarray."""
        a = self.as3()
        return np.array(a, dtype=float)/a.norm()

    hat = normalised

    def cross(self, other):
        """Simplified cross product with another Miller index (3-index form)."""
        a = self.as3()
        b = MillerIndex(other).as3()
        return np.cross(a, b).view(MillerIndex).simplified()

    def cosine(self, other):
        """Cosine of the angle between this index and *other*."""
        other = MillerIndex(other)
        return np.dot(self.normalised(), other.normalised())

    def angle(self, other):
        """Angle between this index and *other*, in radians."""
        return np.arccos(self.cosine(other))

    def as4(self):
        """Return the equivalent four-index (h k i l) Miller-Bravais form."""
        if len(self) == 4:
            return self
        else:
            h, k, l = self
            # Miller-Bravais redundant index is i = -(h + k); the previous
            # implementation incorrectly computed -(h + l).
            i = -(h+k)
            return MillerIndex((h, k, i, l))

    def as3(self):
        """Return the equivalent three-index (h k l) form."""
        if len(self) == 3:
            return self
        else:
            h, k, i, l = self
            return MillerIndex((h, k, l))

    def plane_spacing(self, a):
        """Interplanar spacing for a cubic lattice with lattice constant *a*."""
        return a/self.as3().norm()
def MillerPlane(v):
    """Construct a :class:`MillerIndex` representing a plane (``type="plane"``)."""
    return MillerIndex(v, type='plane')
def MillerDirection(v):
    """Construct a :class:`MillerIndex` representing a direction (the default type)."""
    return MillerIndex(v, type='direction')
def angle_between(a, b):
    """Angle between crystallographic directions a=[ijk] and b=[lmn], in radians."""
    direction = MillerIndex(a)
    return direction.angle(b)
def make_unit_slab(unit_cell, axes):
    """
    General purpose unit slab creation routine

    Only tested with cubic unit cells.

    Code translated from quippy.structures.unit_slab()
    https://github.com/libAtoms/QUIP/blob/public/src/libAtoms/Structures.f95

    Arguments
    ---------
    unit_cell : Atoms
        Atoms object containing primitive unit cell
    axes: 3x3 array
        Miller indices of desired slab, as columns

    Returns
    -------
    slab : Atoms
        Output slab, with axes aligned with x, y, z.
    """
    a1 = axes[:,0]
    a2 = axes[:,1]
    a3 = axes[:,2]
    # Rotation matrix whose rows are the normalised slab axes.
    rot = np.zeros((3,3))
    rot[0,:] = a1/norm(a1)
    rot[1,:] = a2/norm(a2)
    rot[2,:] = a3/norm(a3)
    pos = unit_cell.get_positions().T
    lattice = unit_cell.get_cell().T
    lattice = np.dot(rot, lattice)
    # Rotate the primitive cell so the requested axes align with x, y, z.
    at = unit_cell.copy()
    at.set_positions(np.dot(rot, pos).T)
    at.set_cell(lattice.T)
    # Build a 5x5x5 supercell centred on the origin, from which the new
    # unit cell is carved out below.
    sup = at * (5,5,5)
    sup.positions[...] -= sup.positions.mean(axis=0)
    # New cell vectors: integer (Miller-index) combinations of the
    # rotated lattice vectors.
    sup_lattice = np.zeros((3,3))
    for i in range(3):
        sup_lattice[:,i] = (axes[0,i]*lattice[:,0] +
                            axes[1,i]*lattice[:,1] +
                            axes[2,i]*lattice[:,2])
    sup.set_cell(sup_lattice.T, scale_atoms=False)
    # Form primitive cell by discarding atoms with
    # lattice coordinates outside range [-0.5,0.5]
    d = [0.01,0.02,0.03] # Small shift to avoid coincidental alignments
    i = 0
    g = inv(sup_lattice)
    sup_pos = sup.get_positions().T
    while True:
        # Fractional coordinate of atom i in the new cell (shifted by d).
        t = np.dot(g, sup_pos[:, i] + d)
        if (t <= -0.5).any() | (t >= 0.5).any():
            del sup[i]
            sup_pos = sup.get_positions().T
            i -= 1 # Retest since we've removed an atom
        if i == len(sup)-1:
            break
        i += 1
    # Wrap the surviving atoms back into the new cell.
    sup.set_scaled_positions(sup.get_scaled_positions())
    return sup
|
libAtoms/matscipy
|
matscipy/surface.py
|
Python
|
lgpl-2.1
| 8,658
|
[
"Matscipy"
] |
8721793ca0aac01c7bb5e063cffecfe90c4cb44e4b286a1279fc63054b8d5988
|
#!/usr/bin/env python3
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ast
import os
import sys
class Visitor(ast.NodeVisitor):
    """AST walker that prints every docstring it encounters.

    Each printed docstring is followed by a '---' separator line.
    """

    def visit(self, node):
        # ast.get_docstring raises TypeError for node types that cannot
        # carry a docstring (anything but module/class/function defs).
        try:
            text = ast.get_docstring(node)
        except TypeError:
            text = None
        if text:
            print(text)
            print('---')
        # Resume the normal dispatch so child nodes are visited as well.
        super().visit(node)
# Walk the current directory tree and dump the docstrings of every
# Python source file, one file at a time.
for dirpath, _, files in os.walk('.'):
    for filename in files:
        if not filename.endswith('.py'):
            continue
        path = os.path.join(dirpath, filename)
        print(path)
        with open(path) as source:
            tree = ast.parse(source.read())
        Visitor().visit(tree)
        print('\n')
|
GoogleCloudPlatform/hadoop-discovery-tool
|
code_release.py
|
Python
|
apache-2.0
| 1,099
|
[
"VisIt"
] |
f2c83933b3ed7cb1ac4ca5329187df390a78ed7b5c36da2c04e9a0a49acae3c5
|
# -*- coding: utf-8 -*-
# Copyright 2010-2017, The University of Melbourne
# Copyright 2010-2017, Brian May
#
# Karaage documentation build configuration file, created by
# sphinx-quickstart on Thu Jan 16 14:28:57 2014.
#
# This file is execfile()d with the current directory set to its containing
# dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# -- General configuration ----------------------------------------------------
exec(open("../conf.py", "rb").read())
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Karaage'
copyright = '2017, Brian May'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Karaage-admin'
# -- Options for LaTeX output -------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author,
# documentclass [howto/manual]).
latex_documents = [
('index', 'Karaage.tex', 'Karaage Admin Documentation',
'Brian May', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output -------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'karaage',
'Karaage Admin Documentation', [u'Brian May'], 8),
('ref/cmd/kg-manage', 'kg-manage',
'Management for Karaage', [u'Brian May'], 8),
('ref/cmd/kg-set-secret-key', 'kg_set_secret_key',
'Set secret key for Karaage', [u'Brian May'], 8),
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Karaage', 'Karaage Admin Documentation',
'Brian May', 'Karaage',
'Karaage is a cluster account management tool.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# -- Options for Epub output --------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = 'Karaage Admin Documentation'
epub_author = 'Brian May'
epub_publisher = 'Brian May'
epub_copyright = '2014, Brian May'
# The language of the text. It defaults to the language option
# or en if the language is not set.
# epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
# epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
# epub_identifier = ''
# A unique identification for the text.
# epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
# epub_cover = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
# epub_pre_files = []
# HTML files shat should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
# epub_post_files = []
# A list of files that should not be packed into the epub file.
# epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
# epub_tocdepth = 3
# Allow duplicate toc entries.
# epub_tocdup = True
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
|
brianmay/karaage
|
docs/admin/conf.py
|
Python
|
gpl-3.0
| 9,208
|
[
"Brian"
] |
a75af3d5aff7ab86d21da0c3030fb45abe5214a0028362efcbe9b3a456a9e991
|
# coding=utf-8
# Copyright 2022 The ML Fairness Gym Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes for building distributions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import logging
import attr
import numpy as np
from typing import Sequence
@attr.s
class Distribution(object):
  """Base distribution class.
  Inheriting classes should fill in the sample method and initialize dim.
  """
  # Dimensionality of one sample; init=False keeps it out of the
  # attrs-generated __init__, so subclasses set it in __attrs_post_init__.
  dim = attr.ib(init=False)
  def sample(self, rng):
    """Draw one sample using `rng` (a numpy RandomState-like object)."""
    raise NotImplementedError
def _check_sum_to_one(instance, attribute, value):
"""Raises ValueError if the value does not sum to one."""
del instance, attribute # Unused.
value = np.array(value)
if not np.isclose(np.sum(value), 1):
raise ValueError("Array must sum to one. Got %s." % np.sum(value))
def _check_nonnegative(instance, attribute, value):
"""Raises ValueError if the value elements are negative."""
del instance, attribute # Unused.
value = np.array(value)
if np.any(value < 0):
raise ValueError("Array must be nonnegative. Got %s." % value)
def _check_in_zero_one_range(instance, attribute, value):
"""Raises ValueError if value is not in [0, 1]."""
del instance, attribute # Unused.
value = np.array(value)
if np.any(value < 0) or np.any(value > 1):
raise ValueError("Value must be in [0, 1]. Got %s." % value)
@attr.s
class Mixture(Distribution):
  """A mixture distribution."""
  # Component distributions; __attrs_post_init__ checks they share one dim.
  components = attr.ib(factory=list)  # type: Sequence[Distribution]
  # Mixing weights; validators enforce nonnegative values summing to one.
  weights = attr.ib(
      factory=list, validator=[_check_sum_to_one,
                               _check_nonnegative])  # type: Sequence[float]
  def sample(self, rng):
    """Pick one component with probability `weights`, then sample from it."""
    logging.debug("Sampling from a mixture with %d components. Weights: %s",
                  len(self.components), self.weights)
    component = rng.choice(self.components, p=self.weights)
    return component.sample(rng)
  def __attrs_post_init__(self):
    # All components must agree on dimensionality; adopt it as this
    # mixture's dim.  NOTE(review): an empty `components` list raises
    # IndexError here rather than a descriptive ValueError.
    for component in self.components:
      if component.dim != self.components[0].dim:
        raise ValueError("Components do not have the same dimensionality.")
    self.dim = self.components[0].dim
@attr.s
class Gaussian(Distribution):
  """A Gaussian Distribution."""
  # Per-dimension mean; must support len(), which defines `dim`.
  mean = attr.ib()
  # Standard deviation; scalar or per-dimension via numpy broadcasting --
  # TODO(review): confirm the intended shape with callers.
  std = attr.ib()
  def __attrs_post_init__(self):
    self.dim = len(self.mean)
  def sample(self, rng):
    """Return one draw from N(mean, std**2) via rng.normal."""
    return rng.normal(self.mean, self.std)
@attr.s
class Bernoulli(Distribution):
  """A Bernoulli Distribution."""
  # Success probability; validator restricts it to [0, 1].
  p = attr.ib(validator=[_check_in_zero_one_range])
  def __attrs_post_init__(self):
    # Bernoulli draws are scalar.
    self.dim = 1
  def sample(self, rng):
    """Return True with probability p (a numpy bool scalar)."""
    return rng.rand() < self.p
@attr.s
class Constant(Distribution):
  """A Constant Distribution."""
  # The fixed value returned by every sample; must support len(), which
  # defines `dim` (a bare scalar would raise TypeError below).
  mean = attr.ib()
  def __attrs_post_init__(self):
    self.dim = len(self.mean)
  def sample(self, rng):
    """Return `mean` unchanged; `rng` is accepted only for interface parity."""
    del rng  # Unused.
    return self.mean
|
google/ml-fairness-gym
|
distributions.py
|
Python
|
apache-2.0
| 3,400
|
[
"Gaussian"
] |
96383b06362e0eaeda28b5b3da644c573776306fa40fc87d632f74b6c717b917
|
"""
An efficient implementation of the triple-plane view showing 3 cut planes
on volumetric data, and side views showing each cut, with a cursor to
move the other cuts.
This is an example of complex callback interaction. It builds on the
:ref:`example_volume_slicer` but has more complex logic. You should try
to understand the :ref:`example_volume_slicer` first.
In this example, the VolumeSlicer object displays a position attribute
giving the position of the cut in data coordinates. Traits callbacks are
used to move the cut planes when this position attribute is modified.
In the 3D window, the 3D cuts are displayed using ImagePlaneWidgets
cutting the 3D volumetric data. The data extracted by the
ImagePlaneWidgets for plotting is captured using the TVTK
ImagePlaneWidget's `_get_reslice_output` method. The resulting dataset is
plotted in each side view using another ImagePlaneWidget. As a result the
data is not copied (at the VTK level, there is only one pipeline), and
modifications of the data plotted on the planes in the 3D view (for
instance when these planes are moved) are propagated to the 2D side views
by the VTK pipeline.
A cursor is displayed in each side view using a glyph. The cursor
indicates the position of the cut.
In the side view, when the mouse button is pressed on the planes, it
creates a VTK `InteractionEvent`. When this happens, VTK calls a
callback (an observer, in VTK terms) that we use to move the position of
the cut. The Traits callbacks do the rest of the updating.
"""
import numpy as np
from traits.api import HasTraits, Instance, Array, \
Bool, Dict, on_trait_change
from traitsui.api import View, Item, HGroup, Group
from tvtk.api import tvtk
from tvtk.pyface.scene import Scene
from mayavi import mlab
from mayavi.core.api import PipelineBase, Source
from mayavi.core.ui.api import SceneEditor, MlabSceneModel
################################################################################
# The object implementing the dialog
class VolumeSlicer(HasTraits):
    """Triple-plane slicer: one 3D view plus x/y/z side views of a volume."""
    # The scalar volume to plot (3D numpy array).
    data = Array
    # The position of the cut, in data coordinates (one value per axis).
    position = Array(shape=(3,))
    # The 4 views displayed
    scene3d = Instance(MlabSceneModel, ())
    scene_x = Instance(MlabSceneModel, ())
    scene_y = Instance(MlabSceneModel, ())
    scene_z = Instance(MlabSceneModel, ())
    # The data source
    data_src = Instance(Source)
    # The image plane widgets of the 3D scene
    ipw_3d_x = Instance(PipelineBase)
    ipw_3d_y = Instance(PipelineBase)
    ipw_3d_z = Instance(PipelineBase)
    # The cursor glyphs on each side view, keyed by axis name:
    cursors = Dict()
    # When True, rendering is suspended in all scenes (see _render_enable).
    disable_render = Bool
    # Maps axis name to its index into `position` / `data.shape`.
    _axis_names = dict(x=0, y=1, z=2)
    #---------------------------------------------------------------------------
    # Object interface
    #---------------------------------------------------------------------------
    def __init__(self, **traits):
        """Create the slicer; touching the ipw_3d_* traits builds them eagerly."""
        super(VolumeSlicer, self).__init__(**traits)
        # Force the creation of the image_plane_widgets:
        self.ipw_3d_x
        self.ipw_3d_y
        self.ipw_3d_z
    #---------------------------------------------------------------------------
    # Default values
    #---------------------------------------------------------------------------
    def _position_default(self):
        # Start with the cut at the center of the volume.
        return 0.5*np.array(self.data.shape)
    def _data_src_default(self):
        # Wrap `data` in a Mayavi scalar field attached to the 3D scene.
        return mlab.pipeline.scalar_field(self.data,
                            figure=self.scene3d.mayavi_scene,
                            name='Data',)
    def make_ipw_3d(self, axis_name):
        """Create the 3D-scene image plane widget cutting along `axis_name`."""
        ipw = mlab.pipeline.image_plane_widget(self.data_src,
                        figure=self.scene3d.mayavi_scene,
                        plane_orientation='%s_axes' % axis_name,
                        name='Cut %s' % axis_name)
        return ipw
    def _ipw_3d_x_default(self):
        return self.make_ipw_3d('x')
    def _ipw_3d_y_default(self):
        return self.make_ipw_3d('y')
    def _ipw_3d_z_default(self):
        return self.make_ipw_3d('z')
    #---------------------------------------------------------------------------
    # Scene activation callbacks
    #---------------------------------------------------------------------------
    @on_trait_change('scene3d.activated')
    def display_scene3d(self):
        """Populate the 3D scene once the toolkit has activated it."""
        outline = mlab.pipeline.outline(self.data_src,
                        figure=self.scene3d.mayavi_scene,
                        )
        self.scene3d.mlab.view(40, 50)
        # Interaction properties can only be changed after the scene
        # has been created, and thus the interactor exists
        for ipw in (self.ipw_3d_x, self.ipw_3d_y, self.ipw_3d_z):
            # Disable direct mouse interaction with the 3D cut planes.
            ipw.ipw.interaction = 0
        self.scene3d.scene.background = (0, 0, 0)
        # Keep the view always pointing up
        self.scene3d.scene.interactor.interactor_style = \
                                 tvtk.InteractorStyleTerrain()
        self.update_position()
def make_side_view(self, axis_name):
scene = getattr(self, 'scene_%s' % axis_name)
scene.scene.parallel_projection = True
ipw_3d = getattr(self, 'ipw_3d_%s' % axis_name)
# We create the image_plane_widgets in the side view using a
# VTK dataset pointing to the data on the corresponding
# image_plane_widget in the 3D view (it is returned by
# ipw_3d._get_reslice_output())
side_src = ipw_3d.ipw._get_reslice_output()
ipw = mlab.pipeline.image_plane_widget(
side_src,
plane_orientation='z_axes',
vmin=self.data.min(),
vmax=self.data.max(),
figure=scene.mayavi_scene,
name='Cut view %s' % axis_name,
)
setattr(self, 'ipw_%s' % axis_name, ipw)
# Extract the spacing of the side_src to convert coordinates
# into indices
spacing = side_src.spacing
# Make left-clicking create a crosshair
ipw.ipw.left_button_action = 0
x, y, z = self.position
cursor = mlab.points3d(x, y, z,
mode='axes',
color=(0, 0, 0),
scale_factor=2*max(self.data.shape),
figure=scene.mayavi_scene,
name='Cursor view %s' % axis_name,
)
self.cursors[axis_name] = cursor
# Add a callback on the image plane widget interaction to
# move the others
this_axis_number = self._axis_names[axis_name]
def move_view(obj, evt):
# Disable rendering on all scene
position = list(obj.GetCurrentCursorPosition()*spacing)[:2]
position.insert(this_axis_number, self.position[this_axis_number])
# We need to special case y, as the view has been rotated.
if axis_name is 'y':
position = position[::-1]
self.position = position
ipw.ipw.add_observer('InteractionEvent', move_view)
ipw.ipw.add_observer('StartInteractionEvent', move_view)
# Center the image plane widget
ipw.ipw.slice_position = 0.5*self.data.shape[
self._axis_names[axis_name]]
# 2D interaction: only pan and zoom
scene.scene.interactor.interactor_style = \
tvtk.InteractorStyleImage()
scene.scene.background = (0, 0, 0)
# Some text:
mlab.text(0.01, 0.8, axis_name, width=0.08)
# Choose a view that makes sens
views = dict(x=(0, 0), y=(90, 180), z=(0, 0))
mlab.view(views[axis_name][0],
views[axis_name][1],
focalpoint=0.5*np.array(self.data.shape),
figure=scene.mayavi_scene)
scene.scene.camera.parallel_scale = 0.52*np.mean(self.data.shape)
    @on_trait_change('scene_x.activated')
    def display_scene_x(self):
        """Build the x side view once its scene is activated."""
        return self.make_side_view('x')
    @on_trait_change('scene_y.activated')
    def display_scene_y(self):
        """Build the y side view once its scene is activated."""
        return self.make_side_view('y')
    @on_trait_change('scene_z.activated')
    def display_scene_z(self):
        """Build the z side view once its scene is activated."""
        return self.make_side_view('z')
#---------------------------------------------------------------------------
# Traits callback
#---------------------------------------------------------------------------
@on_trait_change('position')
def update_position(self):
""" Update the position of the cursors on each side view, as well
as the image_plane_widgets in the 3D view.
"""
# First disable rendering in all scenes to avoid unecessary
# renderings
self.disable_render = True
# For each axis, move image_plane_widget and the cursor in the
# side view
for axis_name, axis_number in self._axis_names.items():
ipw3d = getattr(self, 'ipw_3d_%s' % axis_name)
ipw3d.ipw.slice_position = self.position[axis_number]
# Go from the 3D position, to the 2D coordinates in the
# side view
position2d = list(self.position)
position2d.pop(axis_number)
if axis_name is 'y':
position2d = position2d[::-1]
# Move the cursor
# For the following to work, you need Mayavi 3.4.0, if you
# have a less recent version, use 'x=[position2d[0]]'
self.cursors[axis_name].mlab_source.set(
x=position2d[0],
y=position2d[1],
z=0)
# Finally re-enable rendering
self.disable_render = False
    @on_trait_change('disable_render')
    def _render_enable(self):
        """Propagate the disable_render flag to all four scenes."""
        for scene in (self.scene3d, self.scene_x, self.scene_y,
                      self.scene_z):
            scene.scene.disable_render = self.disable_render
#---------------------------------------------------------------------------
# The layout of the dialog created
#---------------------------------------------------------------------------
view = View(HGroup(
Group(
Item('scene_y',
editor=SceneEditor(scene_class=Scene),
height=250, width=300),
Item('scene_z',
editor=SceneEditor(scene_class=Scene),
height=250, width=300),
show_labels=False,
),
Group(
Item('scene_x',
editor=SceneEditor(scene_class=Scene),
height=250, width=300),
Item('scene3d',
editor=SceneEditor(scene_class=Scene),
height=250, width=300),
show_labels=False,
),
),
resizable=True,
title='Volume Slicer',
)
################################################################################
if __name__ == '__main__':
    # Build a demo volume with some interesting structure and show the UI.
    xs, ys, zs = np.ogrid[-5:5:100j, -5:5:100j, -5:5:100j]
    volume = np.sin(3*xs)/xs + 0.05*zs**2 + np.cos(3*ys)
    viewer = VolumeSlicer(data=volume)
    viewer.configure_traits()
|
dmsurti/mayavi
|
examples/mayavi/interactive/volume_slicer_advanced.py
|
Python
|
bsd-3-clause
| 11,548
|
[
"Mayavi",
"VTK"
] |
3afb28f52bb3156d369d461d74825e8362611e147efd02fc20ae238327ff0909
|
import tempfile
from pele.systems import AtomicCluster
from pele.potentials import LJ
from pele.utils.xyz import write_xyz
__all__ = ["LJCluster"]
class LJCluster(AtomicCluster):
    """
    define the System class for a Lennard-Jones cluster
    Parameters
    ----------
    natoms : int
        number of atoms in the cluster
    See Also
    --------
    BaseSystem, AtomicCluster
    """
    def __init__(self, natoms):
        super(LJCluster, self).__init__()
        self.natoms = natoms
        # Database energy-comparison accuracy and basin-hopping temperature
        # tuned for LJ clusters.
        self.params.database.accuracy = 1e-3
        self.params.basinhopping["temperature"] = 1.0
        # self.params.double_ended_connect.NEBparams.reinterpolate = 1
    def get_permlist(self):
        """Return the permutable-atom groups: all atoms are interchangeable."""
        # FIX: wrap in list() so this is a real list on Python 3 as well;
        # bare range() was only a list under Python 2.
        return [list(range(self.natoms))]
    def get_potential(self):
        """Return the Lennard-Jones potential object."""
        return LJ()
    def get_system_properties(self):
        """Return metadata describing this system (stored in the database)."""
        return dict(natoms=int(self.natoms),
                    potential="LJ cluster",
                    )
    #
    # below here is stuff only for the gui
    #
    def draw(self, coordslinear, index): # pragma: no cover
        """
        tell the gui how to represent your system using openGL objects
        Parameters
        ----------
        coords : array
        index : int
            we can have more than one molecule on the screen at one time. index tells
            which one to draw. They are viewed at the same time, so they should be
            visually distinct, e.g. different colors. accepted values are 1 or 2
        """
        from _opengl_tools import draw_atomic_single_atomtype
        draw_atomic_single_atomtype(coordslinear, index, subtract_com=True)
    def load_coords_pymol(self, coordslist, oname, index=1): # pragma: no cover
        """load the coords into pymol
        the new object must be named oname so we can manipulate it later
        Parameters
        ----------
        coordslist : list of arrays
        oname : str
            the new pymol object must be named oname so it can be manipulated
            later
        index : int
            we can have more than one molecule on the screen at one time. index tells
            which one to draw. They are viewed at the same time, so should be
            visually distinct, e.g. different colors. accepted values are 1 or 2
        Notes
        -----
        the implementation here is a bit hacky. we create a temporary xyz file from coords
        and load the molecule in pymol from this file.
        """
        # pymol is imported here so you can do, e.g. basinhopping without installing pymol
        import pymol
        # create the temporary file
        suffix = ".xyz"
        f = tempfile.NamedTemporaryFile(mode="w", suffix=suffix)
        fname = f.name
        # write the coords into the xyz file
        from pele.mindist import CoMToOrigin
        for coords in coordslist:
            coords = CoMToOrigin(coords.copy())
            write_xyz(f, coords, title=oname, atomtypes=["LA"])
        f.flush()
        # NOTE(review): `f` is deliberately left open here -- closing a
        # NamedTemporaryFile deletes it, and pymol still needs to read it.
        # load the molecule from the temporary file
        pymol.cmd.load(fname)
        # get name of the object just create and change it to oname
        objects = pymol.cmd.get_object_list()
        objectname = objects[-1]
        pymol.cmd.set_name(objectname, oname)
        # set the representation
        pymol.cmd.hide("everything", oname)
        pymol.cmd.show("spheres", oname)
        # set the color according to index
        if index == 1:
            pymol.cmd.color("red", oname)
        else:
            pymol.cmd.color("gray", oname)
#
# only for testing below here
#
def run(): # pragma: no cover
    """Smoke test: basin-hop a 15-atom LJ cluster, then connect two minima."""
    system = LJCluster(15)
    # a fresh database to collect minima
    database = system.create_database()
    # short basin-hopping run until at least two minima exist
    hopper = system.get_basinhopping(database=database, outstream=None)
    while len(database.minima()) < 2:
        hopper.run(100)
    # try to connect the two lowest minima
    lowest, second = database.minima()[:2]
    connector = system.get_double_ended_connect(lowest, second, database)
    connector.connect()
if __name__ == "__main__":
    run()
|
cjforman/pele
|
pele/systems/ljcluster.py
|
Python
|
gpl-3.0
| 4,080
|
[
"PyMOL"
] |
928e8ac6dd1a41b4933343f21f4813fed57ea46d4d232e9f4295a6f9ec0db60d
|
#! /usr/bin/env python
"""
Get Pilots Logging for specific Pilot UUID or Job ID.
"""
__RCSID__ = "$Id$"
import DIRAC
from DIRAC import S_OK
from DIRAC.Core.Base import Script
from DIRAC.WorkloadManagementSystem.Client.PilotsLoggingClient import PilotsLoggingClient
from DIRAC.WorkloadManagementSystem.DB.PilotAgentsDB import PilotAgentsDB
from DIRAC.Core.Utilities.PrettyPrint import printTable
# Module-level option holders, filled in by the switch callbacks below.
uuid = None
jobid = None
def setUUID(optVal):
  """
  Set UUID from arguments
  """
  # DIRAC switch callbacks must return an S_OK/S_ERROR result object.
  global uuid
  uuid = optVal
  return S_OK()
def setJobID(optVal):
  """
  Set JobID from arguments
  """
  # DIRAC switch callbacks must return an S_OK/S_ERROR result object.
  global jobid
  jobid = optVal
  return S_OK()
# Register the command-line switches ('u:'/'j:' both take a value) and the
# usage text; parseCommandLine() triggers the callbacks registered above.
Script.registerSwitch('u:', 'uuid=', 'get PilotsLogging for given Pilot UUID', setUUID)
Script.registerSwitch('j:', 'jobid=', 'get PilotsLogging for given Job ID', setJobID)
Script.setUsageMessage('\n'.join([__doc__.split('\n')[1],
                                  'Usage:',
                                  ' %s option value ' % Script.scriptName,
                                  'Only one option (either uuid or jobid) should be used.']))
Script.parseCommandLine()
def printPilotsLogging(logs):
  """
  Print results using printTable from PrettyPrint
  """
  labels = ['pilotUUID', 'timestamp', 'source', 'phase', 'status', 'messageContent']
  # One table row per log record, columns in the order of `labels`.
  content = [[record[label] for label in labels] for record in logs]
  printTable(labels, content, numbering=False, columnSeparator=' | ')
if uuid:
pilotsLogging = PilotsLoggingClient()
result = pilotsLogging.getPilotsLogging(uuid)
if not result['OK']:
print 'ERROR: %s' % result['Message']
DIRAC.exit(1)
printPilotsLogging(result['Value'])
DIRAC.exit(0)
else:
pilotDB = PilotAgentsDB()
pilotsLogging = PilotsLoggingClient()
pilots = pilotDB.getPilotsForJobID(jobid)
if not pilots['OK ']:
print pilots['Message']
for pilotID in pilots:
info = pilotDB.getPilotInfo(pilotID=pilotID)
if not info['OK']:
print info['Message']
for pilot in info:
logging = pilotsLogging.getPilotsLogging(pilot['PilotJobReference'])
if not logging['OK']:
print logging['Message']
printPilotsLogging(logging)
DIRAC.exit(0)
|
arrabito/DIRAC
|
WorkloadManagementSystem/scripts/dirac-admin-pilot-logging-info.py
|
Python
|
gpl-3.0
| 2,194
|
[
"DIRAC"
] |
499c40a20bed735c704c13b7901a23ecf6790dd1d192a8af29efa2cca78b1cc5
|
#from logging import info
import multiprocessing
from scipy.stats import rankdata
from logging import info
import numpy
import itertools
import pysam
import array
from .fileParser import parse_file_by_strand
# Width (in base pairs) of the bins used to aggregate read counts.
BIN = 1000
def estimate_shiftsizes(parameter):
    ''' the root function for estimating the shiftsizes'''
    ## estimate the shift size for chip samples, serially or via a pool.
    if parameter.num_procs < 2:
        for chip in parameter.get_chip_filenames():
            shift_size, bin_array = estimate_shiftsize(chip, parameter)
            parameter.shift_dict[chip] = shift_size
            parameter.bin_dict[chip] = bin_array
    else:
        pool = multiprocessing.Pool(parameter.num_procs)
        # chunksize=1 so each worker handles one chip file at a time.
        p = pool.map_async(estimate_shiftsize_wrapper, zip(parameter.get_chip_filenames(), itertools.repeat(parameter)),1)
        try: results = p.get()
        except KeyboardInterrupt:
            exit(1)
        # map_async preserves input order, so results align with filenames.
        for chip, result in zip(parameter.get_chip_filenames(), results):
            parameter.shift_dict[chip] = result[0]
            parameter.bin_dict[chip] = result[1]
    # put the shift size for input samples: each input (control) sample gets
    # the mean of the shift sizes of its matching chip group.
    if len(parameter.input1) > 0:
        for input in parameter.input1:
            shift_list = [parameter.shift_dict[file] for file in parameter.chip1]
            parameter.shift_dict[input] = sum(shift_list)/len(shift_list)
    if len(parameter.input2) > 0:
        for input in parameter.input2:
            shift_list = [parameter.shift_dict[file] for file in parameter.chip2]
            parameter.shift_dict[input] = sum(shift_list)/len(shift_list)
    return
def estimate_shiftsize_wrapper(args):
    """Unpack a (chip, parameter) tuple for multiprocessing.Pool.map."""
    try:
        return estimate_shiftsize(*args)
    except KeyboardInterrupt:
        # Swallow Ctrl-C in workers; the parent process handles the
        # interrupt (returning None avoids a traceback from each worker).
        pass
def estimate_shiftsize(chip, parameter):
    ''' estimate the shiftsize for each file '''
    info("estimating the shift size for %s", chip)
    # NOTE(review): info_dict is allocated below but never filled or read;
    # it looks like a leftover from an earlier implementation.
    info_dict = {} # saving the info matrix for deriving the shift size data.
    bin_dict = {}
    # Pre-allocate one count array per chromosome, one cell per BIN bp.
    for chr in parameter.chr_info:
        row_num = int(parameter.chr_info[chr]/BIN)
        bin_dict[chr] = numpy.zeros(row_num, dtype=numpy.float64)
        info_dict[chr] = numpy.zeros((row_num,4),dtype=numpy.int64)
    # parsing file into strand
    forward, reverse = parse_file_by_strand[parameter.file_format](chip, parameter.input_directory)
    shift_list =[]
    # Estimate the fragment size only on the top-3 chromosomes (per
    # parameter.get_top3_chr), then take the median of those estimates.
    for chr in parameter.get_top3_chr():
        try:
            chr_f,chr_r = forward[chr],reverse[chr]
            shift_list.append(cross_cor(chr_f,chr_r))
        except KeyError:
            print("Only one strand detected. Skipping")
    shift_list.sort()
    frag_size = shift_list[int(len(shift_list)/2)]
    ### parse the reads into bins
    for chr in parameter.chr_info:
        if chr in forward:
            for pos in forward[chr]:
                # Reads past the recorded chromosome length fall outside
                # the binned array; silently ignore them.
                try: bin_dict[chr][int(pos/BIN)] += 1
                except IndexError:
                   # print("Index Error")
                    pass
        if chr in reverse:
            for pos in reverse[chr]:
                try: bin_dict[chr][int(pos/BIN)] += 1
                except IndexError:
                   # print("Index Error")
                    pass
        # On the first chromosome bin_array does not exist yet, so the
        # append raises UnboundLocalError and we seed the array instead.
        try: bin_array = numpy.append(bin_array, bin_dict[chr])
        except UnboundLocalError:
            bin_array = bin_dict[chr]
    #frag_size += parameter.read_length_dict[chip]
    # The shift applied to each strand is half the fragment size.
    shift_size = int(frag_size/2)
    info("The shift size for %s is %d", chip, shift_size)
    return (shift_size, bin_array)
def cross_cor(f, r):
    """Return the shift in [50, 300] (step 2) that maximizes the overlap
    between forward positions shifted right and reverse positions."""
    forward = numpy.array(f)
    reverse = numpy.array(r)
    shifts = range(50, 302, 2)
    overlaps = [len(numpy.intersect1d(forward + shift, reverse))
                for shift in shifts]
    # First shift achieving the maximal overlap wins, as in list.index.
    return shifts[overlaps.index(max(overlaps))]
|
shawnzhangyx/PePr
|
PePr/pre_processing/shiftSize.py
|
Python
|
gpl-3.0
| 3,784
|
[
"pysam"
] |
dc8ebcefc800f2cc95af3d1dec42704cffe67cdbc65dd6ca55d61e419c6ca6a9
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
import warnings
from pyspark import since, keyword_only
from pyspark.ml.util import *
from pyspark.ml.wrapper import JavaEstimator, JavaModel, JavaParams, JavaWrapper
from pyspark.ml.param.shared import *
from pyspark.ml.common import inherit_doc
from pyspark.sql import DataFrame
__all__ = ['BisectingKMeans', 'BisectingKMeansModel', 'BisectingKMeansSummary',
'KMeans', 'KMeansModel',
'GaussianMixture', 'GaussianMixtureModel', 'GaussianMixtureSummary',
'LDA', 'LDAModel', 'LocalLDAModel', 'DistributedLDAModel', 'PowerIterationClustering']
class ClusteringSummary(JavaWrapper):
    """
    .. note:: Experimental
    Clustering results for a given model.
    .. versionadded:: 2.1.0
    """
    # Every property below is a thin delegate to the backing JVM summary
    # object via JavaWrapper._call_java.
    @property
    @since("2.1.0")
    def predictionCol(self):
        """
        Name for column of predicted clusters in `predictions`.
        """
        return self._call_java("predictionCol")
    @property
    @since("2.1.0")
    def predictions(self):
        """
        DataFrame produced by the model's `transform` method.
        """
        return self._call_java("predictions")
    @property
    @since("2.1.0")
    def featuresCol(self):
        """
        Name for column of features in `predictions`.
        """
        return self._call_java("featuresCol")
    @property
    @since("2.1.0")
    def k(self):
        """
        The number of clusters the model was trained with.
        """
        return self._call_java("k")
    @property
    @since("2.1.0")
    def cluster(self):
        """
        DataFrame of predicted cluster centers for each training data point.
        """
        return self._call_java("cluster")
    @property
    @since("2.1.0")
    def clusterSizes(self):
        """
        Size of (number of data points in) each cluster.
        """
        return self._call_java("clusterSizes")
    @property
    @since("2.4.0")
    def numIter(self):
        """
        Number of iterations.
        """
        return self._call_java("numIter")
class GaussianMixtureModel(JavaModel, JavaMLWritable, JavaMLReadable, HasTrainingSummary):
    """
    Model fitted by GaussianMixture.
    .. versionadded:: 2.0.0
    """
    # Properties delegate to the underlying JVM model via _call_java.
    @property
    @since("2.0.0")
    def weights(self):
        """
        Weight for each Gaussian distribution in the mixture.
        This is a multinomial probability distribution over the k Gaussians,
        where weights[i] is the weight for Gaussian i, and weights sum to 1.
        """
        return self._call_java("weights")
    @property
    @since("2.0.0")
    def gaussiansDF(self):
        """
        Retrieve Gaussian distributions as a DataFrame.
        Each row represents a Gaussian Distribution.
        The DataFrame has two columns: mean (Vector) and cov (Matrix).
        """
        return self._call_java("gaussiansDF")
    @property
    @since("2.1.0")
    def summary(self):
        """
        Gets summary (e.g. cluster assignments, cluster sizes) of the model trained on the
        training set. An exception is thrown if no summary exists.
        """
        # hasSummary is provided by HasTrainingSummary; a summary only
        # exists on a freshly-fitted model, not on one loaded from disk.
        if self.hasSummary:
            return GaussianMixtureSummary(super(GaussianMixtureModel, self).summary)
        else:
            raise RuntimeError("No training summary available for this %s" %
                               self.__class__.__name__)
@inherit_doc
class GaussianMixture(JavaEstimator, HasFeaturesCol, HasPredictionCol, HasMaxIter, HasTol, HasSeed,
                      HasProbabilityCol, JavaMLWritable, JavaMLReadable):
    """
    GaussianMixture clustering.
    This class performs expectation maximization for multivariate Gaussian
    Mixture Models (GMMs). A GMM represents a composite distribution of
    independent Gaussian distributions with associated "mixing" weights
    specifying each's contribution to the composite.
    Given a set of sample points, this class will maximize the log-likelihood
    for a mixture of k Gaussians, iterating until the log-likelihood changes by
    less than convergenceTol, or until it has reached the max number of iterations.
    While this process is generally guaranteed to converge, it is not guaranteed
    to find a global optimum.
    .. note:: For high-dimensional data (with many features), this algorithm may perform poorly.
        This is due to high-dimensional data (a) making it difficult to cluster at all
        (based on statistical/theoretical arguments) and (b) numerical issues with
        Gaussian distributions.
    >>> from pyspark.ml.linalg import Vectors
    >>> data = [(Vectors.dense([-0.1, -0.05 ]),),
    ...         (Vectors.dense([-0.01, -0.1]),),
    ...         (Vectors.dense([0.9, 0.8]),),
    ...         (Vectors.dense([0.75, 0.935]),),
    ...         (Vectors.dense([-0.83, -0.68]),),
    ...         (Vectors.dense([-0.91, -0.76]),)]
    >>> df = spark.createDataFrame(data, ["features"])
    >>> gm = GaussianMixture(k=3, tol=0.0001,
    ...                      maxIter=10, seed=10)
    >>> model = gm.fit(df)
    >>> model.hasSummary
    True
    >>> summary = model.summary
    >>> summary.k
    3
    >>> summary.clusterSizes
    [2, 2, 2]
    >>> summary.logLikelihood
    8.14636...
    >>> weights = model.weights
    >>> len(weights)
    3
    >>> model.gaussiansDF.select("mean").head()
    Row(mean=DenseVector([0.825, 0.8675]))
    >>> model.gaussiansDF.select("cov").head()
    Row(cov=DenseMatrix(2, 2, [0.0056, -0.0051, -0.0051, 0.0046], False))
    >>> transformed = model.transform(df).select("features", "prediction")
    >>> rows = transformed.collect()
    >>> rows[4].prediction == rows[5].prediction
    True
    >>> rows[2].prediction == rows[3].prediction
    True
    >>> gmm_path = temp_path + "/gmm"
    >>> gm.save(gmm_path)
    >>> gm2 = GaussianMixture.load(gmm_path)
    >>> gm2.getK()
    3
    >>> model_path = temp_path + "/gmm_model"
    >>> model.save(model_path)
    >>> model2 = GaussianMixtureModel.load(model_path)
    >>> model2.hasSummary
    False
    >>> model2.weights == model.weights
    True
    >>> model2.gaussiansDF.select("mean").head()
    Row(mean=DenseVector([0.825, 0.8675]))
    >>> model2.gaussiansDF.select("cov").head()
    Row(cov=DenseMatrix(2, 2, [0.0056, -0.0051, -0.0051, 0.0046], False))
    .. versionadded:: 2.0.0
    """
    # Algorithm-specific Param; the shared params (maxIter, tol, seed, ...)
    # come from the Has* mixins in the class bases.
    k = Param(Params._dummy(), "k", "Number of independent Gaussians in the mixture model. " +
              "Must be > 1.", typeConverter=TypeConverters.toInt)
    @keyword_only
    def __init__(self, featuresCol="features", predictionCol="prediction", k=2,
                 probabilityCol="probability", tol=0.01, maxIter=100, seed=None):
        """
        __init__(self, featuresCol="features", predictionCol="prediction", k=2, \
                 probabilityCol="probability", tol=0.01, maxIter=100, seed=None)
        """
        super(GaussianMixture, self).__init__()
        # Instantiate the JVM-side estimator that backs this Python wrapper.
        self._java_obj = self._new_java_obj("org.apache.spark.ml.clustering.GaussianMixture",
                                            self.uid)
        self._setDefault(k=2, tol=0.01, maxIter=100)
        # @keyword_only stashed the caller's keyword args in _input_kwargs.
        kwargs = self._input_kwargs
        self.setParams(**kwargs)
    def _create_model(self, java_model):
        # Called by JavaEstimator.fit() to wrap the fitted Java model.
        return GaussianMixtureModel(java_model)
    @keyword_only
    @since("2.0.0")
    def setParams(self, featuresCol="features", predictionCol="prediction", k=2,
                  probabilityCol="probability", tol=0.01, maxIter=100, seed=None):
        """
        setParams(self, featuresCol="features", predictionCol="prediction", k=2, \
                  probabilityCol="probability", tol=0.01, maxIter=100, seed=None)
        Sets params for GaussianMixture.
        """
        kwargs = self._input_kwargs
        return self._set(**kwargs)
    @since("2.0.0")
    def setK(self, value):
        """
        Sets the value of :py:attr:`k`.
        """
        return self._set(k=value)
    @since("2.0.0")
    def getK(self):
        """
        Gets the value of :py:attr:`k` or its default value.
        """
        return self.getOrDefault(self.k)
class GaussianMixtureSummary(ClusteringSummary):
    """
    .. note:: Experimental
    Gaussian mixture clustering results for a given model.
    .. versionadded:: 2.1.0
    """
    # Inherits the generic accessors (k, predictions, clusterSizes, ...) from
    # ClusteringSummary and adds the GMM-specific ones below.
    @property
    @since("2.1.0")
    def probabilityCol(self):
        """
        Name for column of predicted probability of each cluster in `predictions`.
        """
        return self._call_java("probabilityCol")
    @property
    @since("2.1.0")
    def probability(self):
        """
        DataFrame of probabilities of each cluster for each training data point.
        """
        return self._call_java("probability")
    @property
    @since("2.2.0")
    def logLikelihood(self):
        """
        Total log-likelihood for this model on the given data.
        """
        return self._call_java("logLikelihood")
class KMeansSummary(ClusteringSummary):
    """
    .. note:: Experimental
    Summary of KMeans.
    .. versionadded:: 2.1.0
    """
    @property
    @since("2.4.0")
    def trainingCost(self):
        """
        K-means cost (sum of squared distances to the nearest centroid for all points in the
        training dataset). This is equivalent to sklearn's inertia.
        """
        return self._call_java("trainingCost")
class KMeansModel(JavaModel, GeneralJavaMLWritable, JavaMLReadable, HasTrainingSummary):
    """
    Model fitted by KMeans.
    .. versionadded:: 1.5.0
    """
    # NOTE: uses GeneralJavaMLWritable (unlike the other models here, which
    # use JavaMLWritable) — presumably to support format-specific export;
    # confirm against pyspark.ml.util before changing.
    @since("1.5.0")
    def clusterCenters(self):
        """Get the cluster centers, represented as a list of NumPy arrays."""
        # Each Java Vector is converted to a NumPy array via Vector.toArray().
        return [c.toArray() for c in self._call_java("clusterCenters")]
    @property
    @since("2.1.0")
    def summary(self):
        """
        Gets summary (e.g. cluster assignments, cluster sizes) of the model trained on the
        training set. An exception is thrown if no summary exists.
        """
        if self.hasSummary:
            return KMeansSummary(super(KMeansModel, self).summary)
        else:
            raise RuntimeError("No training summary available for this %s" %
                               self.__class__.__name__)
@inherit_doc
class KMeans(JavaEstimator, HasDistanceMeasure, HasFeaturesCol, HasPredictionCol, HasMaxIter,
             HasTol, HasSeed, JavaMLWritable, JavaMLReadable):
    """
    K-means clustering with a k-means++ like initialization mode
    (the k-means|| algorithm by Bahmani et al).
    >>> from pyspark.ml.linalg import Vectors
    >>> data = [(Vectors.dense([0.0, 0.0]),), (Vectors.dense([1.0, 1.0]),),
    ...         (Vectors.dense([9.0, 8.0]),), (Vectors.dense([8.0, 9.0]),)]
    >>> df = spark.createDataFrame(data, ["features"])
    >>> kmeans = KMeans(k=2, seed=1)
    >>> model = kmeans.fit(df)
    >>> centers = model.clusterCenters()
    >>> len(centers)
    2
    >>> transformed = model.transform(df).select("features", "prediction")
    >>> rows = transformed.collect()
    >>> rows[0].prediction == rows[1].prediction
    True
    >>> rows[2].prediction == rows[3].prediction
    True
    >>> model.hasSummary
    True
    >>> summary = model.summary
    >>> summary.k
    2
    >>> summary.clusterSizes
    [2, 2]
    >>> summary.trainingCost
    2.0
    >>> kmeans_path = temp_path + "/kmeans"
    >>> kmeans.save(kmeans_path)
    >>> kmeans2 = KMeans.load(kmeans_path)
    >>> kmeans2.getK()
    2
    >>> model_path = temp_path + "/kmeans_model"
    >>> model.save(model_path)
    >>> model2 = KMeansModel.load(model_path)
    >>> model2.hasSummary
    False
    >>> model.clusterCenters()[0] == model2.clusterCenters()[0]
    array([ True,  True], dtype=bool)
    >>> model.clusterCenters()[1] == model2.clusterCenters()[1]
    array([ True,  True], dtype=bool)
    .. versionadded:: 1.5.0
    """
    # Algorithm-specific Params; shared params (maxIter, tol, seed,
    # distanceMeasure, ...) come from the Has* mixins in the class bases.
    k = Param(Params._dummy(), "k", "The number of clusters to create. Must be > 1.",
              typeConverter=TypeConverters.toInt)
    initMode = Param(Params._dummy(), "initMode",
                     "The initialization algorithm. This can be either \"random\" to " +
                     "choose random points as initial cluster centers, or \"k-means||\" " +
                     "to use a parallel variant of k-means++",
                     typeConverter=TypeConverters.toString)
    initSteps = Param(Params._dummy(), "initSteps", "The number of steps for k-means|| " +
                      "initialization mode. Must be > 0.", typeConverter=TypeConverters.toInt)
    @keyword_only
    def __init__(self, featuresCol="features", predictionCol="prediction", k=2,
                 initMode="k-means||", initSteps=2, tol=1e-4, maxIter=20, seed=None,
                 distanceMeasure="euclidean"):
        """
        __init__(self, featuresCol="features", predictionCol="prediction", k=2, \
                 initMode="k-means||", initSteps=2, tol=1e-4, maxIter=20, seed=None, \
                 distanceMeasure="euclidean")
        """
        super(KMeans, self).__init__()
        # Instantiate the JVM-side estimator that backs this Python wrapper.
        self._java_obj = self._new_java_obj("org.apache.spark.ml.clustering.KMeans", self.uid)
        self._setDefault(k=2, initMode="k-means||", initSteps=2, tol=1e-4, maxIter=20,
                         distanceMeasure="euclidean")
        # @keyword_only stashed the caller's keyword args in _input_kwargs.
        kwargs = self._input_kwargs
        self.setParams(**kwargs)
    def _create_model(self, java_model):
        # Called by JavaEstimator.fit() to wrap the fitted Java model.
        return KMeansModel(java_model)
    @keyword_only
    @since("1.5.0")
    def setParams(self, featuresCol="features", predictionCol="prediction", k=2,
                  initMode="k-means||", initSteps=2, tol=1e-4, maxIter=20, seed=None,
                  distanceMeasure="euclidean"):
        """
        setParams(self, featuresCol="features", predictionCol="prediction", k=2, \
                  initMode="k-means||", initSteps=2, tol=1e-4, maxIter=20, seed=None, \
                  distanceMeasure="euclidean")
        Sets params for KMeans.
        """
        kwargs = self._input_kwargs
        return self._set(**kwargs)
    @since("1.5.0")
    def setK(self, value):
        """
        Sets the value of :py:attr:`k`.
        """
        return self._set(k=value)
    @since("1.5.0")
    def getK(self):
        """
        Gets the value of :py:attr:`k` or its default value.
        """
        return self.getOrDefault(self.k)
    @since("1.5.0")
    def setInitMode(self, value):
        """
        Sets the value of :py:attr:`initMode`.
        """
        return self._set(initMode=value)
    @since("1.5.0")
    def getInitMode(self):
        """
        Gets the value of :py:attr:`initMode` or its default value.
        """
        return self.getOrDefault(self.initMode)
    @since("1.5.0")
    def setInitSteps(self, value):
        """
        Sets the value of :py:attr:`initSteps`.
        """
        return self._set(initSteps=value)
    @since("1.5.0")
    def getInitSteps(self):
        """
        Gets the value of :py:attr:`initSteps` or its default value.
        """
        return self.getOrDefault(self.initSteps)
    @since("2.4.0")
    def setDistanceMeasure(self, value):
        """
        Sets the value of :py:attr:`distanceMeasure`.
        """
        return self._set(distanceMeasure=value)
    @since("2.4.0")
    def getDistanceMeasure(self):
        """
        Gets the value of :py:attr:`distanceMeasure` or its default value.
        """
        return self.getOrDefault(self.distanceMeasure)
class BisectingKMeansModel(JavaModel, JavaMLWritable, JavaMLReadable, HasTrainingSummary):
    """
    Model fitted by BisectingKMeans.
    .. versionadded:: 2.0.0
    """
    @since("2.0.0")
    def clusterCenters(self):
        """Get the cluster centers, represented as a list of NumPy arrays."""
        # Each Java Vector is converted to a NumPy array via Vector.toArray().
        return [c.toArray() for c in self._call_java("clusterCenters")]
    @since("2.0.0")
    def computeCost(self, dataset):
        """
        Computes the sum of squared distances between the input points
        and their corresponding cluster centers.
        .. note:: Deprecated in 3.0.0. It will be removed in future versions. Use
            ClusteringEvaluator instead. You can also get the cost on the training dataset in the
            summary.
        """
        # Emit the deprecation warning at call time; the Java-side method is
        # still invoked so existing callers keep working.
        warnings.warn("Deprecated in 3.0.0. It will be removed in future versions. Use "
                      "ClusteringEvaluator instead. You can also get the cost on the training "
                      "dataset in the summary.", DeprecationWarning)
        return self._call_java("computeCost", dataset)
    @property
    @since("2.1.0")
    def summary(self):
        """
        Gets summary (e.g. cluster assignments, cluster sizes) of the model trained on the
        training set. An exception is thrown if no summary exists.
        """
        if self.hasSummary:
            return BisectingKMeansSummary(super(BisectingKMeansModel, self).summary)
        else:
            raise RuntimeError("No training summary available for this %s" %
                               self.__class__.__name__)
@inherit_doc
class BisectingKMeans(JavaEstimator, HasDistanceMeasure, HasFeaturesCol, HasPredictionCol,
                      HasMaxIter, HasSeed, JavaMLWritable, JavaMLReadable):
    """
    A bisecting k-means algorithm based on the paper "A comparison of document clustering
    techniques" by Steinbach, Karypis, and Kumar, with modification to fit Spark.
    The algorithm starts from a single cluster that contains all points.
    Iteratively it finds divisible clusters on the bottom level and bisects each of them using
    k-means, until there are `k` leaf clusters in total or no leaf clusters are divisible.
    The bisecting steps of clusters on the same level are grouped together to increase parallelism.
    If bisecting all divisible clusters on the bottom level would result more than `k` leaf
    clusters, larger clusters get higher priority.
    >>> from pyspark.ml.linalg import Vectors
    >>> data = [(Vectors.dense([0.0, 0.0]),), (Vectors.dense([1.0, 1.0]),),
    ...         (Vectors.dense([9.0, 8.0]),), (Vectors.dense([8.0, 9.0]),)]
    >>> df = spark.createDataFrame(data, ["features"])
    >>> bkm = BisectingKMeans(k=2, minDivisibleClusterSize=1.0)
    >>> model = bkm.fit(df)
    >>> centers = model.clusterCenters()
    >>> len(centers)
    2
    >>> model.computeCost(df)
    2.0
    >>> model.hasSummary
    True
    >>> summary = model.summary
    >>> summary.k
    2
    >>> summary.clusterSizes
    [2, 2]
    >>> summary.trainingCost
    2.000...
    >>> transformed = model.transform(df).select("features", "prediction")
    >>> rows = transformed.collect()
    >>> rows[0].prediction == rows[1].prediction
    True
    >>> rows[2].prediction == rows[3].prediction
    True
    >>> bkm_path = temp_path + "/bkm"
    >>> bkm.save(bkm_path)
    >>> bkm2 = BisectingKMeans.load(bkm_path)
    >>> bkm2.getK()
    2
    >>> bkm2.getDistanceMeasure()
    'euclidean'
    >>> model_path = temp_path + "/bkm_model"
    >>> model.save(model_path)
    >>> model2 = BisectingKMeansModel.load(model_path)
    >>> model2.hasSummary
    False
    >>> model.clusterCenters()[0] == model2.clusterCenters()[0]
    array([ True,  True], dtype=bool)
    >>> model.clusterCenters()[1] == model2.clusterCenters()[1]
    array([ True,  True], dtype=bool)
    .. versionadded:: 2.0.0
    """
    # Algorithm-specific Params; distanceMeasure and the other shared params
    # come from the Has* mixins in the class bases.
    k = Param(Params._dummy(), "k", "The desired number of leaf clusters. Must be > 1.",
              typeConverter=TypeConverters.toInt)
    minDivisibleClusterSize = Param(Params._dummy(), "minDivisibleClusterSize",
                                    "The minimum number of points (if >= 1.0) or the minimum " +
                                    "proportion of points (if < 1.0) of a divisible cluster.",
                                    typeConverter=TypeConverters.toFloat)
    @keyword_only
    def __init__(self, featuresCol="features", predictionCol="prediction", maxIter=20,
                 seed=None, k=4, minDivisibleClusterSize=1.0, distanceMeasure="euclidean"):
        """
        __init__(self, featuresCol="features", predictionCol="prediction", maxIter=20, \
                 seed=None, k=4, minDivisibleClusterSize=1.0, distanceMeasure="euclidean")
        """
        super(BisectingKMeans, self).__init__()
        # Instantiate the JVM-side estimator that backs this Python wrapper.
        self._java_obj = self._new_java_obj("org.apache.spark.ml.clustering.BisectingKMeans",
                                            self.uid)
        # NOTE(review): distanceMeasure is not set here, unlike KMeans —
        # presumably its default comes from the HasDistanceMeasure mixin;
        # confirm before relying on it.
        self._setDefault(maxIter=20, k=4, minDivisibleClusterSize=1.0)
        # @keyword_only stashed the caller's keyword args in _input_kwargs.
        kwargs = self._input_kwargs
        self.setParams(**kwargs)
    @keyword_only
    @since("2.0.0")
    def setParams(self, featuresCol="features", predictionCol="prediction", maxIter=20,
                  seed=None, k=4, minDivisibleClusterSize=1.0, distanceMeasure="euclidean"):
        """
        setParams(self, featuresCol="features", predictionCol="prediction", maxIter=20, \
                  seed=None, k=4, minDivisibleClusterSize=1.0, distanceMeasure="euclidean")
        Sets params for BisectingKMeans.
        """
        kwargs = self._input_kwargs
        return self._set(**kwargs)
    @since("2.0.0")
    def setK(self, value):
        """
        Sets the value of :py:attr:`k`.
        """
        return self._set(k=value)
    @since("2.0.0")
    def getK(self):
        """
        Gets the value of `k` or its default value.
        """
        return self.getOrDefault(self.k)
    @since("2.0.0")
    def setMinDivisibleClusterSize(self, value):
        """
        Sets the value of :py:attr:`minDivisibleClusterSize`.
        """
        return self._set(minDivisibleClusterSize=value)
    @since("2.0.0")
    def getMinDivisibleClusterSize(self):
        """
        Gets the value of `minDivisibleClusterSize` or its default value.
        """
        return self.getOrDefault(self.minDivisibleClusterSize)
    @since("2.4.0")
    def setDistanceMeasure(self, value):
        """
        Sets the value of :py:attr:`distanceMeasure`.
        """
        return self._set(distanceMeasure=value)
    @since("2.4.0")
    def getDistanceMeasure(self):
        """
        Gets the value of `distanceMeasure` or its default value.
        """
        return self.getOrDefault(self.distanceMeasure)
    def _create_model(self, java_model):
        # Called by JavaEstimator.fit() to wrap the fitted Java model.
        return BisectingKMeansModel(java_model)
class BisectingKMeansSummary(ClusteringSummary):
    """
    .. note:: Experimental
    Bisecting KMeans clustering results for a given model.
    .. versionadded:: 2.1.0
    """
    @property
    @since("3.0.0")
    def trainingCost(self):
        """
        Sum of squared distances to the nearest centroid for all points in the training dataset.
        This is equivalent to sklearn's inertia.
        """
        return self._call_java("trainingCost")
@inherit_doc
class LDAModel(JavaModel):
    """
    Latent Dirichlet Allocation (LDA) model.
    This abstraction permits for different underlying representations,
    including local and distributed data structures.
    .. versionadded:: 2.0.0
    """
    # Abstract base shared by LocalLDAModel and DistributedLDAModel; all
    # methods delegate to the JVM-side model via Py4J.
    @since("2.0.0")
    def isDistributed(self):
        """
        Indicates whether this instance is of type DistributedLDAModel
        """
        return self._call_java("isDistributed")
    @since("2.0.0")
    def vocabSize(self):
        """Vocabulary size (number of terms or words in the vocabulary)"""
        return self._call_java("vocabSize")
    @since("2.0.0")
    def topicsMatrix(self):
        """
        Inferred topics, where each topic is represented by a distribution over terms.
        This is a matrix of size vocabSize x k, where each column is a topic.
        No guarantees are given about the ordering of the topics.
        WARNING: If this model is actually a :py:class:`DistributedLDAModel` instance produced by
        the Expectation-Maximization ("em") `optimizer`, then this method could involve
        collecting a large amount of data to the driver (on the order of vocabSize x k).
        """
        return self._call_java("topicsMatrix")
    @since("2.0.0")
    def logLikelihood(self, dataset):
        """
        Calculates a lower bound on the log likelihood of the entire corpus.
        See Equation (16) in the Online LDA paper (Hoffman et al., 2010).
        WARNING: If this model is an instance of :py:class:`DistributedLDAModel` (produced when
        :py:attr:`optimizer` is set to "em"), this involves collecting a large
        :py:func:`topicsMatrix` to the driver. This implementation may be changed in the future.
        """
        return self._call_java("logLikelihood", dataset)
    @since("2.0.0")
    def logPerplexity(self, dataset):
        """
        Calculate an upper bound on perplexity. (Lower is better.)
        See Equation (16) in the Online LDA paper (Hoffman et al., 2010).
        WARNING: If this model is an instance of :py:class:`DistributedLDAModel` (produced when
        :py:attr:`optimizer` is set to "em"), this involves collecting a large
        :py:func:`topicsMatrix` to the driver. This implementation may be changed in the future.
        """
        return self._call_java("logPerplexity", dataset)
    @since("2.0.0")
    def describeTopics(self, maxTermsPerTopic=10):
        """
        Return the topics described by their top-weighted terms.
        """
        return self._call_java("describeTopics", maxTermsPerTopic)
    @since("2.0.0")
    def estimatedDocConcentration(self):
        """
        Value for :py:attr:`LDA.docConcentration` estimated from data.
        If Online LDA was used and :py:attr:`LDA.optimizeDocConcentration` was set to false,
        then this returns the fixed (given) value for the :py:attr:`LDA.docConcentration` parameter.
        """
        return self._call_java("estimatedDocConcentration")
@inherit_doc
class DistributedLDAModel(LDAModel, JavaMLReadable, JavaMLWritable):
    """
    Distributed model fitted by :py:class:`LDA`.
    This type of model is currently only produced by Expectation-Maximization (EM).
    This model stores the inferred topics, the full training dataset, and the topic distribution
    for each training document.
    .. versionadded:: 2.0.0
    """
    @since("2.0.0")
    def toLocal(self):
        """
        Convert this distributed model to a local representation.  This discards info about the
        training dataset.
        WARNING: This involves collecting a large :py:func:`topicsMatrix` to the driver.
        """
        model = LocalLDAModel(self._call_java("toLocal"))
        # SPARK-10931: Temporary fix to be removed once LDAModel defines Params
        model._create_params_from_java()
        model._transfer_params_from_java()
        return model
    @since("2.0.0")
    def trainingLogLikelihood(self):
        """
        Log likelihood of the observed tokens in the training set,
        given the current parameter estimates:
        log P(docs | topics, topic distributions for docs, Dirichlet hyperparameters)
        Notes:
          - This excludes the prior; for that, use :py:func:`logPrior`.
          - Even with :py:func:`logPrior`, this is NOT the same as the data log likelihood given
            the hyperparameters.
          - This is computed from the topic distributions computed during training. If you call
            :py:func:`logLikelihood` on the same training dataset, the topic distributions
            will be computed again, possibly giving different results.
        """
        return self._call_java("trainingLogLikelihood")
    @since("2.0.0")
    def logPrior(self):
        """
        Log probability of the current parameter estimate:
        log P(topics, topic distributions for docs | alpha, eta)
        """
        return self._call_java("logPrior")
    @since("2.0.0")
    def getCheckpointFiles(self):
        """
        If using checkpointing and :py:attr:`LDA.keepLastCheckpoint` is set to true, then there may
        be saved checkpoint files.  This method is provided so that users can manage those files.
        .. note:: Removing the checkpoints can cause failures if a partition is lost and is needed
            by certain :py:class:`DistributedLDAModel` methods.  Reference counting will clean up
            the checkpoints when this model and derivative data go out of scope.
        :return  List of checkpoint files from training
        """
        return self._call_java("getCheckpointFiles")
@inherit_doc
class LocalLDAModel(LDAModel, JavaMLReadable, JavaMLWritable):
    """
    Local (non-distributed) model fitted by :py:class:`LDA`.
    This model stores the inferred topics only; it does not store info about the training dataset.
    .. versionadded:: 2.0.0
    """
    # All behavior is inherited from LDAModel; this subclass only adds the
    # ML read/write mixins for persistence.
    pass
@inherit_doc
class LDA(JavaEstimator, HasFeaturesCol, HasMaxIter, HasSeed, HasCheckpointInterval,
JavaMLReadable, JavaMLWritable):
"""
Latent Dirichlet Allocation (LDA), a topic model designed for text documents.
Terminology:
- "term" = "word": an element of the vocabulary
- "token": instance of a term appearing in a document
- "topic": multinomial distribution over terms representing some concept
- "document": one piece of text, corresponding to one row in the input data
Original LDA paper (journal version):
Blei, Ng, and Jordan. "Latent Dirichlet Allocation." JMLR, 2003.
Input data (featuresCol):
LDA is given a collection of documents as input data, via the featuresCol parameter.
Each document is specified as a :py:class:`Vector` of length vocabSize, where each entry is the
count for the corresponding term (word) in the document. Feature transformers such as
:py:class:`pyspark.ml.feature.Tokenizer` and :py:class:`pyspark.ml.feature.CountVectorizer`
can be useful for converting text to word count vectors.
>>> from pyspark.ml.linalg import Vectors, SparseVector
>>> from pyspark.ml.clustering import LDA
>>> df = spark.createDataFrame([[1, Vectors.dense([0.0, 1.0])],
... [2, SparseVector(2, {0: 1.0})],], ["id", "features"])
>>> lda = LDA(k=2, seed=1, optimizer="em")
>>> model = lda.fit(df)
>>> model.isDistributed()
True
>>> localModel = model.toLocal()
>>> localModel.isDistributed()
False
>>> model.vocabSize()
2
>>> model.describeTopics().show()
+-----+-----------+--------------------+
|topic|termIndices| termWeights|
+-----+-----------+--------------------+
| 0| [1, 0]|[0.50401530077160...|
| 1| [0, 1]|[0.50401530077160...|
+-----+-----------+--------------------+
...
>>> model.topicsMatrix()
DenseMatrix(2, 2, [0.496, 0.504, 0.504, 0.496], 0)
>>> lda_path = temp_path + "/lda"
>>> lda.save(lda_path)
>>> sameLDA = LDA.load(lda_path)
>>> distributed_model_path = temp_path + "/lda_distributed_model"
>>> model.save(distributed_model_path)
>>> sameModel = DistributedLDAModel.load(distributed_model_path)
>>> local_model_path = temp_path + "/lda_local_model"
>>> localModel.save(local_model_path)
>>> sameLocalModel = LocalLDAModel.load(local_model_path)
.. versionadded:: 2.0.0
"""
k = Param(Params._dummy(), "k", "The number of topics (clusters) to infer. Must be > 1.",
typeConverter=TypeConverters.toInt)
optimizer = Param(Params._dummy(), "optimizer",
"Optimizer or inference algorithm used to estimate the LDA model. "
"Supported: online, em", typeConverter=TypeConverters.toString)
learningOffset = Param(Params._dummy(), "learningOffset",
"A (positive) learning parameter that downweights early iterations."
" Larger values make early iterations count less",
typeConverter=TypeConverters.toFloat)
learningDecay = Param(Params._dummy(), "learningDecay", "Learning rate, set as an"
"exponential decay rate. This should be between (0.5, 1.0] to "
"guarantee asymptotic convergence.", typeConverter=TypeConverters.toFloat)
subsamplingRate = Param(Params._dummy(), "subsamplingRate",
"Fraction of the corpus to be sampled and used in each iteration "
"of mini-batch gradient descent, in range (0, 1].",
typeConverter=TypeConverters.toFloat)
optimizeDocConcentration = Param(Params._dummy(), "optimizeDocConcentration",
"Indicates whether the docConcentration (Dirichlet parameter "
"for document-topic distribution) will be optimized during "
"training.", typeConverter=TypeConverters.toBoolean)
docConcentration = Param(Params._dummy(), "docConcentration",
"Concentration parameter (commonly named \"alpha\") for the "
"prior placed on documents' distributions over topics (\"theta\").",
typeConverter=TypeConverters.toListFloat)
topicConcentration = Param(Params._dummy(), "topicConcentration",
"Concentration parameter (commonly named \"beta\" or \"eta\") for "
"the prior placed on topic' distributions over terms.",
typeConverter=TypeConverters.toFloat)
topicDistributionCol = Param(Params._dummy(), "topicDistributionCol",
"Output column with estimates of the topic mixture distribution "
"for each document (often called \"theta\" in the literature). "
"Returns a vector of zeros for an empty document.",
typeConverter=TypeConverters.toString)
keepLastCheckpoint = Param(Params._dummy(), "keepLastCheckpoint",
"(For EM optimizer) If using checkpointing, this indicates whether"
" to keep the last checkpoint. If false, then the checkpoint will be"
" deleted. Deleting the checkpoint can cause failures if a data"
" partition is lost, so set this bit with care.",
TypeConverters.toBoolean)
@keyword_only
def __init__(self, featuresCol="features", maxIter=20, seed=None, checkpointInterval=10,
k=10, optimizer="online", learningOffset=1024.0, learningDecay=0.51,
subsamplingRate=0.05, optimizeDocConcentration=True,
docConcentration=None, topicConcentration=None,
topicDistributionCol="topicDistribution", keepLastCheckpoint=True):
"""
__init__(self, featuresCol="features", maxIter=20, seed=None, checkpointInterval=10,\
k=10, optimizer="online", learningOffset=1024.0, learningDecay=0.51,\
subsamplingRate=0.05, optimizeDocConcentration=True,\
docConcentration=None, topicConcentration=None,\
topicDistributionCol="topicDistribution", keepLastCheckpoint=True)
"""
super(LDA, self).__init__()
self._java_obj = self._new_java_obj("org.apache.spark.ml.clustering.LDA", self.uid)
self._setDefault(maxIter=20, checkpointInterval=10,
k=10, optimizer="online", learningOffset=1024.0, learningDecay=0.51,
subsamplingRate=0.05, optimizeDocConcentration=True,
topicDistributionCol="topicDistribution", keepLastCheckpoint=True)
kwargs = self._input_kwargs
self.setParams(**kwargs)
def _create_model(self, java_model):
if self.getOptimizer() == "em":
return DistributedLDAModel(java_model)
else:
return LocalLDAModel(java_model)
@keyword_only
@since("2.0.0")
def setParams(self, featuresCol="features", maxIter=20, seed=None, checkpointInterval=10,
k=10, optimizer="online", learningOffset=1024.0, learningDecay=0.51,
subsamplingRate=0.05, optimizeDocConcentration=True,
docConcentration=None, topicConcentration=None,
topicDistributionCol="topicDistribution", keepLastCheckpoint=True):
"""
setParams(self, featuresCol="features", maxIter=20, seed=None, checkpointInterval=10,\
k=10, optimizer="online", learningOffset=1024.0, learningDecay=0.51,\
subsamplingRate=0.05, optimizeDocConcentration=True,\
docConcentration=None, topicConcentration=None,\
topicDistributionCol="topicDistribution", keepLastCheckpoint=True)
Sets params for LDA.
"""
kwargs = self._input_kwargs
return self._set(**kwargs)
@since("2.0.0")
def setK(self, value):
"""
Sets the value of :py:attr:`k`.
>>> algo = LDA().setK(10)
>>> algo.getK()
10
"""
return self._set(k=value)
@since("2.0.0")
def getK(self):
"""
Gets the value of :py:attr:`k` or its default value.
"""
return self.getOrDefault(self.k)
@since("2.0.0")
def setOptimizer(self, value):
"""
Sets the value of :py:attr:`optimizer`.
Currently only support 'em' and 'online'.
>>> algo = LDA().setOptimizer("em")
>>> algo.getOptimizer()
'em'
"""
return self._set(optimizer=value)
@since("2.0.0")
def getOptimizer(self):
"""
Gets the value of :py:attr:`optimizer` or its default value.
"""
return self.getOrDefault(self.optimizer)
@since("2.0.0")
def setLearningOffset(self, value):
"""
Sets the value of :py:attr:`learningOffset`.
>>> algo = LDA().setLearningOffset(100)
>>> algo.getLearningOffset()
100.0
"""
return self._set(learningOffset=value)
@since("2.0.0")
def getLearningOffset(self):
"""
Gets the value of :py:attr:`learningOffset` or its default value.
"""
return self.getOrDefault(self.learningOffset)
@since("2.0.0")
def setLearningDecay(self, value):
"""
Sets the value of :py:attr:`learningDecay`.
>>> algo = LDA().setLearningDecay(0.1)
>>> algo.getLearningDecay()
0.1...
"""
return self._set(learningDecay=value)
@since("2.0.0")
def getLearningDecay(self):
"""
Gets the value of :py:attr:`learningDecay` or its default value.
"""
return self.getOrDefault(self.learningDecay)
@since("2.0.0")
def setSubsamplingRate(self, value):
"""
Sets the value of :py:attr:`subsamplingRate`.
>>> algo = LDA().setSubsamplingRate(0.1)
>>> algo.getSubsamplingRate()
0.1...
"""
return self._set(subsamplingRate=value)
@since("2.0.0")
def getSubsamplingRate(self):
"""
Gets the value of :py:attr:`subsamplingRate` or its default value.
"""
return self.getOrDefault(self.subsamplingRate)
@since("2.0.0")
def setOptimizeDocConcentration(self, value):
"""
Sets the value of :py:attr:`optimizeDocConcentration`.
>>> algo = LDA().setOptimizeDocConcentration(True)
>>> algo.getOptimizeDocConcentration()
True
"""
return self._set(optimizeDocConcentration=value)
@since("2.0.0")
def getOptimizeDocConcentration(self):
"""
Gets the value of :py:attr:`optimizeDocConcentration` or its default value.
"""
return self.getOrDefault(self.optimizeDocConcentration)
@since("2.0.0")
def setDocConcentration(self, value):
"""
Sets the value of :py:attr:`docConcentration`.
>>> algo = LDA().setDocConcentration([0.1, 0.2])
>>> algo.getDocConcentration()
[0.1..., 0.2...]
"""
return self._set(docConcentration=value)
@since("2.0.0")
def getDocConcentration(self):
"""
Gets the value of :py:attr:`docConcentration` or its default value.
"""
return self.getOrDefault(self.docConcentration)
@since("2.0.0")
def setTopicConcentration(self, value):
"""
Sets the value of :py:attr:`topicConcentration`.
>>> algo = LDA().setTopicConcentration(0.5)
>>> algo.getTopicConcentration()
0.5...
"""
return self._set(topicConcentration=value)
@since("2.0.0")
def getTopicConcentration(self):
"""
Gets the value of :py:attr:`topicConcentration` or its default value.
"""
return self.getOrDefault(self.topicConcentration)
@since("2.0.0")
def setTopicDistributionCol(self, value):
"""
Sets the value of :py:attr:`topicDistributionCol`.
>>> algo = LDA().setTopicDistributionCol("topicDistributionCol")
>>> algo.getTopicDistributionCol()
'topicDistributionCol'
"""
return self._set(topicDistributionCol=value)
@since("2.0.0")
def getTopicDistributionCol(self):
"""
Gets the value of :py:attr:`topicDistributionCol` or its default value.
"""
return self.getOrDefault(self.topicDistributionCol)
@since("2.0.0")
def setKeepLastCheckpoint(self, value):
    """
    Sets the value of :py:attr:`keepLastCheckpoint`.

    :param value: the new boolean value for the parameter.

    >>> algo = LDA().setKeepLastCheckpoint(False)
    >>> algo.getKeepLastCheckpoint()
    False
    """
    # Delegates to the shared Params._set machinery.
    return self._set(keepLastCheckpoint=value)
@since("2.0.0")
def getKeepLastCheckpoint(self):
    """
    Returns the current value of :py:attr:`keepLastCheckpoint`, falling
    back to the parameter's default when it has not been explicitly set.
    """
    param = self.keepLastCheckpoint
    return self.getOrDefault(param)
@inherit_doc
class PowerIterationClustering(HasMaxIter, HasWeightCol, JavaParams, JavaMLReadable,
                               JavaMLWritable):
    """
    .. note:: Experimental

    Power Iteration Clustering (PIC), a scalable graph clustering algorithm developed by
    `Lin and Cohen <http://www.cs.cmu.edu/~frank/papers/icml2010-pic-final.pdf>`_. From the
    abstract: PIC finds a very low-dimensional embedding of a dataset using truncated power
    iteration on a normalized pair-wise similarity matrix of the data.

    This class is not yet an Estimator/Transformer, use :py:func:`assignClusters` method
    to run the PowerIterationClustering algorithm.

    .. seealso:: `Wikipedia on Spectral clustering
        <http://en.wikipedia.org/wiki/Spectral_clustering>`_

    >>> data = [(1, 0, 0.5),
    ...         (2, 0, 0.5), (2, 1, 0.7),
    ...         (3, 0, 0.5), (3, 1, 0.7), (3, 2, 0.9),
    ...         (4, 0, 0.5), (4, 1, 0.7), (4, 2, 0.9), (4, 3, 1.1),
    ...         (5, 0, 0.5), (5, 1, 0.7), (5, 2, 0.9), (5, 3, 1.1), (5, 4, 1.3)]
    >>> df = spark.createDataFrame(data).toDF("src", "dst", "weight")
    >>> pic = PowerIterationClustering(k=2, maxIter=40, weightCol="weight")
    >>> assignments = pic.assignClusters(df)
    >>> assignments.sort(assignments.id).show(truncate=False)
    +---+-------+
    |id |cluster|
    +---+-------+
    |0  |1      |
    |1  |1      |
    |2  |1      |
    |3  |1      |
    |4  |1      |
    |5  |0      |
    +---+-------+
    ...
    >>> pic_path = temp_path + "/pic"
    >>> pic.save(pic_path)
    >>> pic2 = PowerIterationClustering.load(pic_path)
    >>> pic2.getK()
    2
    >>> pic2.getMaxIter()
    40

    .. versionadded:: 2.4.0
    """

    # Param declarations; Params._dummy() is the conventional placeholder
    # parent used for class-level Param definitions.
    k = Param(Params._dummy(), "k",
              "The number of clusters to create. Must be > 1.",
              typeConverter=TypeConverters.toInt)
    initMode = Param(Params._dummy(), "initMode",
                     "The initialization algorithm. This can be either " +
                     "'random' to use a random vector as vertex properties, or 'degree' to use " +
                     "a normalized sum of similarities with other vertices. Supported options: " +
                     "'random' and 'degree'.",
                     typeConverter=TypeConverters.toString)
    srcCol = Param(Params._dummy(), "srcCol",
                   "Name of the input column for source vertex IDs.",
                   typeConverter=TypeConverters.toString)
    dstCol = Param(Params._dummy(), "dstCol",
                   "Name of the input column for destination vertex IDs.",
                   typeConverter=TypeConverters.toString)

    @keyword_only
    def __init__(self, k=2, maxIter=20, initMode="random", srcCol="src", dstCol="dst",
                 weightCol=None):
        """
        __init__(self, k=2, maxIter=20, initMode="random", srcCol="src", dstCol="dst",\
                 weightCol=None)
        """
        super(PowerIterationClustering, self).__init__()
        # Create the JVM-side counterpart and register the Python defaults.
        self._java_obj = self._new_java_obj(
            "org.apache.spark.ml.clustering.PowerIterationClustering", self.uid)
        self._setDefault(k=2, maxIter=20, initMode="random", srcCol="src", dstCol="dst")
        # _input_kwargs is populated by the @keyword_only decorator.
        kwargs = self._input_kwargs
        self.setParams(**kwargs)

    @keyword_only
    @since("2.4.0")
    def setParams(self, k=2, maxIter=20, initMode="random", srcCol="src", dstCol="dst",
                  weightCol=None):
        """
        setParams(self, k=2, maxIter=20, initMode="random", srcCol="src", dstCol="dst",\
                  weightCol=None)
        Sets params for PowerIterationClustering.
        """
        kwargs = self._input_kwargs
        return self._set(**kwargs)

    @since("2.4.0")
    def setK(self, value):
        """
        Sets the value of :py:attr:`k`.
        """
        return self._set(k=value)

    @since("2.4.0")
    def getK(self):
        """
        Gets the value of :py:attr:`k` or its default value.
        """
        return self.getOrDefault(self.k)

    @since("2.4.0")
    def setInitMode(self, value):
        """
        Sets the value of :py:attr:`initMode`.
        """
        return self._set(initMode=value)

    @since("2.4.0")
    def getInitMode(self):
        """
        Gets the value of :py:attr:`initMode` or its default value.
        """
        return self.getOrDefault(self.initMode)

    @since("2.4.0")
    def setSrcCol(self, value):
        """
        Sets the value of :py:attr:`srcCol`.
        """
        return self._set(srcCol=value)

    @since("2.4.0")
    def getSrcCol(self):
        """
        Gets the value of :py:attr:`srcCol` or its default value.
        """
        return self.getOrDefault(self.srcCol)

    @since("2.4.0")
    def setDstCol(self, value):
        """
        Sets the value of :py:attr:`dstCol`.
        """
        return self._set(dstCol=value)

    @since("2.4.0")
    def getDstCol(self):
        """
        Gets the value of :py:attr:`dstCol` or its default value.
        """
        return self.getOrDefault(self.dstCol)

    @since("2.4.0")
    def assignClusters(self, dataset):
        """
        Run the PIC algorithm and returns a cluster assignment for each input vertex.

        :param dataset:
          A dataset with columns src, dst, weight representing the affinity matrix,
          which is the matrix A in the PIC paper. Suppose the src column value is i,
          the dst column value is j, the weight column value is similarity s,,ij,,
          which must be nonnegative. This is a symmetric matrix and hence
          s,,ij,, = s,,ji,,. For any (i, j) with nonzero similarity, there should be
          either (i, j, s,,ij,,) or (j, i, s,,ji,,) in the input. Rows with i = j are
          ignored, because we assume s,,ij,, = 0.0.

        :return:
          A dataset that contains columns of vertex id and the corresponding cluster for
          the id. The schema of it will be:
          - id: Long
          - cluster: Int

        .. versionadded:: 2.4.0
        """
        # Push current Python-side params to the JVM object before running.
        self._transfer_params_to_java()
        jdf = self._java_obj.assignClusters(dataset._jdf)
        return DataFrame(jdf, dataset.sql_ctx)
if __name__ == "__main__":
    # Self-test harness: run all doctests in pyspark.ml.clustering against a
    # local 2-core Spark session, then exit non-zero on any failure.
    import doctest
    import sys  # needed for sys.exit below; not among the visible module imports
    import tempfile
    import numpy
    import pyspark.ml.clustering
    from pyspark.sql import SparkSession
    try:
        # NumPy 1.14+ changed its string format; pin the legacy format so the
        # doctest expected outputs keep matching.
        numpy.set_printoptions(legacy='1.13')
    except TypeError:
        # Older NumPy has no 'legacy' keyword; its output already matches.
        pass
    globs = pyspark.ml.clustering.__dict__.copy()
    # The small batch size here ensures that we see multiple batches,
    # even in these small test examples:
    spark = SparkSession.builder\
        .master("local[2]")\
        .appName("ml.clustering tests")\
        .getOrCreate()
    sc = spark.sparkContext
    globs['sc'] = sc
    globs['spark'] = spark
    temp_path = tempfile.mkdtemp()
    globs['temp_path'] = temp_path
    try:
        (failure_count, test_count) = doctest.testmod(globs=globs, optionflags=doctest.ELLIPSIS)
        spark.stop()
    finally:
        # Always remove the scratch directory, even if the doctests blow up.
        from shutil import rmtree
        try:
            rmtree(temp_path)
        except OSError:
            pass
    if failure_count:
        sys.exit(-1)
|
WindCanDie/spark
|
python/pyspark/ml/clustering.py
|
Python
|
apache-2.0
| 49,801
|
[
"Gaussian"
] |
ff9878cb71682bc90d36c3b3f9abf1f379b3cf7e220a8fc6082abbe552e1d84d
|
#! /usr/bin/env python
# This file is part of python-misp.
#
# Copyright 2015 Nicolas Bareil <nicolas.bareil@airbus.com>
# while at Airbus Group CERT <http://www.airbus.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from mispy.misp import *
class MispEventTest(unittest.TestCase):
    """Tests for MispEvent XML parsing and round-trip serialization."""

    def test_good_xml(self):
        # A complete, well-formed <Event> with a single Attribute: every XML
        # element should be mapped onto the corresponding MispEvent field.
        s = r"""<Event>
<id>42</id>
<orgc_id>2</orgc_id>
<org_id>2</org_id>
<date>2015-10-20</date>
<threat_level_id>3</threat_level_id>
<info>AGNOSTIC PANDA</info>
<published>1</published>
<uuid>56278fd8-f2c0-4907-bcca-594e0a3ac101</uuid>
<analysis>2</analysis>
<timestamp>1445434988</timestamp>
<distribution>1</distribution>
<publish_timestamp>1445435155</publish_timestamp>
<sharing_group_id>0</sharing_group_id>
<Org>
<id>2</id>
<name>ACME and bro.</name>
<uuid>56278fd8-f2c0-4907-bcca-594e0a3ac101</uuid>
</Org>
<Orgc>
<id>2</id>
<name>ACME Corporation</name>
<uuid>56278fd8-f2c0-4907-bcca-594e0a3ac101</uuid>
</Orgc>
<Attribute>
<id>4442</id>
<type>md5</type>
<category>Payload delivery</category>
<to_ids>1</to_ids>
<uuid>56c577ed-94e0-4446-a639-40200a3ac101</uuid>
<event_id>1172</event_id>
<distribution>5</distribution>
<timestamp>1455781869</timestamp>
<comment/>
<sharing_group_id>0</sharing_group_id>
<value>a283e768fa12ef33087f07b01f82d6dd</value>
<ShadowAttribute/>
</Attribute>
<ShadowAttribute/>
<RelatedEvent/>
</Event>
"""
        m = MispEvent.from_xml(s)
        self.assertEqual(m.uuid, '56278fd8-f2c0-4907-bcca-594e0a3ac101')
        self.assertEqual(m.id, 42)
        # m.org comes from <Org><name>, m.orgc from <Orgc><name>.
        self.assertEqual(m.org, 'ACME and bro.')
        self.assertEqual(m.date, '2015-10-20')
        self.assertEqual(m.threat_level_id, 3)
        self.assertEqual(m.info, 'AGNOSTIC PANDA')
        self.assertEqual(m.published, 1)
        self.assertEqual(m.analysis, 2)
        self.assertEqual(m.timestamp, 1445434988)
        self.assertEqual(m.distribution, 1)
        self.assertEqual(m.orgc, 'ACME Corporation')
        # 'locked' is absent from the XML; expected to default to 0.
        self.assertEqual(m.locked, 0)
        self.assertEqual(m.publish_timestamp, 1445435155)
        for attr in m.attributes:
            self.assertEqual(attr.value, 'a283e768fa12ef33087f07b01f82d6dd')

    def test_good_xml_full_generation(self):
        # Round trip: parse -> to_xml() -> parse again; all fields must
        # survive serialization unchanged.
        s = r"""<Event>
<id>42</id>
<Org>
<name>ACME and bro.</name>
<id>12</id>
<uuid>464d9146-2c34-43df-906a-7bc40a3ac101</uuid>
</Org>
<Orgc>
<name>ACME Corporation</name>
<id>13</id>
<uuid>164d9146-2c34-43df-906a-7bc40a3ac101</uuid>
</Orgc> <date>2015-10-20</date>
<threat_level_id>3</threat_level_id>
<info>AGNOSTIC PANDA</info>
<published>1</published>
<uuid>56278fd8-f2c0-4907-bcca-594e0a3ac101</uuid>
<attribute_count>8</attribute_count>
<analysis>2</analysis>
<timestamp>1445434988</timestamp>
<distribution>1</distribution>
<proposal_email_lock>0</proposal_email_lock>
<locked>0</locked>
<publish_timestamp>1445435155</publish_timestamp>
</Event>
"""
        m = MispEvent.from_xml(s)
        new = m.to_xml()
        m = MispEvent.from_xml(new)
        self.assertEqual(m.uuid, '56278fd8-f2c0-4907-bcca-594e0a3ac101')
        self.assertEqual(m.id, 42)
        self.assertEqual(m.org, 'ACME and bro.')
        self.assertEqual(m.date, '2015-10-20')
        self.assertEqual(m.threat_level_id, 3)
        self.assertEqual(m.info, 'AGNOSTIC PANDA')
        self.assertEqual(m.published, 1)
        self.assertEqual(m.analysis, 2)
        self.assertEqual(m.timestamp, 1445434988)
        self.assertEqual(m.distribution, 1)
        self.assertEqual(m.orgc, 'ACME Corporation')
        self.assertEqual(m.locked, 0)
        self.assertEqual(m.publish_timestamp, 1445435155)

    def test_good_xml_generation(self):
        # A freshly-built event should keep an assigned org through a
        # serialize/deserialize cycle.
        company = 'ACME Corporation'
        m = MispEvent()
        m.org = company
        serialized_evt = m.to_xml()
        obj = MispEvent.from_xml(serialized_evt)
        self.assertEqual(obj.org, company)

    def test_bad_xml(self):
        # Malformed XML must surface lxml's parse error, not be swallowed.
        with self.assertRaises(lxml.etree.XMLSyntaxError):
            MispEvent.from_xml('<foo')

    def test_good_time_format(self):
        # Assigning a datetime to publish_timestamp should store it as an
        # integer epoch timestamp.
        m = MispEvent()
        d = datetime.datetime.now()
        m.publish_timestamp = d
        self.assertEqual(m.publish_timestamp, int(time.mktime(d.timetuple())))

    def test_tags_in_good_xml(self):
        # Same event as test_good_xml plus three <Tag> elements; all tags
        # should be collected into m.tags.
        s = r"""<Event>
<id>42</id>
<orgc_id>2</orgc_id>
<org_id>2</org_id>
<date>2015-10-20</date>
<threat_level_id>3</threat_level_id>
<info>AGNOSTIC PANDA</info>
<published>1</published>
<uuid>56278fd8-f2c0-4907-bcca-594e0a3ac101</uuid>
<analysis>2</analysis>
<timestamp>1445434988</timestamp>
<distribution>1</distribution>
<publish_timestamp>1445435155</publish_timestamp>
<sharing_group_id>0</sharing_group_id>
<Org>
<id>2</id>
<name>ACME and bro.</name>
<uuid>56278fd8-f2c0-4907-bcca-594e0a3ac101</uuid>
</Org>
<Orgc>
<id>2</id>
<name>ACME Corporation</name>
<uuid>56278fd8-f2c0-4907-bcca-594e0a3ac101</uuid>
</Orgc>
<Attribute>
<id>4442</id>
<type>md5</type>
<category>Payload delivery</category>
<to_ids>1</to_ids>
<uuid>56c577ed-94e0-4446-a639-40200a3ac101</uuid>
<event_id>1172</event_id>
<distribution>5</distribution>
<timestamp>1455781869</timestamp>
<comment/>
<sharing_group_id>0</sharing_group_id>
<value>a283e768fa12ef33087f07b01f82d6dd</value>
<ShadowAttribute/>
</Attribute>
<ShadowAttribute/>
<RelatedEvent/>
<Tag><id>5</id><name>APT1</name><colour>#ffad0d</colour><exportable>1</exportable><org_id>0</org_id></Tag>
<Tag><id>3</id><name>TLP:RED</name><colour>#04cc18</colour><exportable>1</exportable><org_id>0</org_id></Tag>
<Tag><id>7</id><name>CONFIDENTIAL</name><colour>#cccccc</colour><exportable>1</exportable><org_id>0</org_id></Tag>
</Event>
"""
        m = MispEvent.from_xml(s)
        self.assertEqual(m.uuid, '56278fd8-f2c0-4907-bcca-594e0a3ac101')
        self.assertEqual(m.id, 42)
        self.assertEqual(m.org, 'ACME and bro.')
        self.assertEqual(m.date, '2015-10-20')
        self.assertEqual(m.threat_level_id, 3)
        self.assertEqual(m.info, 'AGNOSTIC PANDA')
        self.assertEqual(m.published, 1)
        self.assertEqual(m.analysis, 2)
        self.assertEqual(m.timestamp, 1445434988)
        self.assertEqual(m.distribution, 1)
        self.assertEqual(m.orgc, 'ACME Corporation')
        self.assertEqual(len(m.tags), 3)
class MispTagTest(unittest.TestCase):
    """Unit test for deserializing a MISP <Tag> element."""

    def test_from_xml(self):
        """Every field of a well-formed <Tag> is parsed with its proper type."""
        xml_fixture = r"""
<Tag><id>3</id><name>TLP:GREEN</name><colour>#04cc18</colour><exportable>1</exportable><org_id>0</org_id></Tag>
"""
        parsed = MispTag.from_xml(xml_fixture)
        self.assertEqual(parsed.name, "TLP:GREEN")
        self.assertEqual(parsed.colour, "#04cc18")
        self.assertEqual(parsed.id, 3)
        self.assertEqual(parsed.org_id, 0)
        self.assertEqual(parsed.exportable, True)
class MispAttrTest(unittest.TestCase):
    """Unit tests for MispAttribute parsing, serialization and validation.

    Note: the original code used ``assertEquals``, a deprecated alias of
    ``assertEqual`` that was removed in Python 3.12; this version uses the
    canonical name throughout.
    """

    def test_fromtofrom_xml(self):
        # Round trip: parse XML -> to_xml() -> parse again; all fields must
        # survive serialization unchanged.
        s = r"""<Attribute>
<id>87183</id>
<type>md5</type>
<category>Payload delivery</category>
<to_ids>1</to_ids>
<uuid>56c577ed-94e0-4446-a639-40200a3ac101</uuid>
<event_id>42</event_id>
<distribution>5</distribution>
<timestamp>1445434872</timestamp>
<comment>loooool</comment>
<sharing_group_id>0</sharing_group_id>
<value>a283e768fa12ef33087f07b01f82d6dd</value>
<ShadowAttribute/>
</Attribute>"""
        a = MispAttribute.from_xml(s)
        s = a.to_xml()
        a = MispAttribute.from_xml(s)
        self.assertEqual(a.type, 'md5')
        self.assertEqual(a.category, 'Payload delivery')
        self.assertEqual(a.to_ids, 1)
        self.assertEqual(a.uuid, '56c577ed-94e0-4446-a639-40200a3ac101')
        self.assertEqual(a.event_id, 42)
        self.assertEqual(a.distribution, 5)
        self.assertEqual(a.timestamp, 1445434872)
        self.assertEqual(a.comment, 'loooool')
        self.assertEqual(a.value, 'a283e768fa12ef33087f07b01f82d6dd')
        # The value must come back as a native str, not bytes/lxml text.
        self.assertEqual(a.value.__class__, str)

    def test_from_xml(self):
        # Single parse: every XML element maps to the corresponding field.
        s = r"""<Attribute>
<id>87183</id>
<type>md5</type>
<category>Payload delivery</category>
<to_ids>1</to_ids>
<uuid>56c577ed-94e0-4446-a639-40200a3ac101</uuid>
<event_id>42</event_id>
<distribution>5</distribution>
<timestamp>1445434872</timestamp>
<comment>loooool</comment>
<sharing_group_id>0</sharing_group_id>
<value>a283e768fa12ef33087f07b01f82d6dd</value>
<ShadowAttribute/>
</Attribute>"""
        a = MispAttribute.from_xml(s)
        self.assertEqual(a.id, 87183)
        self.assertEqual(a.type, 'md5')
        self.assertEqual(a.category, 'Payload delivery')
        self.assertEqual(a.to_ids, 1)
        self.assertEqual(a.uuid, '56c577ed-94e0-4446-a639-40200a3ac101')
        self.assertEqual(a.event_id, 42)
        self.assertEqual(a.distribution, 5)
        self.assertEqual(a.timestamp, 1445434872)
        self.assertEqual(a.comment, 'loooool')
        self.assertEqual(a.value, 'a283e768fa12ef33087f07b01f82d6dd')

    def test_bad_category(self):
        # Assigning an unknown category must be rejected.
        attr = MispAttribute()
        with self.assertRaises(ValueError):
            attr.category = 'foobar'

    def test_bad_threat_lvl(self):
        # Threat level 5 is out of range and must be rejected.
        attr = MispAttribute()
        with self.assertRaises(ValueError):
            attr.threat_level_id = 5

    def test_bad_analysis(self):
        # Analysis level 5 is out of range and must be rejected.
        attr = MispAttribute()
        with self.assertRaises(ValueError):
            attr.analysis = 5

    def test_good_inner_attribute(self):
        # Plain construction must succeed without raising.
        attr = MispAttribute()

    def test_bad_types(self):
        # An unknown type is rejected, while every documented MISP type is
        # accepted without raising.
        attr = MispAttribute()
        with self.assertRaises(ValueError):
            attr.type = 'foobar'
        valid_types = ['AS', 'aba-rtn', 'anonymised', 'attachment',
                       'authentihash', 'bank-account-nr', 'bic', 'bin', 'boolean',
                       'bro', 'btc', 'campaign-id', 'campaign-name', 'cc-number',
                       'cdhash', 'comment', 'cookie', 'cortex', 'counter',
                       'country-of-residence', 'cpe', 'date-of-birth', 'datetime',
                       'dns-soa-email', 'domain', 'domain|ip', 'email-attachment',
                       'email-body', 'email-dst', 'email-dst-display-name',
                       'email-header', 'email-message-id', 'email-mime-boundary',
                       'email-reply-to', 'email-src', 'email-src-display-name',
                       'email-subject', 'email-thread-index', 'email-x-mailer',
                       'filename', 'filename|authentihash', 'filename|impfuzzy',
                       'filename|imphash', 'filename|md5', 'filename|pehash',
                       'filename|sha1', 'filename|sha224', 'filename|sha256',
                       'filename|sha384', 'filename|sha512', 'filename|sha512/224',
                       'filename|sha512/256', 'filename|ssdeep', 'filename|tlsh',
                       'first-name', 'float', 'frequent-flyer-number', 'gender',
                       'gene', 'github-organisation', 'github-repository',
                       'github-username', 'hassh-md5', 'hasshserver-md5', 'hex',
                       'hostname', 'hostname|port', 'http-method', 'iban',
                       'identity-card-number', 'impfuzzy', 'imphash', 'ip-dst',
                       'ip-dst|port', 'ip-src', 'ip-src|port', 'issue-date-of-the-visa',
                       'ja3-fingerprint-md5', 'jabber-id', 'last-name', 'link',
                       'mac-address', 'mac-eui-64', 'malware-sample', 'malware-type',
                       'md5', 'middle-name', 'mime-type', 'mobile-application-id',
                       'mutex', 'named', 'nationality', 'other',
                       'passenger-name-record-locator-number', 'passport-country',
                       'passport-expiration', 'passport-number', 'pattern-in-file',
                       'pattern-in-memory', 'pattern-in-traffic', 'payment-details',
                       'pdb', 'pehash', 'phone-number', 'place-of-birth',
                       'place-port-of-clearance', 'place-port-of-onward-foreign-destination',
                       'place-port-of-original-embarkation', 'port', 'primary-residence',
                       'prtn', 'redress-number', 'regkey', 'regkey|value', 'sha1',
                       'sha224', 'sha256', 'sha384', 'sha512', 'sha512/224',
                       'sha512/256', 'sigma', 'size-in-bytes', 'snort',
                       'special-service-request', 'ssdeep', 'stix2-pattern',
                       'target-email', 'target-external', 'target-location',
                       'target-machine', 'target-org', 'target-user', 'text',
                       'threat-actor', 'tlsh', 'travel-details', 'twitter-id', 'uri',
                       'url', 'user-agent', 'visa-number', 'vulnerability',
                       'whois-creation-date', 'whois-registrant-email', 'whois-registrant-name',
                       'whois-registrant-org', 'whois-registrant-phone', 'whois-registrar',
                       'windows-scheduled-task', 'windows-service-displayname',
                       'windows-service-name', 'x509-fingerprint-md5', 'x509-fingerprint-sha1',
                       'x509-fingerprint-sha256', 'xmr', 'yara', 'zeek']
        for t in valid_types:
            attr.type = t
class MispServerTest(unittest.TestCase):
    """Integration tests against a live MISP server.

    All methods are prefixed ``disabled_`` instead of ``test_`` so that
    unittest does NOT collect them: they require network access and a
    configured server (TEST_EVT_ID etc. come from the mispy.misp star
    import). Rename a method to ``test_...`` to run it manually.
    """

    def disabled_test_get_event(self):
        # Fetch a known event by id and check the id round-trips.
        m = MispServer()
        evt = m.events.get(TEST_EVT_ID)
        self.assertEqual(evt.id, TEST_EVT_ID)

    def disabled_test_search_event(self):
        # Searching for a known attribute value must return exactly the
        # event that contains it.
        m = MispServer()
        evt=m.events.search(value=TEST_NEEDLE)
        self.assertEqual(len(evt), 1)
        self.assertEqual(evt[0].id, TEST_EVT_ID)
        ok=False
        for event in evt:
            for attr in event.attributes:
                if attr.value == TEST_NEEDLE:
                    ok=True
                    break
        self.assertEqual(ok, True)

    def disabled_test_last(self):
        # The most recent event on the server must match the expected id.
        m = MispServer()
        self.assertEqual(m.events.last().id, TEST_LAST_EVT_ID)

    def disabled_test_create_event(self):
        # Push a minimal new event to the server.
        m = MispServer()
        e = MispEvent()
        e.info = 'Hello world'
        e.orgc = DEFAULT_ORGC
        e.org = DEFAULT_ORG
        e.published = 0
        e.distribution = 0
        m.events.put(e)

    def disabled_test_modify_event(self):
        # Append a new attribute to an existing event and update it.
        m = MispServer()
        e = m.events.get(TEST_EVT_ID)
        e.timestamp = datetime.datetime.now()
        a = MispAttribute()
        a.value = 'foobar%d.com' % time.time()
        a.comment = 'evil domain'
        a.category = 'Network activity'
        a.type = 'domain'
        e.attributes.add(a)
        m.events.update(e)

    def disabled_test_modify_attr(self):
        # Find an attribute whose value starts with 'tata' and update its
        # comment in place.
        m = MispServer()
        event = m.events.get(TEST_EVT_ID)
        updateme = None
        for attr in event.attributes:
            if str(attr.value).startswith('tata'):
                updateme = attr
                break
        self.assertIsNotNone(updateme)
        updateme.comment = 'Hello; %s' % datetime.datetime.now()
        m.attributes.update(updateme)
class MispTransportErrorTest(unittest.TestCase):
    """Regression test: MispTransportError must expose its arguments on
    both Python 2 and Python 3."""

    def test_python3_bug(self):
        error = MispTransportError('POST %s: returned status=%d', '/stuff', 404)
        self.assertEqual(error.path, '/stuff')
        self.assertEqual(error.status_code, 404)
        # Indexing an exception directly only works on Python 2; Python 3
        # raises TypeError, which is tolerated here on purpose.
        try:
            self.assertEqual(error[2], 404)
        except TypeError:
            # That's ok it means you are testing with python 3
            pass
        # The args tuple works on every Python version.
        self.assertEqual(error.args[2], 404)
class MispObjectTest(unittest.TestCase):
    """Test for deserializing a MISP <Object> element with nested Attributes."""

    def test_from_xml(self):
        # A 'file' Object carrying two Attributes; the object-level fields
        # and the attribute count should be parsed.
        xml = """<Object>
<id>1234</id>
<name>file</name>
<meta-category>file</meta-category>
<description>File object describing a file with meta-information</description>
<template_uuid>688c46fb-5edb-40a3-8273-1af7923e2215</template_uuid>
<template_version>13</template_version>
<event_id>9876</event_id>
<uuid>5c9c8b6f-bb24-4e6c-ab83-18c60a3a5cf9</uuid>
<timestamp>1553763183</timestamp>
<distribution>1</distribution>
<sharing_group_id>0</sharing_group_id>
<comment>Hello</comment>
<deleted>0</deleted>
<ObjectReference/>
<Attribute>
<id>2640682</id>
<type>malware-sample</type>
<category>Payload installation</category>
<to_ids>1</to_ids>
<uuid>5c9c8b70-4814-493b-a891-18c60a3a5cf9</uuid>
<event_id>14584</event_id>
<distribution>1</distribution>
<timestamp>1553763184</timestamp>
<comment/>
<sharing_group_id>0</sharing_group_id>
<deleted>0</deleted>
<disable_correlation>0</disable_correlation>
<object_id>292731</object_id>
<object_relation>malware-sample</object_relation>
<value>/tmp/a.exe|d41d8cd98f00b204e9800998ecf8427e</value>
<Galaxy/>
<data>abcdef</data>
<ShadowAttribute/>
</Attribute>
<Attribute>
<id>2640683</id>
<type>filename</type>
<category>Payload installation</category>
<to_ids>0</to_ids>
<uuid>5c9c8b73-0418-474f-a2ee-18c60a3a5cf9</uuid>
<event_id>14584</event_id>
<distribution>1</distribution>
<timestamp>1553763187</timestamp>
<comment/>
<sharing_group_id>0</sharing_group_id>
<deleted>0</deleted>
<disable_correlation>0</disable_correlation>
<object_id>292731</object_id>
<object_relation>filename</object_relation>
<value>/tmp/a.exe</value>
<Galaxy/>
<ShadowAttribute/>
</Attribute>
</Object>"""
        obj = MispObject.from_xml(xml)
        self.assertEqual(obj.id, 1234)
        self.assertEqual(obj.name, "file")
        self.assertEqual(obj.comment, "Hello")
        self.assertEqual(obj.event_id, 9876)
        self.assertEqual(obj.timestamp, 1553763183)
        # <meta-category> maps to the Python-safe name meta_category.
        self.assertEqual(obj.meta_category, "file")
        self.assertEqual(len(obj.attributes), 2)
if __name__ == '__main__':
    # Run the full test suite when this file is executed directly.
    unittest.main()
|
nbareil/python-misp
|
misp_test.py
|
Python
|
apache-2.0
| 18,323
|
[
"Galaxy"
] |
1ee661c2a1a8b7df4cb9349f2b82303929853397fbcbc652fcc28943890062d5
|
# Copyright 2006, 2007 by Peter Cock. All rights reserved.
#
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""Bio.SeqIO support for the "clustal" (aka ClustalW) file format.
You are expected to use this module via the Bio.SeqIO functions."""
#For reading alignments:
from Bio.Alphabet import single_letter_alphabet
from Bio.Seq import Seq
from Bio.SeqRecord import SeqRecord
#For writing alignments:
from Bio.SeqIO.Interfaces import SequenceWriter
from Bio.Clustalw import ClustalAlignment
#This is a generator function!
#TODO - Should the default be Gapped(single_letter_alphabet) instead?
def ClustalIterator(handle, alphabet = single_letter_alphabet) :
    """Reads a Clustalw file returning a SeqRecord object iterator.

    handle   - file-like object positioned at the start of a clustal file
    alphabet - alphabet passed to each Seq created (default
               single_letter_alphabet)

    Raises ValueError on a missing CLUSTAL header, an unparseable line, an
    inconsistent trailing letter count, mis-ordered identifiers between
    blocks, or sequences of differing final lengths.

    The entire file is loaded at once, but the SeqRecord objects
    are only created "on request".

    For more information on the file format, please see:
    http://www.bioperl.org/wiki/ClustalW_multiple_alignment_format

    You might like to look at Bio.Clustalw which has an interface
    to the command line tool clustalw, and can also parse clustal
    alignment files into Bio.Clustalw.ClustalAlignment objects.

    We call this the "clustal" format which is consistent with EMBOSS.
    Sadly BioPerl calls it the "clustalw" format, so we can't match
    them both.

    Note: the original implementation used the Python-2-only ``<>``
    operator; this version uses ``!=``, which behaves identically on
    Python 2 and also works on Python 3.
    """
    line = handle.readline()
    if not line: return
    if not line[:7] == 'CLUSTAL':
        raise ValueError("Did not find CLUSTAL header")
    #There should be two blank lines after the header line
    line = handle.readline()
    while line.strip() == "" :
        line = handle.readline()
    #If the alignment contains entries with the same sequence
    #identifier (not a good idea - but seems possible), then this
    #dictionary based parser will merge their sequences.  Fix this?
    ids = []
    seqs = []
    #Use the first block to get the sequence identifiers
    while line.strip() != "" :
        if line[0] != " " :
            #Sequences identifier...
            fields = line.rstrip().split()
            #We expect there to be two fields, there can be an optional
            #"sequence number" field containing the letter count.
            if len(fields) < 2 or len(fields) > 3:
                raise ValueError("Could not parse line:\n%s" % line)
            ids.append(fields[0])
            seqs.append(fields[1])
            if len(fields) == 3 :
                #This MAY be an old style file with a letter count...
                try :
                    letters = int(fields[2])
                except ValueError :
                    raise ValueError("Could not parse line, bad sequence number:\n%s" % line)
                #The count excludes gap characters.
                if len(fields[1].replace("-","")) != letters :
                    raise ValueError("Could not parse line, invalid sequence number:\n%s" % line)
        else :
            #Sequence consensus line...
            pass
        line = handle.readline()
        if not line : break #end of file
    assert line.strip() == ""
    #Loop over any remaining blocks...
    while True :
        #There should be a blank line between each block.
        #Also want to ignore any consensus line from the
        #previous block.
        while (not line) or line.strip() == "" or line[0]==" ":
            line = handle.readline()
            if not line : break # end of file
        if not line : break # end of file
        for i in range(len(ids)) :
            fields = line.rstrip().split()
            #We expect there to be two fields, there can be an optional
            #"sequence number" field containing the letter count.
            if len(fields) < 2 or len(fields) > 3:
                raise ValueError("Could not parse line:\n%s" % line)
            #Every block must list the sequences in the same order.
            if fields[0] != ids[i] :
                raise ValueError("Identifiers out of order? Got '%s' but expected '%s'" \
                                 % (fields[0], ids[i]))
            #Append the sequence
            seqs[i] += fields[1]
            if len(fields) == 3 :
                #This MAY be an old style file with a letter count...
                try :
                    letters = int(fields[2])
                except ValueError :
                    raise ValueError("Could not parse line, bad sequence number:\n%s" % line)
                #The running total also excludes gap characters.
                if len(seqs[i].replace("-","")) != letters :
                    raise ValueError("Could not parse line, invalid sequence number:\n%s" % line)
            #Read in the next line
            line = handle.readline()
    assert len(ids) == len(seqs)
    alignment_length = len(seqs[0])
    for i in range(len(ids)) :
        if len(seqs[i]) != alignment_length:
            raise ValueError("Error parsing alignment - sequences of different length?")
        yield SeqRecord(Seq(seqs[i], alphabet), id=ids[i])
class ClustalWriter(SequenceWriter):
    """Write Clustal sequence alignments.

    Note: the original implementation used the Python-2-only ``<>``
    operator; this version uses ``!=``, which behaves identically on
    Python 2 and also works on Python 3.
    """

    def __init__(self, handle):
        """Creates the writer object.

        handle - output file handle.  Use the method write_file() to
        actually record your sequence records."""
        self.handle = handle

    def write_file(self, records) :
        """Use this to write an entire file containing the given records.

        records - a SeqRecord iterator, or list of SeqRecords

        Raises ValueError if the records are empty, have differing
        lengths, or have zero length.

        Right now this code uses Bio.Clustalw.ClustalAlignment to do
        the hard work - this may change in the future.
        """
        # ToDo - decide if using Bio.Clustalw.ClustalAlignment is
        # actually the best way to handle this.
        #
        # Copying that thirty lines of code (with slight tweaks)
        # would be much simpler, and would probably run quicker and
        # use less memory as it doesn't build a ClustalAlignment
        # object.
        #
        # The downside is code duplication.
        length_of_sequences = None
        alignment = ClustalAlignment()
        for record in records :
            if length_of_sequences is None :
                # First record fixes the required length for all others.
                length_of_sequences = len(record.seq)
            elif length_of_sequences != len(record.seq) :
                raise ValueError("Sequences must all be the same length")
            if length_of_sequences <= 0 :
                raise ValueError("Non-empty sequences are required")
            #ToDo, check alphabet for this sequence matches that
            #specified for the alignment.  Not sure how the
            #alphabet.contains() method is intended to be used,
            #but it doesn't make sense to me right now.

            #Doing this works, but ClustalAlignment currently uses
            #the record.description when outputting the records.
            #alignment._records.append(record)

            #Make sure we don't get any spaces in the record
            #identifier when output in the file by replacing
            #them with underscores:
            alignment.add_sequence(record.id.replace(" ","_"),
                                   record.seq.tostring())
        if len(alignment.get_all_seqs()) == 0 :
            raise ValueError("Must have at least one sequence")
        self.handle.write(str(alignment))
        #Don't close the handle.  Doing so would prevent this code
        #from writing concatenated Clustal files which might be used
        #in phylogenetic bootstrapping (very common with phylip).
        #self.handle.close()
if __name__ == "__main__" :
    # Run a quick self-test: parse two embedded example alignments and
    # assert the expected identifiers and sequences come back.
    #This is a truncated version of the example in Tests/cw02.aln
    #Notice the inclusion of sequence numbers (right hand side)
    aln_example1 = \
"""CLUSTAL W (1.81) multiple sequence alignment
gi|4959044|gb|AAD34209.1|AF069 MENSDSNDKGSDQSAAQRRSQMDRLDREEAFYQFVNNLSEEDYRLMRDNN 50
gi|671626|emb|CAA85685.1| ---------MSPQTETKASVGFKAGVKEYKLTYYTPEYETKDTDILAAFR 41
* *: :: :. :* : :. : . :* :: .
gi|4959044|gb|AAD34209.1|AF069 LLGTPGESTEEELLRRLQQIKEGPPPQSPDENRAGESSDDVTNSDSIIDW 100
gi|671626|emb|CAA85685.1| VTPQPG-----------------VPPEEAGAAVAAESSTGT--------- 65
: ** **:... *.*** ..
gi|4959044|gb|AAD34209.1|AF069 LNSVRQTGNTTRSRQRGNQSWRAVSRTNPNSGDFRFSLEINVNRNNGSQT 150
gi|671626|emb|CAA85685.1| WTTVWTDGLTSLDRYKG-----RCYHIEPVPG------------------ 92
.:* * *: .* :* : :* .*
gi|4959044|gb|AAD34209.1|AF069 SENESEPSTRRLSVENMESSSQRQMENSASESASARPSRAERNSTEAVTE 200
gi|671626|emb|CAA85685.1| -EKDQCICYVAYPLDLFEEGSVTNMFTSIVGNVFGFKALRALRLEDLRIP 141
*::. . .:: :*..* :* .* .. . : . :
gi|4959044|gb|AAD34209.1|AF069 VPTTRAQRRA 210
gi|671626|emb|CAA85685.1| VAYVKTFQGP 151
*. .:: : .
"""
    #This example is a truncated version of the dataset used here:
    #http://virgil.ruc.dk/kurser/Sekvens/Treedraw.htm
    #with the last record repeated twice (deliberate torture test)
    aln_example2 = \
"""CLUSTAL X (1.83) multiple sequence alignment
V_Harveyi_PATH --MKNWIKVAVAAIA--LSAA------------------TVQAATEVKVG
B_subtilis_YXEM MKMKKWTVLVVAALLAVLSACG------------NGNSSSKEDDNVLHVG
B_subtilis_GlnH_homo_YCKK MKKALLALFMVVSIAALAACGAGNDNQSKDNAKDGDLWASIKKKGVLTVG
YA80_HAEIN MKKLLFTTALLTGAIAFSTF-----------SHAGEIADRVEKTKTLLVG
FLIY_ECOLI MKLAHLGRQALMGVMAVALVAG---MSVKSFADEG-LLNKVKERGTLLVG
E_coli_GlnH --MKSVLKVSLAALTLAFAVS------------------SHAADKKLVVA
Deinococcus_radiodurans -MKKSLLSLKLSGLLVPSVLALS--------LSACSSPSSTLNQGTLKIA
HISJ_E_COLI MKKLVLSLSLVLAFSSATAAF-------------------AAIPQNIRIG
HISJ_E_COLI MKKLVLSLSLVLAFSSATAAF-------------------AAIPQNIRIG
: . : :.
V_Harveyi_PATH MSGRYFPFTFVKQ--DKLQGFEVDMWDEIGKRNDYKIEYVTANFSGLFGL
B_subtilis_YXEM ATGQSYPFAYKEN--GKLTGFDVEVMEAVAKKIDMKLDWKLLEFSGLMGE
B_subtilis_GlnH_homo_YCKK TEGTYEPFTYHDKDTDKLTGYDVEVITEVAKRLGLKVDFKETQWGSMFAG
YA80_HAEIN TEGTYAPFTFHDK-SGKLTGFDVEVIRKVAEKLGLKVEFKETQWDAMYAG
FLIY_ECOLI LEGTYPPFSFQGD-DGKLTGFEVEFAQQLAKHLGVEASLKPTKWDGMLAS
E_coli_GlnH TDTAFVPFEFKQG--DKYVGFDVDLWAAIAKELKLDYELKPMDFSGIIPA
Deinococcus_radiodurans MEGTYPPFTSKNE-QGELVGFDVDIAKAVAQKLNLKPEFVLTEWSGILAG
HISJ_E_COLI TDPTYAPFESKNS-QGELVGFDIDLAKELCKRINTQCTFVENPLDALIPS
HISJ_E_COLI TDPTYAPFESKNS-QGELVGFDIDLAKELCKRINTQCTFVENPLDALIPS
** .: *::::. : :. . ..:
V_Harveyi_PATH LETGRIDTISNQITMTDARKAKYLFADPYVVDG-AQI
B_subtilis_YXEM LQTGKLDTISNQVAVTDERKETYNFTKPYAYAG-TQI
B_subtilis_GlnH_homo_YCKK LNSKRFDVVANQVG-KTDREDKYDFSDKYTTSR-AVV
YA80_HAEIN LNAKRFDVIANQTNPSPERLKKYSFTTPYNYSG-GVI
FLIY_ECOLI LDSKRIDVVINQVTISDERKKKYDFSTPYTISGIQAL
E_coli_GlnH LQTKNVDLALAGITITDERKKAIDFSDGYYKSG-LLV
Deinococcus_radiodurans LQANKYDVIVNQVGITPERQNSIGFSQPYAYSRPEII
HISJ_E_COLI LKAKKIDAIMSSLSITEKRQQEIAFTDKLYAADSRLV
HISJ_E_COLI LKAKKIDAIMSSLSITEKRQQEIAFTDKLYAADSRLV
*.: . * . * *: :
"""
    # Python 2 StringIO; on Python 3 this module is io.StringIO.
    from StringIO import StringIO
    records = list(ClustalIterator(StringIO(aln_example1)))
    assert 2 == len(records)
    assert records[0].id == "gi|4959044|gb|AAD34209.1|AF069"
    assert records[1].id == "gi|671626|emb|CAA85685.1|"
    # Blocks of 50 columns must be concatenated into one sequence.
    assert records[0].seq.tostring() == \
          "MENSDSNDKGSDQSAAQRRSQMDRLDREEAFYQFVNNLSEEDYRLMRDNN" + \
          "LLGTPGESTEEELLRRLQQIKEGPPPQSPDENRAGESSDDVTNSDSIIDW" + \
          "LNSVRQTGNTTRSRQRGNQSWRAVSRTNPNSGDFRFSLEINVNRNNGSQT" + \
          "SENESEPSTRRLSVENMESSSQRQMENSASESASARPSRAERNSTEAVTE" + \
          "VPTTRAQRRA"
    records = list(ClustalIterator(StringIO(aln_example2)))
    assert 9 == len(records)
    assert records[-1].id == "HISJ_E_COLI"
    assert records[-1].seq.tostring() == \
          "MKKLVLSLSLVLAFSSATAAF-------------------AAIPQNIRIG" + \
          "TDPTYAPFESKNS-QGELVGFDIDLAKELCKRINTQCTFVENPLDALIPS" + \
          "LKAKKIDAIMSSLSITEKRQQEIAFTDKLYAADSRLV"
|
dbmi-pitt/DIKB-Micropublication
|
scripts/mp-scripts/Bio/SeqIO/ClustalIO.py
|
Python
|
apache-2.0
| 12,544
|
[
"BioPerl",
"Biopython"
] |
97ca203ecbe8e668b9dce505fbc153bc07138bac3eac4223eea9e732057115ca
|
#!/usr/bin/env python
# encoding: utf-8
"""Traverse a directory tree.
"""
import os
import os.path
import pprint
def visit(arg, dirname, names):
    """Print one directory's listing; callback for os.path.walk (Python 2)."""
    # arg is the user-data string passed through walk(); dirname is the
    # directory currently being visited; names lists its entries.
    print dirname, arg
    for name in names:
        subname = os.path.join(dirname, name)
        if os.path.isdir(subname):
            # Trailing slash marks sub-directories in the listing.
            print '  %s/' % name
        else:
            print '  %s' % name
    # Blank line between directories.
    print
# Build a small example tree to walk (raises OSError if 'example' exists).
os.mkdir('example')
os.mkdir('example/one')
f = open('example/one/file.txt', 'wt')
f.write('contents')
f.close()
f = open('example/two.txt', 'wt')
f.write('contents')
f.close()
# Walk the tree; '(User data)' is forwarded to visit() as its *arg* parameter.
# NOTE(review): os.path.walk only exists in Python 2 (removed in 3.x).
os.path.walk('example', visit, '(User data)')
|
Akagi201/learning-python
|
ospath/ospath_walk.py
|
Python
|
mit
| 590
|
[
"VisIt"
] |
2c485e09ceaa877bc5cd95c2dd74c45dfe6c62f4c2d109ab93223a27c8ce2ce4
|
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import unittest
import os
import json
import warnings
import numpy as np
from pymatgen import Lattice, Structure, Specie, Molecule
from pymatgen.transformations.standard_transformations import \
OxidationStateDecorationTransformation, SubstitutionTransformation, \
OrderDisorderedStructureTransformation, AutoOxiStateDecorationTransformation
from pymatgen.transformations.advanced_transformations import \
SuperTransformation, EnumerateStructureTransformation, \
MultipleSubstitutionTransformation, ChargeBalanceTransformation, \
SubstitutionPredictorTransformation, MagOrderingTransformation, \
DopingTransformation, _find_codopant, SlabTransformation, \
MagOrderParameterConstraint, DisorderOrderedTransformation, \
GrainBoundaryTransformation, CubicSupercellTransformation, \
AddAdsorbateTransformation, SubstituteSurfaceSiteTransformation, \
SQSTransformation
from monty.os.path import which
from pymatgen.io.vasp.inputs import Poscar
from pymatgen.io.cif import CifParser
from pymatgen.symmetry.analyzer import SpacegroupAnalyzer
from pymatgen.analysis.energy_models import IsingModel
from pymatgen.analysis.gb.grain import GrainBoundaryGenerator
from pymatgen.util.testing import PymatgenTest
from pymatgen.core.surface import SlabGenerator
from pymatgen.io import atat
test_dir = os.path.join(os.path.dirname(__file__), "..", "..", "..",
'test_files')
def get_table():
    """
    Load a lightweight lambda table for the unit tests.

    Using a small fixed table keeps test initialization fast and makes the
    tests independent of changes to the default lambda table.
    """
    json_file = os.path.join(os.path.dirname(__file__), "..", "..", "..",
                             'test_files', 'struct_predictor',
                             'test_lambda.json')
    with open(json_file) as fp:
        return json.load(fp)
# Locate the external enumlib / mcsqs executables on PATH; tests that need
# them are skipped via @unittest.skipIf when the binaries are absent.
enum_cmd = which('enum.x') or which('multienum.x')
makestr_cmd = which('makestr.x') or which('makeStr.x') or which('makeStr.py')
mcsqs_cmd = which('mcsqs')
enumlib_present = enum_cmd and makestr_cmd
class SuperTransformationTest(unittest.TestCase):
    """Tests for SuperTransformation (applies several transformations at once)."""
    def setUp(self):
        warnings.simplefilter("ignore")
    def tearDown(self):
        warnings.simplefilter("default")
    def test_apply_transformation(self):
        # Two competing Li+ substitutions: one ranked entry per sub-transformation.
        tl = [SubstitutionTransformation({"Li+": "Na+"}),
              SubstitutionTransformation({"Li+": "K+"})]
        t = SuperTransformation(tl)
        coords = list()
        coords.append([0, 0, 0])
        coords.append([0.375, 0.375, 0.375])
        coords.append([.5, .5, .5])
        coords.append([0.875, 0.875, 0.875])
        coords.append([0.125, 0.125, 0.125])
        coords.append([0.25, 0.25, 0.25])
        coords.append([0.625, 0.625, 0.625])
        coords.append([0.75, 0.75, 0.75])
        lattice = Lattice([[3.8401979337, 0.00, 0.00],
                           [1.9200989668, 3.3257101909, 0.00],
                           [0.00, -2.2171384943, 3.1355090603]])
        struct = Structure(lattice, ["Li+", "Li+", "Li+", "Li+", "Li+", "Li+",
                                     "O2-", "O2-"], coords)
        s = t.apply_transformation(struct, return_ranked_list=True)
        # Each returned structure must match re-applying its own
        # transformation to the input structure.
        for s_and_t in s:
            self.assertEqual(s_and_t['transformation']
                             .apply_transformation(struct),
                             s_and_t['structure'])
    @unittest.skipIf(not enumlib_present, "enum_lib not present.")
    def test_apply_transformation_mult(self):
        # Test returning multiple structures from each transformation.
        disord = Structure(np.eye(3) * 4.209, [{"Cs+": 0.5, "K+": 0.5}, "Cl-"],
                           [[0, 0, 0], [0.5, 0.5, 0.5]])
        disord.make_supercell([2, 2, 1])
        tl = [EnumerateStructureTransformation(),
              OrderDisorderedStructureTransformation()]
        t = SuperTransformation(tl, nstructures_per_trans=10)
        self.assertEqual(len(t.apply_transformation(disord,
                                                    return_ranked_list=20)), 8)
        t = SuperTransformation(tl)
        self.assertEqual(len(t.apply_transformation(disord,
                                                    return_ranked_list=20)), 2)
class MultipleSubstitutionTransformationTest(unittest.TestCase):
    """Tests for MultipleSubstitutionTransformation."""
    def setUp(self):
        warnings.simplefilter("ignore")
    def tearDown(self):
        warnings.simplefilter("default")
    def test_apply_transformation(self):
        sub_dict = {1: ["Na", "K"]}
        t = MultipleSubstitutionTransformation("Li+", 0.5, sub_dict, None)
        coords = list()
        coords.append([0, 0, 0])
        coords.append([0.75, 0.75, 0.75])
        coords.append([0.5, 0.5, 0.5])
        coords.append([0.25, 0.25, 0.25])
        lattice = Lattice([[3.8401979337, 0.00, 0.00],
                           [1.9200989668, 3.3257101909, 0.00],
                           [0.00, -2.2171384943, 3.1355090603]])
        struct = Structure(lattice, ["Li+", "Li+", "O2-", "O2-"], coords)
        # One output structure per substituting species (Na and K).
        self.assertEqual(len(t.apply_transformation(struct,
                                                    return_ranked_list=True)),
                         2)
class ChargeBalanceTransformationTest(unittest.TestCase):
    """Tests for ChargeBalanceTransformation."""
    def test_apply_transformation(self):
        t = ChargeBalanceTransformation('Li+')
        coords = list()
        coords.append([0, 0, 0])
        coords.append([0.375, 0.375, 0.375])
        coords.append([.5, .5, .5])
        coords.append([0.875, 0.875, 0.875])
        coords.append([0.125, 0.125, 0.125])
        coords.append([0.25, 0.25, 0.25])
        coords.append([0.625, 0.625, 0.625])
        coords.append([0.75, 0.75, 0.75])
        lattice = Lattice([[3.8401979337, 0.00, 0.00],
                           [1.9200989668, 3.3257101909, 0.00],
                           [0.00, -2.2171384943, 3.1355090603]])
        struct = Structure(lattice, ["Li+", "Li+", "Li+", "Li+", "Li+", "Li+",
                                     "O2-", "O2-"], coords)
        s = t.apply_transformation(struct)
        # Removing Li+ should leave a charge-neutral structure.
        self.assertAlmostEqual(s.charge, 0, 5)
@unittest.skipIf(not enumlib_present, "enum_lib not present.")
class EnumerateStructureTransformationTest(unittest.TestCase):
    """Tests for EnumerateStructureTransformation (needs enumlib binaries)."""
    def setUp(self):
        warnings.simplefilter("ignore")
    def tearDown(self):
        warnings.simplefilter("default")
    def test_apply_transformation(self):
        enum_trans = EnumerateStructureTransformation(refine_structure=True)
        enum_trans2 = EnumerateStructureTransformation(refine_structure=True,
                                                       sort_criteria="nsites")
        p = Poscar.from_file(os.path.join(test_dir, 'POSCAR.LiFePO4'),
                             check_for_POTCAR=False)
        struct = p.structure
        # Expected number of enumerated orderings for each Fe fraction below.
        expected_ans = [1, 3, 1]
        for i, frac in enumerate([0.25, 0.5, 0.75]):
            trans = SubstitutionTransformation({'Fe': {'Fe': frac}})
            s = trans.apply_transformation(struct)
            oxitrans = OxidationStateDecorationTransformation(
                {'Li': 1, 'Fe': 2, 'P': 5, 'O': -2})
            s = oxitrans.apply_transformation(s)
            alls = enum_trans.apply_transformation(s, 100)
            self.assertEqual(len(alls), expected_ans[i])
            self.assertIsInstance(trans.apply_transformation(s), Structure)
            for ss in alls:
                self.assertIn("energy", ss)
            alls = enum_trans2.apply_transformation(s, 100)
            self.assertEqual(len(alls), expected_ans[i])
            self.assertIsInstance(trans.apply_transformation(s), Structure)
            for ss in alls:
                self.assertIn("num_sites", ss)
        # make sure it works for non-oxidation state decorated structure
        trans = SubstitutionTransformation({'Fe': {'Fe': 0.5}})
        s = trans.apply_transformation(struct)
        alls = enum_trans.apply_transformation(s, 100)
        self.assertEqual(len(alls), 3)
        self.assertIsInstance(trans.apply_transformation(s), Structure)
        for s in alls:
            self.assertNotIn("energy", s)
    def test_max_disordered_sites(self):
        l = Lattice.cubic(4)
        s_orig = Structure(l, [{"Li": 0.2, "Na": 0.2, "K": 0.6}, {"O": 1}],
                           [[0, 0, 0], [0.5, 0.5, 0.5]])
        est = EnumerateStructureTransformation(max_cell_size=None,
                                               max_disordered_sites=5)
        dd = est.apply_transformation(s_orig, return_ranked_list=100)
        self.assertEqual(len(dd), 9)
        for d in dd:
            self.assertEqual(len(d["structure"]), 10)
    def test_to_from_dict(self):
        # Round-trip through the dict serialization.
        trans = EnumerateStructureTransformation()
        d = trans.as_dict()
        trans = EnumerateStructureTransformation.from_dict(d)
        self.assertEqual(trans.symm_prec, 0.1)
class SubstitutionPredictorTransformationTest(unittest.TestCase):
    """Tests for SubstitutionPredictorTransformation using the light lambda table."""
    def test_apply_transformation(self):
        t = SubstitutionPredictorTransformation(threshold=1e-3, alpha=-5,
                                                lambda_table=get_table())
        coords = list()
        coords.append([0, 0, 0])
        coords.append([0.75, 0.75, 0.75])
        coords.append([0.5, 0.5, 0.5])
        lattice = Lattice([[3.8401979337, 0.00, 0.00],
                           [1.9200989668, 3.3257101909, 0.00],
                           [0.00, -2.2171384943, 3.1355090603]])
        struct = Structure(lattice, ['O2-', 'Li1+', 'Li1+'], coords)
        outputs = t.apply_transformation(struct, return_ranked_list=True)
        self.assertEqual(len(outputs), 4, 'incorrect number of structures')
    def test_as_dict(self):
        # Threshold and alpha must survive a dict round-trip.
        t = SubstitutionPredictorTransformation(threshold=2, alpha=-2,
                                                lambda_table=get_table())
        d = t.as_dict()
        t = SubstitutionPredictorTransformation.from_dict(d)
        self.assertEqual(t.threshold, 2,
                         'incorrect threshold passed through dict')
        self.assertEqual(t._substitutor.p.alpha, -2,
                         'incorrect alpha passed through dict')
@unittest.skipIf(not enumlib_present, "enum_lib not present.")
class MagOrderingTransformationTest(PymatgenTest):
    """Tests for MagOrderingTransformation, incl. multi-constraint orderings."""
    def setUp(self):
        # Reference NiO cells: conventional, plus AFM-111 and AFM-001
        # orderings with explicit spins, used to check enumerated output.
        latt = Lattice.cubic(4.17)
        species = ["Ni", "O"]
        coords = [[0, 0, 0],
                  [0.5, 0.5, 0.5]]
        self.NiO = Structure.from_spacegroup(225, latt, species, coords)
        latt = Lattice([[2.085, 2.085, 0.0],
                        [0.0, -2.085, -2.085],
                        [-2.085, 2.085, -4.17]])
        species = ["Ni", "Ni", "O", "O"]
        coords = [[0.5, 0, 0.5],
                  [0, 0, 0],
                  [0.25, 0.5, 0.25],
                  [0.75, 0.5, 0.75]]
        self.NiO_AFM_111 = Structure(latt, species, coords)
        self.NiO_AFM_111.add_spin_by_site([-5, 5, 0, 0])
        latt = Lattice([[2.085, 2.085, 0],
                        [0, 0, -4.17],
                        [-2.085, 2.085, 0]])
        species = ["Ni", "Ni", "O", "O"]
        coords = [[0.5, 0.5, 0.5],
                  [0, 0, 0],
                  [0, 0.5, 0],
                  [0.5, 0, 0.5]]
        self.NiO_AFM_001 = Structure(latt, species, coords)
        self.NiO_AFM_001.add_spin_by_site([-5, 5, 0, 0])
        parser = CifParser(os.path.join(test_dir, 'Fe3O4.cif'))
        self.Fe3O4 = parser.get_structures()[0]
        trans = AutoOxiStateDecorationTransformation()
        self.Fe3O4_oxi = trans.apply_transformation(self.Fe3O4)
        parser = CifParser(os.path.join(test_dir, 'Li8Fe2NiCoO8.cif'))
        self.Li8Fe2NiCoO8 = parser.get_structures()[0]
        self.Li8Fe2NiCoO8.remove_oxidation_states()
        warnings.simplefilter("ignore")
    def tearDown(self):
        warnings.simplefilter("default")
    def test_apply_transformation(self):
        trans = MagOrderingTransformation({"Fe": 5})
        p = Poscar.from_file(os.path.join(test_dir, 'POSCAR.LiFePO4'),
                             check_for_POTCAR=False)
        s = p.structure
        alls = trans.apply_transformation(s, 10)
        self.assertEqual(len(alls), 3)
        f = SpacegroupAnalyzer(alls[0]["structure"], 0.1)
        self.assertEqual(f.get_space_group_number(), 31)
        model = IsingModel(5, 5)
        trans = MagOrderingTransformation({"Fe": 5},
                                          energy_model=model)
        alls2 = trans.apply_transformation(s, 10)
        # Ising model with +J penalizes similar neighbor magmom.
        self.assertNotEqual(alls[0]["structure"], alls2[0]["structure"])
        self.assertEqual(alls[0]["structure"], alls2[2]["structure"])
        s = self.get_structure('Li2O')
        # Li2O doesn't have magnetism of course, but this is to test the
        # enumeration.
        trans = MagOrderingTransformation({"Li+": 1}, max_cell_size=3)
        alls = trans.apply_transformation(s, 100)
        # TODO: check this is correct, unclear what len(alls) should be
        self.assertEqual(len(alls), 12)
        trans = MagOrderingTransformation({"Ni": 5})
        alls = trans.apply_transformation(self.NiO.get_primitive_structure(),
                                          return_ranked_list=10)
        # Best two orderings should reproduce the AFM-111 / AFM-001 lattices.
        self.assertArrayAlmostEqual(self.NiO_AFM_111.lattice.parameters,
                                    alls[0]["structure"].lattice.parameters)
        self.assertArrayAlmostEqual(self.NiO_AFM_001.lattice.parameters,
                                    alls[1]["structure"].lattice.parameters)
    def test_ferrimagnetic(self):
        trans = MagOrderingTransformation({"Fe": 5}, order_parameter=0.75,
                                          max_cell_size=1)
        p = Poscar.from_file(os.path.join(test_dir, 'POSCAR.LiFePO4'),
                             check_for_POTCAR=False)
        s = p.structure
        a = SpacegroupAnalyzer(s, 0.1)
        s = a.get_refined_structure()
        alls = trans.apply_transformation(s, 10)
        self.assertEqual(len(alls), 1)
    def test_as_from_dict(self):
        trans = MagOrderingTransformation({"Fe": 5}, order_parameter=0.75)
        d = trans.as_dict()
        # Check json encodability
        s = json.dumps(d)
        trans = MagOrderingTransformation.from_dict(d)
        self.assertEqual(trans.mag_species_spin, {"Fe": 5})
        from pymatgen.analysis.energy_models import SymmetryModel
        self.assertIsInstance(trans.energy_model, SymmetryModel)
    def test_zero_spin_case(self):
        # ensure that zero spin case maintains sites and formula
        s = self.get_structure('Li2O')
        trans = MagOrderingTransformation({"Li+": 0.0}, order_parameter=0.5)
        alls = trans.apply_transformation(s)
        Li_site = alls.indices_from_symbol('Li')[0]
        # Ensure s does not have a spin property
        self.assertFalse('spin' in s.sites[Li_site].specie._properties)
        # ensure sites are assigned a spin property in alls
        self.assertTrue('spin' in alls.sites[Li_site].specie._properties)
        self.assertEqual(alls.sites[Li_site].specie._properties['spin'], 0)
    def test_advanced_usage(self):
        # test spin on just one oxidation state
        magtypes = {"Fe2+": 5}
        trans = MagOrderingTransformation(magtypes)
        alls = trans.apply_transformation(self.Fe3O4_oxi)
        self.assertIsInstance(alls, Structure)
        self.assertEqual(str(alls[0].specie), "Fe2+,spin=5")
        self.assertEqual(str(alls[2].specie), "Fe3+")
        # test multiple order parameters
        # this should only order on Fe3+ site, but assign spin to both
        magtypes = {"Fe2+": 5, "Fe3+": 5}
        order_parameters = [
            MagOrderParameterConstraint(1, species_constraints="Fe2+"),
            MagOrderParameterConstraint(0.5, species_constraints="Fe3+")
        ]
        trans = MagOrderingTransformation(magtypes, order_parameter=order_parameters)
        alls = trans.apply_transformation(self.Fe3O4_oxi)
        # using this 'sorted' syntax because exact order of sites in first
        # returned structure varies between machines: we just want to ensure
        # that the order parameter is accurate
        self.assertEqual(sorted([str(alls[idx].specie) for idx in range(0, 2)]),
                         sorted(["Fe2+,spin=5", "Fe2+,spin=5"]))
        self.assertEqual(sorted([str(alls[idx].specie) for idx in range(2, 6)]),
                         sorted(["Fe3+,spin=5", "Fe3+,spin=5",
                                 "Fe3+,spin=-5", "Fe3+,spin=-5"]))
        self.assertEqual(str(alls[0].specie), "Fe2+,spin=5")
        # this should give same results as previously
        # but with opposite sign on Fe2+ site
        magtypes = {"Fe2+": -5, "Fe3+": 5}
        order_parameters = [
            MagOrderParameterConstraint(1, species_constraints="Fe2+"),
            MagOrderParameterConstraint(0.5, species_constraints="Fe3+")
        ]
        trans = MagOrderingTransformation(magtypes, order_parameter=order_parameters)
        alls = trans.apply_transformation(self.Fe3O4_oxi)
        self.assertEqual(sorted([str(alls[idx].specie) for idx in range(0, 2)]),
                         sorted(["Fe2+,spin=-5", "Fe2+,spin=-5"]))
        self.assertEqual(sorted([str(alls[idx].specie) for idx in range(2, 6)]),
                         sorted(["Fe3+,spin=5", "Fe3+,spin=5",
                                 "Fe3+,spin=-5", "Fe3+,spin=-5"]))
        # while this should order on both sites
        magtypes = {"Fe2+": 5, "Fe3+": 5}
        order_parameters = [
            MagOrderParameterConstraint(0.5, species_constraints="Fe2+"),
            MagOrderParameterConstraint(0.25, species_constraints="Fe3+")
        ]
        trans = MagOrderingTransformation(magtypes, order_parameter=order_parameters)
        alls = trans.apply_transformation(self.Fe3O4_oxi)
        self.assertEqual(sorted([str(alls[idx].specie) for idx in range(0, 2)]),
                         sorted(["Fe2+,spin=5", "Fe2+,spin=-5"]))
        self.assertEqual(sorted([str(alls[idx].specie) for idx in range(2, 6)]),
                         sorted(["Fe3+,spin=5", "Fe3+,spin=-5",
                                 "Fe3+,spin=-5", "Fe3+,spin=-5"]))
        # add coordination numbers to our test case
        # don't really care what these are for the test case
        cns = [6, 6, 6, 6, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0]
        self.Fe3O4.add_site_property('cn', cns)
        # this should give FM ordering on cn=4 sites, and AFM ordering on cn=6 sites
        magtypes = {"Fe": 5}
        order_parameters = [
            MagOrderParameterConstraint(0.5, species_constraints="Fe",
                                        site_constraint_name="cn", site_constraints=6),
            MagOrderParameterConstraint(1.0, species_constraints="Fe",
                                        site_constraint_name="cn", site_constraints=4)
        ]
        trans = MagOrderingTransformation(magtypes, order_parameter=order_parameters)
        alls = trans.apply_transformation(self.Fe3O4)
        alls.sort(key=lambda x: x.properties['cn'], reverse=True)
        self.assertEqual(sorted([str(alls[idx].specie) for idx in range(0, 4)]),
                         sorted(["Fe,spin=-5", "Fe,spin=-5",
                                 "Fe,spin=5", "Fe,spin=5"]))
        self.assertEqual(sorted([str(alls[idx].specie) for idx in range(4, 6)]),
                         sorted(["Fe,spin=5", "Fe,spin=5"]))
        # now ordering on both sites, equivalent to order_parameter = 0.5
        magtypes = {"Fe2+": 5, "Fe3+": 5}
        order_parameters = [
            MagOrderParameterConstraint(0.5, species_constraints="Fe2+"),
            MagOrderParameterConstraint(0.5, species_constraints="Fe3+")
        ]
        trans = MagOrderingTransformation(magtypes, order_parameter=order_parameters)
        alls = trans.apply_transformation(self.Fe3O4_oxi, return_ranked_list=10)
        struct = alls[0]["structure"]
        self.assertEqual(sorted([str(struct[idx].specie) for idx in range(0, 2)]),
                         sorted(["Fe2+,spin=5", "Fe2+,spin=-5"]))
        self.assertEqual(sorted([str(struct[idx].specie) for idx in range(2, 6)]),
                         sorted(["Fe3+,spin=5", "Fe3+,spin=-5",
                                 "Fe3+,spin=-5", "Fe3+,spin=5"]))
        self.assertEqual(len(alls), 4)
        # now mixed orderings where neither are equal or 1
        magtypes = {"Fe2+": 5, "Fe3+": 5}
        order_parameters = [
            MagOrderParameterConstraint(0.5, species_constraints="Fe2+"),
            MagOrderParameterConstraint(0.25, species_constraints="Fe3+")
        ]
        trans = MagOrderingTransformation(magtypes, order_parameter=order_parameters)
        alls = trans.apply_transformation(self.Fe3O4_oxi, return_ranked_list=100)
        struct = alls[0]["structure"]
        self.assertEqual(sorted([str(struct[idx].specie) for idx in range(0, 2)]),
                         sorted(["Fe2+,spin=5", "Fe2+,spin=-5"]))
        self.assertEqual(sorted([str(struct[idx].specie) for idx in range(2, 6)]),
                         sorted(["Fe3+,spin=5", "Fe3+,spin=-5",
                                 "Fe3+,spin=-5", "Fe3+,spin=-5"]))
        self.assertEqual(len(alls), 2)
        # now order on multiple species
        magtypes = {"Fe2+": 5, "Fe3+": 5}
        order_parameters = [
            MagOrderParameterConstraint(0.5, species_constraints=["Fe2+", "Fe3+"]),
        ]
        trans = MagOrderingTransformation(magtypes, order_parameter=order_parameters)
        alls = trans.apply_transformation(self.Fe3O4_oxi, return_ranked_list=10)
        struct = alls[0]["structure"]
        self.assertEqual(sorted([str(struct[idx].specie) for idx in range(0, 2)]),
                         sorted(["Fe2+,spin=5", "Fe2+,spin=-5"]))
        self.assertEqual(sorted([str(struct[idx].specie) for idx in range(2, 6)]),
                         sorted(["Fe3+,spin=5", "Fe3+,spin=-5",
                                 "Fe3+,spin=-5", "Fe3+,spin=5"]))
        self.assertEqual(len(alls), 6)
@unittest.skipIf(not enumlib_present, "enum_lib not present.")
class DopingTransformationTest(PymatgenTest):
    """Tests for DopingTransformation (needs the enumlib binaries)."""
    def setUp(self):
        warnings.simplefilter("ignore")
    def tearDown(self):
        warnings.simplefilter("default")
    def test_apply_transformation(self):
        structure = PymatgenTest.get_structure("LiFePO4")
        a = SpacegroupAnalyzer(structure, 0.1)
        structure = a.get_refined_structure()
        t = DopingTransformation("Ca2+", min_length=10)
        ss = t.apply_transformation(structure, 100)
        self.assertEqual(len(ss), 1)
        # Too-tight ionic radius tolerance: no valid doping site.
        t = DopingTransformation("Al3+", min_length=15, ionic_radius_tol=0.1)
        ss = t.apply_transformation(structure, 100)
        self.assertEqual(len(ss), 0)
        # Aliovalent doping with vacancies
        for dopant, nstructures in [("Al3+", 2), ("N3-", 235), ("Cl-", 8)]:
            t = DopingTransformation(dopant, min_length=4, alio_tol=1,
                                     max_structures_per_enum=1000)
            ss = t.apply_transformation(structure, 1000)
            self.assertEqual(len(ss), nstructures)
            for d in ss:
                self.assertEqual(d["structure"].charge, 0)
        # Aliovalent doping with codopant
        for dopant, nstructures in [("Al3+", 3), ("N3-", 37), ("Cl-", 37)]:
            t = DopingTransformation(dopant, min_length=4, alio_tol=1,
                                     codopant=True,
                                     max_structures_per_enum=1000)
            ss = t.apply_transformation(structure, 1000)
            self.assertEqual(len(ss), nstructures)
            for d in ss:
                self.assertEqual(d["structure"].charge, 0)
        # Make sure compensation is done with lowest oxi state
        structure = PymatgenTest.get_structure("SrTiO3")
        t = DopingTransformation("Nb5+", min_length=5, alio_tol=1,
                                 max_structures_per_enum=1000,
                                 allowed_doping_species=["Ti4+"])
        ss = t.apply_transformation(structure, 1000)
        self.assertEqual(len(ss), 3)
        for d in ss:
            self.assertEqual(d["structure"].formula, "Sr7 Ti6 Nb2 O24")
    def test_as_from_dict(self):
        trans = DopingTransformation("Al3+", min_length=5, alio_tol=1,
                                     codopant=False, max_structures_per_enum=1)
        d = trans.as_dict()
        # Check json encodability
        s = json.dumps(d)
        trans = DopingTransformation.from_dict(d)
        self.assertEqual(str(trans.dopant), "Al3+")
        self.assertEqual(trans.max_structures_per_enum, 1)
    def test_find_codopant(self):
        # _find_codopant picks a charge-compensating co-dopant species.
        self.assertEqual(_find_codopant(Specie("Fe", 2), 1), Specie("Cu", 1))
        self.assertEqual(_find_codopant(Specie("Fe", 2), 3), Specie("In", 3))
class SlabTransformationTest(PymatgenTest):
    """Checks SlabTransformation against direct SlabGenerator output."""
    def test_apply_transformation(self):
        s = self.get_structure("LiFePO4")
        trans = SlabTransformation([0, 0, 1], 10, 10, shift=0.25)
        gen = SlabGenerator(s, [0, 0, 1], 10, 10)
        slab_from_gen = gen.get_slab(0.25)
        slab_from_trans = trans.apply_transformation(s)
        self.assertArrayAlmostEqual(slab_from_gen.lattice.matrix,
                                    slab_from_trans.lattice.matrix)
        self.assertArrayAlmostEqual(slab_from_gen.cart_coords,
                                    slab_from_trans.cart_coords)
        # Repeat for an fcc Fe cell with a (111) cut and default shift.
        fcc = Structure.from_spacegroup("Fm-3m", Lattice.cubic(3), ["Fe"],
                                        [[0, 0, 0]])
        trans = SlabTransformation([1, 1, 1], 10, 10)
        slab_from_trans = trans.apply_transformation(fcc)
        gen = SlabGenerator(fcc, [1, 1, 1], 10, 10)
        slab_from_gen = gen.get_slab()
        self.assertArrayAlmostEqual(slab_from_gen.lattice.matrix,
                                    slab_from_trans.lattice.matrix)
        self.assertArrayAlmostEqual(slab_from_gen.cart_coords,
                                    slab_from_trans.cart_coords)
class GrainBoundaryTransformationTest(PymatgenTest):
    """Checks GrainBoundaryTransformation against GrainBoundaryGenerator."""
    def test_apply_transformation(self):
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            Al_bulk = Structure.from_spacegroup("Fm-3m", Lattice.cubic(2.8575585),
                                                ["Al"], [[0, 0, 0]])
            # Sigma-5 [100] tilt boundary parameters, shared by both code paths.
            gb_gen_params_s5 = {"rotation_axis": [1, 0, 0], "rotation_angle": 53.13010235415599,
                                "expand_times": 3, "vacuum_thickness": 0.0, "normal": True,
                                "plane": [0, -1, -3], 'rm_ratio': 0.6}
            gbg = GrainBoundaryGenerator(Al_bulk)
            gb_from_generator = gbg.gb_from_parameters(**gb_gen_params_s5)
            gbt_s5 = GrainBoundaryTransformation(**gb_gen_params_s5)
            gb_from_trans = gbt_s5.apply_transformation(Al_bulk)
            self.assertArrayAlmostEqual(gb_from_generator.lattice.matrix,
                                        gb_from_trans.lattice.matrix)
            self.assertArrayAlmostEqual(gb_from_generator.cart_coords,
                                        gb_from_trans.cart_coords)
class DisorderedOrderedTransformationTest(PymatgenTest):
    """Tests for DisorderOrderedTransformation."""
    def test_apply_transformation(self):
        # non-sensical example just for testing purposes
        struct = self.get_structure('BaNiO3')
        trans = DisorderOrderedTransformation()
        output = trans.apply_transformation(struct)
        self.assertFalse(output.is_ordered)
        self.assertDictEqual(output[-1].species.as_dict(),
                             {'Ni': 0.5, 'Ba': 0.5})
@unittest.skipIf(not mcsqs_cmd, "mcsqs not present.")
class SQSTransformationTest(PymatgenTest):
    """Tests for SQSTransformation (needs the external mcsqs binary)."""
    def test_apply_transformation(self):
        # non-sensical example just for testing purposes
        self.pztstrings = np.load(os.path.join(test_dir, "mcsqs/pztstrings.npy"), allow_pickle=True)
        self.struct = self.get_structure('Pb2TiZrO6')
        trans = SQSTransformation({2: 6, 3: 4}, supercell=[2, 1, 1], total_atoms=None, search_time=0.01)
        disordered = self.struct.copy()
        disordered.replace_species({'Ti': {'Ti': 0.5, 'Zr': 0.5}, 'Zr': {'Ti': 0.5, 'Zr': 0.5}})
        sqs = trans.apply_transformation(disordered)
        self.assertEqual(atat.Mcsqs(sqs).to_string() in self.pztstrings, True)
        # mcsqs leaves its scratch files in the working directory; tidy up.
        for scratch in ('sqscell.out', 'rndstrgrp.out', 'bestcorr.out',
                        'rndstr.in', 'sym.out', 'mcsqs.log', 'bestsqs.out',
                        'clusters.out'):
            os.remove(scratch)
class CubicSupercellTransformationTest(PymatgenTest):
    """Tests for CubicSupercellTransformation (free and diagonal trans_mat)."""
    def test_apply_transformation(self):
        structure = self.get_structure('TlBiSe2')
        min_atoms = 100
        max_atoms = 1000
        num_nn_dists = 5
        # Test the transformation without constraining trans_mat to be diagonal
        supercell_generator = CubicSupercellTransformation(min_atoms=min_atoms,
                                                           max_atoms=max_atoms,
                                                           num_nn_dists=num_nn_dists)
        superstructure = supercell_generator.apply_transformation(structure)
        num_atoms = superstructure.num_sites
        self.assertTrue(num_atoms >= min_atoms)
        self.assertTrue(num_atoms <= max_atoms)
        # The smallest supercell dimension must span the requested number of
        # nearest-neighbor distances.
        self.assertTrue(supercell_generator.smallest_dim >=
                        num_nn_dists * supercell_generator.nn_dist)
        self.assertArrayAlmostEqual(superstructure.lattice.matrix[0],
                                    [1.49656087e+01, -1.11448000e-03, 9.04924836e+00])
        self.assertArrayAlmostEqual(superstructure.lattice.matrix[1],
                                    [-0.95005506, 14.95766342, 10.01819773])
        self.assertArrayAlmostEqual(superstructure.lattice.matrix[2],
                                    [3.69130000e-02, 4.09320200e-02, 5.90830153e+01])
        self.assertEqual(superstructure.num_sites, 448)
        self.assertArrayEqual(supercell_generator.trans_mat,
                              np.array([[4, 0, 0],
                                        [1, 4, -4],
                                        [0, 0, 1]]))
        # Test the diagonal transformation
        structure2 = self.get_structure('Si')
        sga = SpacegroupAnalyzer(structure2)
        structure2 = sga.get_primitive_standard_structure()
        diagonal_supercell_generator = CubicSupercellTransformation(min_atoms=min_atoms,
                                                                    max_atoms=max_atoms,
                                                                    num_nn_dists=num_nn_dists,
                                                                    force_diagonal_transformation=True)
        superstructure2 = diagonal_supercell_generator.apply_transformation(structure2)
        self.assertArrayEqual(diagonal_supercell_generator.trans_mat,
                              np.array([[4, 0, 0],
                                        [0, 4, 0],
                                        [0, 0, 4]]))
class AddAdsorbateTransformationTest(PymatgenTest):
    """Checks that AddAdsorbateTransformation places an adsorbate on a slab."""
    def test_apply_transformation(self):
        # Fictitious cubic Pt cell, cut into a (001) slab.
        bulk = Structure(Lattice.cubic(5), ["Pt"], [[0, 0, 0]])
        slab = SlabTransformation([0, 0, 1], 20, 10).apply_transformation(bulk)
        adsorbate = Molecule(["C", "O"], [[0, 0, 0], [0, 0, 1.23]])
        out = AddAdsorbateTransformation(adsorbate).apply_transformation(slab)
        self.assertEqual(out.composition.reduced_formula, "Pt4CO")
class SubstituteSurfaceSiteTransformationTest(PymatgenTest):
    """Checks that SubstituteSurfaceSiteTransformation swaps a surface atom."""
    def test_apply_transformation(self):
        # Fictitious cubic Pt cell, cut into a (001) slab.
        bulk = Structure(Lattice.cubic(5), ["Pt"], [[0, 0, 0]])
        slab = SlabTransformation([0, 0, 1], 20, 10).apply_transformation(bulk)
        out = SubstituteSurfaceSiteTransformation("Au").apply_transformation(slab)
        self.assertEqual(out.composition.reduced_formula, "Pt3Au")
# Run the full test suite when executed directly.
if __name__ == "__main__":
    unittest.main()
|
fraricci/pymatgen
|
pymatgen/transformations/tests/test_advanced_transformations.py
|
Python
|
mit
| 32,310
|
[
"VASP",
"pymatgen"
] |
241784c62eab228712c7546a0c0b6a21954133aa9ea25c5e5d01cbf34b411f6a
|
"""
Extract the reads from a fastq file that are not in the bam file
"""
import os
import sys
import argparse
import pysam
from roblib import stream_fastq
def reads_from_bam(bamf, verbose=False):
    """
    Collect the IDs of all reads present in a bam file.

    :param bamf: path to the bam file
    :param verbose: more output
    :return: a set of read ids in the file
    """
    reads = set()
    bamfile = pysam.AlignmentFile(bamf, "rb")
    try:
        # until_eof=True also yields unmapped reads at the end of the file,
        # so no index is required.
        for read in bamfile.fetch(until_eof=True):
            reads.add(read.query_name)
    finally:
        # Close the handle even if fetch() raises (fixes a file-handle leak:
        # the original never closed the AlignmentFile).
        bamfile.close()
    if verbose:
        sys.stderr.write("There are {} reads in the bamfile\n".format(len(reads)))
    return reads
def extract_fastq(fqf, reads, verbose):
    """
    Print (in fastq format) every read in the fastq file whose id is NOT in reads.

    :param fqf: fastq file
    :param reads: set of read ids to ignore
    :param verbose: more output
    :return: nada
    """
    for (seqid, header, bases, quals) in stream_fastq(fqf):
        # Normalize the id: drop a leading '@' if present.
        seqid = seqid[1:] if seqid.startswith('@') else seqid
        if seqid in reads:
            if verbose:
                sys.stderr.write("Skipping: {} --> {}\n".format(seqid, header))
            continue
        if verbose:
            sys.stderr.write("Keeping: {} --> {}\n".format(seqid, header))
        print("@{}\n{}\n+\n{}".format(header, bases, quals))
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="Extract fastq reads that are not in the bamfile")
    parser.add_argument('-f', help='fastq file', required=True)
    parser.add_argument('-b', help='bamfile', required=True)
    parser.add_argument('-v', help='verbose output', action="store_true")
    args = parser.parse_args()
    # Collect the ids aligned in the bam, then print only the fastq reads
    # that are absent from that set (to stdout, in fastq format).
    reads = reads_from_bam(args.b, args.v)
    extract_fastq(args.f, reads, args.v)
|
linsalrob/crAssphage
|
bin/bam/fastq_not_in_bam.py
|
Python
|
mit
| 1,673
|
[
"pysam"
] |
6dc8d77784ab10d45e9a15f541caa53ecc09723d9ad24e428eb2b6d139b22d77
|
# -*- coding: utf-8 -*-
""" GIS Module
@requires: U{B{I{gluon}} <http://web2py.com>}
@requires: U{B{I{shapely}} <http://trac.gispython.org/lab/wiki/Shapely>}
@copyright: (c) 2010-2012 Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
# Public API of this module.
__all__ = ["GIS",
           "S3Map",
           "GoogleGeocoder",
           "YahooGeocoder",
           "S3ExportPOI",
           "S3ImportPOI"
           ]
import os
import re
import sys
#import logging
import urllib # Needed for urlencoding
import urllib2 # Needed for quoting & error handling on fetch
try:
from cStringIO import StringIO # Faster, where available
except:
from StringIO import StringIO
from datetime import timedelta # Needed for Feed Refresh checks
try:
from lxml import etree # Needed to follow NetworkLinks
except ImportError:
print >> sys.stderr, "ERROR: lxml module needed for XML handling"
raise
# KML namespace used when parsing fetched KML documents with lxml.
KML_NAMESPACE = "http://earth.google.com/kml/2.2"
try:
import json # try stdlib (Python 2.6)
except ImportError:
try:
import simplejson as json # try external module
except:
import gluon.contrib.simplejson as json # fallback to pure-Python module
from gluon import *
# Here are dependencies listed for reference:
#from gluon import current
#from gluon.html import *
#from gluon.http import HTTP, redirect
from gluon.dal import Rows
from gluon.storage import Storage, Messages
from gluon.contrib.simplejson.ordered_dict import OrderedDict
from s3fields import s3_all_meta_field_names
from s3search import S3Search
from s3track import S3Trackable
from s3utils import s3_debug, s3_fullname, s3_has_foreign_key
from s3rest import S3Method
# Module-level debug switch: when True, _debug() echoes messages to stderr;
# when False (default), _debug is a no-op so call sites cost almost nothing.
DEBUG = False
if DEBUG:
    import datetime
    print >> sys.stderr, "S3GIS: DEBUG MODE"
    def _debug(m):
        print >> sys.stderr, m
else:
    _debug = lambda m: None
# Map WKT types to db types
GEOM_TYPES = {
    "point": 1,
    "linestring": 2,
    "polygon": 3,
    "multipoint": 4,
    "multilinestring": 5,
    "multipolygon": 6,
    "geometrycollection": 7,
}
# Mean radius of the Earth in km (used for great-circle distance work)
RADIUS_EARTH = 6371.01
# Garmin GPS Symbols
# FIX: "Amusement Park" previously had no trailing comma, so Python's
# implicit string concatenation fused it with "Ball Park" into one bogus
# entry ("Amusement ParkBall Park") and both real symbols were missing.
GPS_SYMBOLS = [
    "Airport",
    "Amusement Park",
    "Ball Park",
    "Bank",
    "Bar",
    "Beach",
    "Bell",
    "Boat Ramp",
    "Bowling",
    "Bridge",
    "Building",
    "Campground",
    "Car",
    "Car Rental",
    "Car Repair",
    "Cemetery",
    "Church",
    "Circle with X",
    "City (Capitol)",
    "City (Large)",
    "City (Medium)",
    "City (Small)",
    "Civil",
    "Contact, Dreadlocks",
    "Controlled Area",
    "Convenience Store",
    "Crossing",
    "Dam",
    "Danger Area",
    "Department Store",
    "Diver Down Flag 1",
    "Diver Down Flag 2",
    "Drinking Water",
    "Exit",
    "Fast Food",
    "Fishing Area",
    "Fitness Center",
    "Flag",
    "Forest",
    "Gas Station",
    "Geocache",
    "Geocache Found",
    "Ghost Town",
    "Glider Area",
    "Golf Course",
    "Green Diamond",
    "Green Square",
    "Heliport",
    "Horn",
    "Hunting Area",
    "Information",
    "Levee",
    "Light",
    "Live Theater",
    "Lodging",
    "Man Overboard",
    "Marina",
    "Medical Facility",
    "Mile Marker",
    "Military",
    "Mine",
    "Movie Theater",
    "Museum",
    "Navaid, Amber",
    "Navaid, Black",
    "Navaid, Blue",
    "Navaid, Green",
    "Navaid, Green/Red",
    "Navaid, Green/White",
    "Navaid, Orange",
    "Navaid, Red",
    "Navaid, Red/Green",
    "Navaid, Red/White",
    "Navaid, Violet",
    "Navaid, White",
    "Navaid, White/Green",
    "Navaid, White/Red",
    "Oil Field",
    "Parachute Area",
    "Park",
    "Parking Area",
    "Pharmacy",
    "Picnic Area",
    "Pizza",
    "Post Office",
    "Private Field",
    "Radio Beacon",
    "Red Diamond",
    "Red Square",
    "Residence",
    "Restaurant",
    "Restricted Area",
    "Restroom",
    "RV Park",
    "Scales",
    "Scenic Area",
    "School",
    "Seaplane Base",
    "Shipwreck",
    "Shopping Center",
    "Short Tower",
    "Shower",
    "Skiing Area",
    "Skull and Crossbones",
    "Soft Field",
    "Stadium",
    "Summit",
    "Swimming Area",
    "Tall Tower",
    "Telephone",
    "Toll Booth",
    "TracBack Point",
    "Trail Head",
    "Truck Stop",
    "Tunnel",
    "Ultralight Area",
    "Water Hydrant",
    "Waypoint",
    "White Buoy",
    "White Dot",
    "Zoo"
    ]
# -----------------------------------------------------------------------------
class GIS(object):
"""
GeoSpatial functions
"""
def __init__(self):
messages = current.messages
#messages.centroid_error = str(A("Shapely", _href="http://pypi.python.org/pypi/Shapely/", _target="_blank")) + " library not found, so can't find centroid!"
messages.centroid_error = "Shapely library not functional, so can't find centroid! Install Geos & Shapely for Line/Polygon support"
messages.unknown_type = "Unknown Type!"
messages.invalid_wkt_point = "Invalid WKT: must be like POINT(3 4)"
messages.invalid_wkt = "Invalid WKT: see http://en.wikipedia.org/wiki/Well-known_text"
messages.lon_empty = "Invalid: Longitude can't be empty if Latitude specified!"
messages.lat_empty = "Invalid: Latitude can't be empty if Longitude specified!"
messages.unknown_parent = "Invalid: %(parent_id)s is not a known Location"
self.DEFAULT_SYMBOL = "White Dot"
self.hierarchy_level_keys = ["L0", "L1", "L2", "L3", "L4"]
self.hierarchy_levels = {}
self.max_allowed_level_num = 4
# -------------------------------------------------------------------------
@staticmethod
def abbreviate_wkt(wkt, max_length=30):
if not wkt:
# Blank WKT field
return None
elif len(wkt) > max_length:
return "%s(...)" % wkt[0:wkt.index("(")]
else:
return wkt
# -------------------------------------------------------------------------
    @staticmethod
    def gps_symbols():
        """ Return the module-level list of Garmin GPS marker symbol names """
        return GPS_SYMBOLS
# -------------------------------------------------------------------------
    def download_kml(self, record_id, filename):
        """
            Download a KML file:
                - unzip it if-required
                - follow NetworkLinks recursively if-required

            Save the file to the /uploads folder

            Designed to be called asynchronously using:
                current.s3task.async("download_kml", [record_id, filename])

            @param record_id: id of the record in the KML layer table
            @param filename: name to save the fetched KML under (within the
                             layer's cache folder)

            @ToDo: Pass error messages to Result & have JavaScript listen for these
        """
        layer = KMLLayer()
        table = layer.table
        record = current.db(table.id == record_id).select(table.url,
                                                          limitby=(0, 1)
                                                          ).first()
        # NOTE(review): assumes the record exists - record is None otherwise
        url = record.url
        cachepath = layer.cachepath
        filepath = os.path.join(cachepath, filename)
        # Fetch (unzipping & following NetworkLinks); returns warning flags as a string
        warning = self.fetch_kml(url, filepath)
        # @ToDo: Handle errors
        #query = (cachetable.name == name)
        if "URLError" in warning or "HTTPError" in warning:
            # URL inaccessible
            if os.access(filepath, os.R_OK):
                statinfo = os.stat(filepath)
                if statinfo.st_size:
                    # Use cached version
                    #date = db(query).select(cachetable.modified_on,
                    #                        limitby=(0, 1)).first().modified_on
                    #response.warning += "%s %s %s\n" % (url,
                    #                                    T("not accessible - using cached version from"),
                    #                                    str(date))
                    #url = URL(c="default", f="download",
                    #          args=[filename])
                    pass
                else:
                    # 0k file is all that is available
                    #response.warning += "%s %s\n" % (url,
                    #                                 T("not accessible - no cached version available!"))
                    # skip layer
                    return
            else:
                # No cached version available
                #response.warning += "%s %s\n" % (url,
                #                                 T("not accessible - no cached version available!"))
                # skip layer
                return
        else:
            # Download was succesful
            #db(query).update(modified_on=request.utcnow)
            if "ParseError" in warning:
                # @ToDo Parse detail
                #response.warning += "%s: %s %s\n" % (T("Layer"),
                #                                     name,
                #                                     T("couldn't be parsed so NetworkLinks not followed."))
                pass
            if "GroundOverlay" in warning or "ScreenOverlay" in warning:
                #response.warning += "%s: %s %s\n" % (T("Layer"),
                #                                     name,
                #                                     T("includes a GroundOverlay or ScreenOverlay which aren't supported in OpenLayers yet, so it may not work properly."))
                pass
# -------------------------------------------------------------------------
    def fetch_kml(self, url, filepath):
        """
            Fetch a KML file:
                - unzip it if-required
                - follow NetworkLinks recursively if-required

            Returns a file object

            Designed as a helper function for download_kml()

            @param url: URL (or site-local path) to fetch from
            @param filepath: where the main KML is written
            @return: a string of accumulated warning flags ("" if none),
                     e.g. "URLError", "HTTPError", "<ParseError>..</ParseError>",
                     "GroundOverlay", "ScreenOverlay"
        """
        from gluon.tools import fetch
        response = current.response
        public_url = current.deployment_settings.get_base_public_url()
        warning = ""
        # Detect whether the URL points back at this same server
        local = False
        if not url.startswith("http"):
            local = True
            url = "%s%s" % (public_url, url)
        elif len(url) > len(public_url) and url[:len(public_url)] == public_url:
            local = True
        if local:
            # Keep Session for local URLs
            # NOTE(review): Cookie/urllib2/StringIO are Python-2 modules,
            # presumably imported at module level - confirm
            import Cookie
            cookie = Cookie.SimpleCookie()
            cookie[response.session_id_name] = response.session_id
            # Release the session lock so the local request can proceed
            current.session._unlock(response)
            try:
                file = fetch(url, cookie=cookie)
            except urllib2.URLError:
                warning = "URLError"
                return warning
            except urllib2.HTTPError:
                warning = "HTTPError"
                return warning
        else:
            try:
                file = fetch(url)
            except urllib2.URLError:
                warning = "URLError"
                return warning
            except urllib2.HTTPError:
                warning = "HTTPError"
                return warning
        filenames = []
        # "PK" magic bytes => this is a zipped KMZ, not plain KML
        if file[:2] == "PK":
            # Unzip
            fp = StringIO(file)
            import zipfile
            myfile = zipfile.ZipFile(fp)
            files = myfile.infolist()
            main = None
            candidates = []
            for _file in files:
                filename = _file.filename
                if filename == "doc.kml":
                    main = filename
                elif filename[-4:] == ".kml":
                    candidates.append(filename)
            if not main:
                if candidates:
                    # Any better way than this to guess which KML file is the main one?
                    main = candidates[0]
                else:
                    response.error = "KMZ contains no KML Files!"
                    return ""
            # Write files to cache (other than the main one)
            request = current.request
            path = os.path.join(request.folder, "static", "cache", "kml")
            if not os.path.exists(path):
                os.makedirs(path)
            for _file in files:
                filename = _file.filename
                if filename != main:
                    # Preserve one level of sub-folder structure from the archive
                    if "/" in filename:
                        _filename = filename.split("/")
                        dir = os.path.join(path, _filename[0])
                        if not os.path.exists(dir):
                            os.mkdir(dir)
                        _filepath = os.path.join(path, *_filename)
                    else:
                        _filepath = os.path.join(path, filename)
                    try:
                        f = open(_filepath, "wb")
                    except:
                        # Trying to write the Folder
                        pass
                    else:
                        filenames.append(filename)
                        __file = myfile.read(filename)
                        f.write(__file)
                        f.close()
            # Now read the main one (to parse)
            file = myfile.read(main)
            myfile.close()
        # Check for NetworkLink
        if "<NetworkLink>" in file:
            try:
                # Remove extraneous whitespace
                parser = etree.XMLParser(recover=True, remove_blank_text=True)
                tree = etree.XML(file, parser)
                # Find contents of href tag (must be a better way?)
                url = ""
                for element in tree.iter():
                    if element.tag == "{%s}href" % KML_NAMESPACE:
                        url = element.text
                if url:
                    # Follow NetworkLink (synchronously)
                    # NOTE(review): recursive; each link overwrites filepath
                    warning2 = self.fetch_kml(url, filepath)
                    warning += warning2
            except (etree.XMLSyntaxError,):
                e = sys.exc_info()[1]
                warning += "<ParseError>%s %s</ParseError>" % (e.line, e.errormsg)
        # Check for Overlays
        if "<GroundOverlay>" in file:
            warning += "GroundOverlay"
        if "<ScreenOverlay>" in file:
            warning += "ScreenOverlay"
        for filename in filenames:
            replace = "%s/%s" % (URL(c="static", f="cache", args=["kml"]),
                                 filename)
            # Rewrite all references to point to the correct place
            # need to catch <Icon><href> (which could be done via lxml)
            # & also <description><![CDATA[<img src=" (which can't)
            file = file.replace(filename, replace)
        # Write main file to cache
        f = open(filepath, "w")
        f.write(file)
        f.close()
        return warning
# -------------------------------------------------------------------------
@staticmethod
def get_bearing(lat_start, lon_start, lat_end, lon_end):
"""
Given a Start & End set of Coordinates, return a Bearing
Formula from: http://www.movable-type.co.uk/scripts/latlong.html
"""
import math
# shortcuts
cos = math.cos
sin = math.sin
delta_lon = lon_start - lon_end
bearing = math.atan2(sin(delta_lon) * cos(lat_end),
(cos(lat_start) * sin(lat_end)) - \
(sin(lat_start) * cos(lat_end) * cos(delta_lon))
)
# Convert to a compass bearing
bearing = (bearing + 360) % 360
return bearing
# -------------------------------------------------------------------------
def get_bounds(self, features=[], parent=None):
"""
Calculate the Bounds of a list of Point Features
e.g. When a map is displayed that focuses on a collection of points,
the map is zoomed to show just the region bounding the points.
e.g. To use in GPX export for correct zooming
`
Ensure a minimum size of bounding box, and that the points
are inset from the border.
@param features: A list of point features
@param parent: A location_id to provide a polygonal bounds suitable
for validating child locations
"""
if parent:
table = current.s3db.gis_location
db = current.db
parent = db(table.id == parent).select(table.level,
table.name,
table.parent,
table.path,
table.lon,
table.lat,
table.lon_min,
table.lat_min,
table.lon_max,
table.lat_max).first()
if parent.lon_min is None or \
parent.lon_max is None or \
parent.lat_min is None or \
parent.lat_max is None or \
parent.lon == parent.lon_min or \
parent.lon == parent.lon_max or \
parent.lat == parent.lat_min or \
parent.lat == parent.lat_max:
# This is unsuitable - try higher parent
if parent.level == "L1":
if parent.parent:
# We can trust that L0 should have the data from prepop
L0 = db(table.id == parent.parent).select(table.name,
table.lon_min,
table.lat_min,
table.lon_max,
table.lat_max).first()
return L0.lat_min, L0.lon_min, L0.lat_max, L0.lon_max, L0.name
if parent.path:
path = parent.path
else:
path = self.update_location_tree(dict(id=parent))
path_list = map(int, path.split("/"))
rows = db(table.id.belongs(path_list)).select(table.level,
table.name,
table.lat,
table.lon,
table.lon_min,
table.lat_min,
table.lon_max,
table.lat_max,
orderby=table.level)
row_list = rows.as_list()
row_list.reverse()
ok = False
for row in row_list:
if row["lon_min"] is not None and row["lon_max"] is not None and \
row["lat_min"] is not None and row["lat_max"] is not None and \
row["lon"] != row["lon_min"] != row["lon_max"] and \
row["lat"] != row["lat_min"] != row["lat_max"]:
ok = True
break
if ok:
# This level is suitable
return row["lat_min"], row["lon_min"], row["lat_max"], row["lon_max"], row["name"]
else:
# This level is suitable
return parent.lat_min, parent.lon_min, parent.lat_max, parent.lon_max, parent.name
return -90, -180, 90, 180, None
# Minimum Bounding Box
# - gives a minimum width and height in degrees for the region shown.
# Without this, a map showing a single point would not show any extent around that point.
bbox_min_size = 0.05
# Bounding Box Insets
# - adds a small amount of distance outside the points.
# Without this, the outermost points would be on the bounding box, and might not be visible.
bbox_inset = 0.007
if len(features) > 0:
min_lon = 180
min_lat = 90
max_lon = -180
max_lat = -90
# Is this a simple feature set or the result of a join?
try:
lon = features[0].lon
simple = True
except (AttributeError, KeyError):
simple = False
# @ToDo: Optimised Geospatial routines rather than this crude hack
for feature in features:
try:
if simple:
lon = feature.lon
lat = feature.lat
else:
# A Join
lon = feature.gis_location.lon
lat = feature.gis_location.lat
except AttributeError:
# Skip any rows without the necessary lat/lon fields
continue
# Also skip those set to None. Note must use explicit test,
# as zero is a legal value.
if lon is None or lat is None:
continue
min_lon = min(lon, min_lon)
min_lat = min(lat, min_lat)
max_lon = max(lon, max_lon)
max_lat = max(lat, max_lat)
# Assure a reasonable-sized box.
delta_lon = (bbox_min_size - (max_lon - min_lon)) / 2.0
if delta_lon > 0:
min_lon -= delta_lon
max_lon += delta_lon
delta_lat = (bbox_min_size - (max_lat - min_lat)) / 2.0
if delta_lat > 0:
min_lat -= delta_lat
max_lat += delta_lat
# Move bounds outward by specified inset.
min_lon -= bbox_inset
max_lon += bbox_inset
min_lat -= bbox_inset
max_lat += bbox_inset
else:
# no features
config = GIS.get_config()
if config.min_lat is not None:
min_lat = config.min_lat
else:
min_lat = -90
if config.min_lon is not None:
min_lon = config.min_lon
else:
min_lon = -180
if config.max_lat is not None:
max_lat = config.max_lat
else:
max_lat = 90
if config.max_lon is not None:
max_lon = config.max_lon
else:
max_lon = 180
return dict(min_lon=min_lon, min_lat=min_lat,
max_lon=max_lon, max_lat=max_lat)
# -------------------------------------------------------------------------
@staticmethod
def _lookup_parent_path(feature_id):
"""
Helper that gets parent and path for a location.
"""
db = current.db
table = db.gis_location
feature = db(table.id == feature_id).select(table.id,
table.name,
table.level,
table.path,
table.parent,
limitby=(0, 1)).first()
return feature
# -------------------------------------------------------------------------
@staticmethod
def get_children(id, level=None):
"""
Return a list of IDs of all GIS Features which are children of
the requested feature, using Materialized path for retrieving
the children
@author: Aravind Venkatesan and Ajay Kumar Sreenivasan from NCSU
This has been chosen over Modified Preorder Tree Traversal for
greater efficiency:
http://eden.sahanafoundation.org/wiki/HaitiGISToDo#HierarchicalTrees
@param: level - optionally filter by level
"""
db = current.db
table = db.gis_location
query = (table.deleted == False)
if level:
query = query & (table.level == level)
term = str(id)
query = query & ((table.path.like(term + "/%")) | \
(table.path.like("%/" + term + "/%")))
children = db(query).select(table.id,
table.name)
return children
# -------------------------------------------------------------------------
def get_parents(self, feature_id, feature=None, ids_only=False):
"""
Returns a list containing ancestors of the requested feature.
If the caller already has the location row, including path and
parent fields, they can supply it via feature to avoid a db lookup.
If ids_only is false, each element in the list is a gluon.sql.Row
containing the gis_location record of an ancestor of the specified
location.
If ids_only is true, just returns a list of ids of the parents.
This avoids a db lookup for the parents if the specified feature
has a path.
List elements are in the opposite order as the location path and
exclude the specified location itself, i.e. element 0 is the parent
and the last element is the most distant ancestor.
Assists lazy update of a database without location paths by calling
update_location_tree to get the path.
"""
if not feature or "path" not in feature or "parent" not in feature:
feature = self._lookup_parent_path(feature_id)
if feature and (feature.path or feature.parent):
if feature.path:
path = feature.path
else:
path = self.update_location_tree(feature)
path_list = map(int, path.split("/"))
if len(path_list) == 1:
# No parents -- path contains only this feature.
return None
# Get path in the desired order, without current feature.
reverse_path = path_list[:-1]
reverse_path.reverse()
# If only ids are wanted, stop here.
if ids_only:
return reverse_path
# Retrieve parents - order in which they're returned is arbitrary.
s3db = current.s3db
table = s3db.gis_location
query = (table.id.belongs(reverse_path))
fields = [table.id, table.name, table.level, table.lat, table.lon]
unordered_parents = current.db(query).select(cache=s3db.cache,
*fields)
# Reorder parents in order of reversed path.
unordered_ids = [row.id for row in unordered_parents]
parents = [unordered_parents[unordered_ids.index(path_id)]
for path_id in reverse_path if path_id in unordered_ids]
return parents
else:
return None
# -------------------------------------------------------------------------
def get_parent_per_level(self, results, feature_id,
feature=None,
ids=True,
names=True):
"""
Adds ancestor of requested feature for each level to supplied dict.
If the caller already has the location row, including path and
parent fields, they can supply it via feature to avoid a db lookup.
If a dict is not supplied in results, one is created. The results
dict is returned in either case.
If ids=True and names=False (used by old S3LocationSelectorWidget):
For each ancestor, an entry is added to results, like
ancestor.level : ancestor.id
If ids=False and names=True (used by address_onvalidation):
For each ancestor, an entry is added to results, like
ancestor.level : ancestor.name
If ids=True and names=True (used by new S3LocationSelectorWidget):
For each ancestor, an entry is added to results, like
ancestor.level : {name : ancestor.name, id: ancestor.id}
"""
if not results:
results = {}
id = feature_id
# if we don't have a feature or a feature id return the dict as-is
if not feature_id and not feature:
return results
if not feature_id and "path" not in feature and "parent" in feature:
# gis_location_onvalidation on a Create => no ID yet
# Read the Parent's path instead
feature = self._lookup_parent_path(feature.parent)
id = feature.id
elif not feature or "path" not in feature or "parent" not in feature:
feature = self._lookup_parent_path(feature_id)
if feature and (feature.path or feature.parent):
if feature.path:
path = feature.path
else:
path = self.update_location_tree(feature)
# Get ids of ancestors at each level.
if feature.parent:
strict = self.get_strict_hierarchy(feature.parent)
else:
strict = self.get_strict_hierarchy(id)
if path and strict and not names:
# No need to do a db lookup for parents in this case -- we
# know the levels of the parents from their position in path.
# Note ids returned from db are ints, not strings, so be
# consistent with that.
path_ids = map(int, path.split("/"))
# This skips the last path element, which is the supplied
# location.
for (i, id) in enumerate(path_ids[:-1]):
results["L%i" % i] = id
elif path:
ancestors = self.get_parents(id, feature=feature)
if ancestors:
for ancestor in ancestors:
if ancestor.level and ancestor.level in self.hierarchy_level_keys:
if names and ids:
results[ancestor.level] = Storage()
results[ancestor.level].name = ancestor.name
results[ancestor.level].id = ancestor.id
elif names:
results[ancestor.level] = ancestor.name
else:
results[ancestor.level] = ancestor.id
if not feature_id:
# Add the Parent in (we only need the version required for gis_location onvalidation here)
results[feature.level] = feature.name
if names:
# We need to have entries for all levels
# (both for address onvalidation & new LocationSelector)
hierarchy_level_keys = self.hierarchy_level_keys
for key in hierarchy_level_keys:
if not results.has_key(key):
results[key] = None
return results
# -------------------------------------------------------------------------
def update_table_hierarchy_labels(self, tablename=None):
"""
Re-set table options that depend on location_hierarchy
Only update tables which are already defined
"""
levels = ["L1", "L2", "L3", "L4"]
labels = self.get_location_hierarchy()
db = current.db
if tablename and tablename in db:
# Update the specific table which has just been defined
table = db[tablename]
if tablename == "gis_location":
labels["L0"] = current.messages.COUNTRY
table.level.requires = \
IS_NULL_OR(IS_IN_SET(labels))
else:
for level in levels:
table[level].label = labels[level]
else:
# Do all Tables which are already defined
# gis_location
if "gis_location" in db:
table = db.gis_location
table.level.requires = \
IS_NULL_OR(IS_IN_SET(labels))
# These tables store location hierarchy info for XSLT export.
# Labels are used for PDF & XLS Reports
tables = ["org_office",
#"pr_person",
"pr_address",
"cr_shelter",
"asset_asset",
#"hms_hospital",
]
for tablename in tables:
if tablename in db:
table = db[tablename]
for level in levels:
table[level].label = labels[level]
# -------------------------------------------------------------------------
@staticmethod
def set_config(config_id=None, force_update_cache=False):
"""
Reads the specified GIS config from the DB, caches it in response.
Passing in a false or non-existent id will cause the personal config,
if any, to be used, else the site config (uuid SITE_DEFAULT), else
their fallback values defined in this class.
If force_update_cache is true, the config will be read and cached in
response even if the specified config is the same as what's already
cached. Used when the config was just written.
The config itself will be available in response.s3.gis.config.
Scalar fields from the gis_config record and its linked
gis_projection record have the same names as the fields in their
tables and can be accessed as response.s3.gis.<fieldname>.
Returns the id of the config it actually used, if any.
@param: config_id. use '0' to set the SITE_DEFAULT
@ToDo: Merge configs for Event
"""
session = current.session
s3 = current.response.s3
all_meta_field_names = s3_all_meta_field_names()
# If an id has been supplied, try it first. If it matches what's in
# response, there's no work to do.
if config_id and not force_update_cache and \
s3.gis.config and \
s3.gis.config.id == config_id:
return
db = current.db
s3db = current.s3db
ctable = s3db.gis_config
mtable = s3db.gis_marker
ptable = s3db.gis_projection
stable = s3db.gis_symbology
ltable = s3db.gis_layer_config
cache = Storage()
row = None
if config_id:
query = (ctable.id == config_id) & \
(mtable.id == stable.marker_id) & \
(stable.id == ctable.symbology_id) & \
(ptable.id == ctable.projection_id)
row = db(query).select(limitby=(0, 1)).first()
elif config_id is 0:
# Use site default.
config = db(ctable.uuid == "SITE_DEFAULT").select(limitby=(0, 1)).first()
if not config:
# No configs found at all
s3.gis.config = cache
return cache
query = (ctable.id == config.id) & \
(mtable.id == stable.marker_id) & \
(stable.id == ctable.symbology_id) & \
(ptable.id == ctable.projection_id)
row = db(query).select(limitby=(0, 1)).first()
# If no id supplied, or the requested config does not exist,
# fall back to personal or site config.
if not row:
# Read personalised config, if available.
auth = current.auth
if auth.is_logged_in():
pe_id = auth.user.pe_id
# OU configs
# List of roles to check (in order)
roles = ["Staff", "Volunteer"]
role_paths = s3db.pr_get_role_paths(pe_id, roles=roles)
# Unordered list of PEs
pes = []
append = pes.append
for role in roles:
if role in role_paths:
# @ToDo: Read the person's gis_config to disambiguate which Path to use, if there are issues
pes = role_paths[role].nodes()
# Staff don't check Volunteer's OUs
break
# Add Personal
pes.insert(0, pe_id)
query = (ctable.pe_id.belongs(pes)) | \
(ctable.uuid == "SITE_DEFAULT")
# Personal may well not be complete, so Left Join
left = [
ptable.on(ptable.id == ctable.projection_id),
stable.on(stable.id == ctable.symbology_id),
mtable.on(mtable.id == stable.marker_id),
]
# Order by pe_type (defined in gis_config)
# @ToDo: Do this purely from the hierarchy
rows = db(query).select(ctable.ALL,
mtable.ALL,
ptable.ALL,
left=left,
orderby=ctable.pe_type)
cache["ids"] = []
exclude = list(all_meta_field_names)
append = exclude.append
for fieldname in ["delete_record", "update_record",
"pe_path",
"gis_layer_config", "gis_menu"]:
append(fieldname)
for row in rows:
config = row["gis_config"]
if not config_id:
config_id = config.id
cache["ids"].append(config.id)
fields = filter(lambda key: key not in exclude,
config)
for key in fields:
if key not in cache or cache[key] is None:
cache[key] = config[key]
if "epsg" not in cache or cache["epsg"] is None:
projection = row["gis_projection"]
for key in ["epsg", "units", "maxResolution", "maxExtent"]:
cache[key] = projection[key] if key in projection else None
if "image" not in cache or cache["image"] is None:
marker = row["gis_marker"]
for key in ["image", "height", "width"]:
cache["marker_%s" % key] = marker[key] if key in marker else None
#if "base" not in cache:
# # Default Base Layer?
# query = (ltable.config_id == config.id) & \
# (ltable.base == True) & \
# (ltable.enabled == True)
# base = db(query).select(ltable.layer_id,
# limitby=(0, 1)).first()
# if base:
# cache["base"] = base.layer_id
# Add NULL values for any that aren't defined, to avoid KeyErrors
for key in ["epsg", "units", "maxResolution", "maxExtent",
"marker_image", "marker_height", "marker_width",
"base"]:
if key not in cache:
cache[key] = None
if not row:
# No personal config or not logged in. Use site default.
config = db(ctable.uuid == "SITE_DEFAULT").select(limitby=(0, 1)).first()
if not config:
# No configs found at all
s3.gis.config = cache
return cache
query = (ctable.id == config.id) & \
(mtable.id == stable.marker_id) & \
(stable.id == ctable.symbology_id) & \
(ptable.id == ctable.projection_id)
row = db(query).select(limitby=(0, 1)).first()
if row and not cache:
# We had a single row
config = row["gis_config"]
config_id = config.id
cache["ids"] = [config_id]
projection = row["gis_projection"]
marker = row["gis_marker"]
fields = filter(lambda key: key not in all_meta_field_names,
config)
for key in fields:
cache[key] = config[key]
for key in ["epsg", "units", "maxResolution", "maxExtent"]:
cache[key] = projection[key] if key in projection else None
for key in ["image", "height", "width"]:
cache["marker_%s" % key] = marker[key] if key in marker else None
# Default Base Layer?
#query = (ltable.config_id == config_id) & \
# (ltable.base == True) & \
# (ltable.enabled == True)
#base = db(query).select(ltable.layer_id,
# limitby=(0, 1)).first()
#if base:
# cache["base"] = base.layer_id
#else:
# cache["base"] = None
# Store the values
s3.gis.config = cache
# Let caller know if their id was valid.
return config_id if row else cache
# -------------------------------------------------------------------------
@staticmethod
def get_config():
"""
Returns the current GIS config structure.
@ToDo: Config() class
"""
gis = current.response.s3.gis
if not gis.config:
# Ask set_config to put the appropriate config in response.
if current.session.s3.gis_config_id:
GIS.set_config(current.session.s3.gis_config_id)
else:
GIS.set_config()
return gis.config
# -------------------------------------------------------------------------
    def get_location_hierarchy(self, level=None, location=None):
        """
            Returns the location hierarchy and it's labels

            @param: level - a specific level for which to lookup the label
            @param: location - the location_id to lookup the location for
                               currently only the actual location is supported
                               @ToDo: Do a search of parents to allow this
                                      lookup for any location
        """
        _levels = self.hierarchy_levels
        _location = location
        if not location and _levels:
            # Use cached value
            if level:
                if level in _levels:
                    return _levels[level]
                else:
                    # Unknown level key: hand it back unchanged
                    return level
            else:
                return _levels
        T = current.T
        COUNTRY = current.messages.COUNTRY
        if level == "L0":
            # Countries always use the fixed label
            return COUNTRY
        db = current.db
        s3db = current.s3db
        table = s3db.gis_hierarchy
        fields = [table.uuid,
                  table.L1,
                  table.L2,
                  table.L3,
                  table.L4,
                  table.L5]
        # Site default; optionally also the region's own hierarchy record
        query = (table.uuid == "SITE_DEFAULT")
        if not location:
            config = GIS.get_config()
            location = config.region_location_id
        if location:
            # Try the Region, but ensure we have the fallback available in a single query
            query = query | (table.location_id == location)
        rows = db(query).select(cache=s3db.cache,
                                *fields)
        if len(rows) > 1:
            # Remove the Site Default
            filter = lambda row: row.uuid == "SITE_DEFAULT"
            rows.exclude(filter)
        elif not rows:
            # prepop hasn't run yet
            if level:
                return level
            # Fall back to generic labels (COUNTRY + raw level keys)
            levels = OrderedDict()
            hierarchy_level_keys = self.hierarchy_level_keys
            for key in hierarchy_level_keys:
                if key == "L0":
                    levels[key] = COUNTRY
                else:
                    levels[key] = key
            return levels
        row = rows.first()
        if level:
            try:
                return T(row[level])
            except:
                # Level column missing/empty: return the key itself
                return level
        else:
            levels = OrderedDict()
            hierarchy_level_keys = self.hierarchy_level_keys
            for key in hierarchy_level_keys:
                if key == "L0":
                    levels[key] = COUNTRY
                elif key in row and row[key]:
                    # Only include rows with values
                    levels[key] = str(T(row[key]))
            if not _location:
                # Cache the value
                self.hierarchy_levels = levels
            if level:
                return levels[level]
            else:
                return levels
# -------------------------------------------------------------------------
def get_strict_hierarchy(self, location=None):
"""
Returns the strict hierarchy value from the current config.
@param: location - the location_id of the record to check
"""
s3db = current.s3db
table = s3db.gis_hierarchy
# Read the system default
# @ToDo: Check for an active gis_config region?
query = (table.uuid == "SITE_DEFAULT")
if location:
# Try the Location's Country, but ensure we have the fallback available in a single query
query = query | (table.location_id == self.get_parent_country(location))
rows = current.db(query).select(table.uuid,
table.strict_hierarchy,
cache=s3db.cache)
if len(rows) > 1:
# Remove the Site Default
filter = lambda row: row.uuid == "SITE_DEFAULT"
rows.exclude(filter)
row = rows.first()
if row:
strict = row.strict_hierarchy
else:
# Pre-pop hasn't run yet
return False
return strict
# -------------------------------------------------------------------------
def get_max_hierarchy_level(self):
"""
Returns the deepest level key (i.e. Ln) in the current hierarchy.
- used by gis_location_onvalidation()
"""
location_hierarchy = self.get_location_hierarchy()
return max(location_hierarchy)
# -------------------------------------------------------------------------
def get_all_current_levels(self, level=None):
"""
Get the current hierarchy levels plus non-hierarchy levels.
"""
all_levels = OrderedDict()
all_levels.update(self.get_location_hierarchy())
#T = current.T
#all_levels["GR"] = T("Location Group")
#all_levels["XX"] = T("Imported")
if level:
try:
return all_levels[level]
except Exception, exception:
return level
else:
return all_levels
# -------------------------------------------------------------------------
# @ToDo: There is nothing stopping someone from making extra configs that
# have country locations as their region location. Need to select here
# only those configs that belong to the hierarchy. If the L0 configs are
# created during initial db creation, then we can tell which they are
# either by recording the max id for an L0 config, or by taking the config
# with lowest id if there are more than one per country. This same issue
# applies to any other use of country configs that relies on getting the
# official set (e.g. looking up hierarchy labels).
def get_edit_level(self, level, id):
"""
Returns the edit_<level> value from the parent country hierarchy.
Used by gis_location_onvalidation()
@param id: the id of the location or an ancestor - used to find
the ancestor country location.
"""
country = self.get_parent_country(id)
s3db = current.s3db
table = s3db.gis_hierarchy
fieldname = "edit_%s" % level
# Read the system default
query = (table.uuid == "SITE_DEFAULT")
if country:
# Try the Location's Country, but ensure we have the fallback available in a single query
query = query | (table.location_id == country)
rows = current.db(query).select(table[fieldname],
cache=s3db.cache)
if len(rows) > 1:
# Remove the Site Default
filter = lambda row: row.uuid == "SITE_DEFAULT"
rows.exclude(filter)
row = rows.first()
edit = row[fieldname]
return edit
# -------------------------------------------------------------------------
    @staticmethod
    def get_countries(key_type="id"):
        """
            Returns country code or L0 location id versus name for all countries.

            The lookup is cached in the session

            If key_type is "code", these are returned as an OrderedDict with
            country code as the key. If key_type is "id", then the location id
            is the key. In all cases, the value is the name.
        """
        session = current.session
        if "gis" not in session:
            session.gis = Storage()
        gis = session.gis
        if gis.countries_by_id:
            cached = True
        else:
            cached = False
        if not cached:
            s3db = current.s3db
            table = s3db.gis_location
            ttable = s3db.gis_location_tag
            # L0 locations joined to their ISO2 country-code tag
            query = (table.level == "L0") & \
                    (ttable.tag == "ISO2") & \
                    (ttable.location_id == table.id)
            countries = current.db(query).select(table.id,
                                                 table.name,
                                                 ttable.value,
                                                 orderby=table.name)
            if not countries:
                # NOTE(review): returns a list here but an OrderedDict when
                # data exists - callers only appear to test truthiness
                return []
            countries_by_id = OrderedDict()
            countries_by_code = OrderedDict()
            for row in countries:
                location = row["gis_location"]
                countries_by_id[location.id] = location.name
                countries_by_code[row["gis_location_tag"].value] = location.name
            # Cache in the session
            gis.countries_by_id = countries_by_id
            gis.countries_by_code = countries_by_code
            if key_type == "id":
                return countries_by_id
            else:
                return countries_by_code
        elif key_type == "id":
            return gis.countries_by_id
        else:
            return gis.countries_by_code
# -------------------------------------------------------------------------
@staticmethod
def get_country(key, key_type="id"):
"""
Returns country name for given code or id from L0 locations.
The key can be either location id or country code, as specified
by key_type.
"""
if key:
if current.gis.get_countries(key_type):
if key_type == "id":
return current.session.gis.countries_by_id[key]
else:
return current.session.gis.countries_by_code[key]
return None
# -------------------------------------------------------------------------
def get_parent_country(self, location, key_type="id"):
    """
    Returns the parent country for a given record

    @param: location: the location (record with id/path/level) or id
                      to search for
    @param: key_type: whether to return an "id" or "code"
    @return: the country's location id or ISO2 code, or None if no
             parent country can be found

    @ToDo: Optimise to not use try/except
    """
    db = current.db
    s3db = current.s3db

    def lookup_iso2(location_id):
        # Look up the ISO2 code tagged onto an L0 location
        ttable = s3db.gis_location_tag
        query = (ttable.tag == "ISO2") & \
                (ttable.location_id == location_id)
        tag = db(query).select(ttable.value,
                               limitby=(0, 1)).first()
        if tag:
            return tag.value
        return None

    # @ToDo: Avoid try/except here!
    #        - separate parameters best as even isinstance is expensive
    try:
        # location is passed as integer (location_id)
        table = s3db.gis_location
        location = db(table.id == location).select(table.id,
                                                   table.path,
                                                   table.level,
                                                   limitby=(0, 1),
                                                   cache=s3db.cache).first()
    except:
        # location is passed as record
        pass

    if not location:
        # Invalid location_id (previously this crashed on location.level)
        return None

    if location.level == "L0":
        # The location is itself a country
        if key_type == "id":
            return location.id
        elif key_type == "code":
            return lookup_iso2(location.id)
    else:
        # Step up through the ancestors until we find the L0
        parents = self.get_parents(location.id,
                                   feature=location)
        if parents:
            for row in parents:
                if row.level == "L0":
                    if key_type == "id":
                        return row.id
                    elif key_type == "code":
                        return lookup_iso2(row.id)

    return None
# -------------------------------------------------------------------------
def get_default_country(self, key_type="id"):
    """
    Returns the default country for the active gis_config

    @param: key_type: whether to return an "id" or "code"
    @return: the country id/code, or None when the active config has
             no default location
    """
    config = GIS.get_config()
    if config.default_location_id:
        # Bugfix: forward key_type - previously it was silently
        # dropped, so key_type="code" still returned an id
        return self.get_parent_country(config.default_location_id,
                                       key_type=key_type)
    return None
# -------------------------------------------------------------------------
def get_features_in_polygon(self, location, tablename=None, category=None):
    """
    Returns a gluon.sql.Rows of Features within a Polygon.
    The Polygon can be either a WKT string or the ID of a record in the
    gis_location table

    Currently unused.

    @param location: gis_location record id (int) or a raw WKT
                     POLYGON/MULTIPOLYGON string
    @param tablename: the resource table whose features to test;
                      must have a location_id field
    @param category: unused  # NOTE(review): accepted but never read

    @ToDo: Optimise to not use try/except
    """
    from shapely.geos import ReadingError
    from shapely.wkt import loads as wkt_loads

    db = current.db
    s3db = current.s3db
    locations = s3db.gis_location

    try:
        # location passed as a record id: load its WKT + bounding box
        location_id = int(location)
        # Check that the location is a polygon
        query = (locations.id == location_id)
        location = db(query).select(locations.wkt,
                                    locations.lon_min,
                                    locations.lon_max,
                                    locations.lat_min,
                                    locations.lat_max,
                                    limitby=(0, 1)).first()
        # NOTE(review): if the id lookup returns no record, wkt and
        # lon_min stay undefined and a NameError follows below -
        # presumably callers always pass valid ids; confirm
        if location:
            wkt = location.wkt
            if wkt and (wkt.startswith("POLYGON") or \
                        wkt.startswith("MULTIPOLYGON")):
                # ok - keep the stored bbox for the fast pre-filter
                lon_min = location.lon_min
                lon_max = location.lon_max
                lat_min = location.lat_min
                lat_max = location.lat_max
            else:
                s3_debug("Location searched within isn't a Polygon!")
                return None
    except: # @ToDo: need specific exception
        # location passed as a raw WKT string: no bbox available
        wkt = location
        if (wkt.startswith("POLYGON") or wkt.startswith("MULTIPOLYGON")):
            # ok - lon_min None signals "no bbox pre-filter" below
            lon_min = None
        else:
            s3_debug("This isn't a Polygon!")
            return None

    try:
        polygon = wkt_loads(wkt)
    except: # @ToDo: need specific exception
        s3_debug("Invalid Polygon!")
        return None

    table = s3db[tablename]
    if "location_id" not in table.fields():
        # @ToDo: Add any special cases to be able to find the linked location
        s3_debug("This table doesn't have a location_id!")
        return None

    query = (table.location_id == locations.id)
    if "deleted" in table.fields:
        query = query & (table.deleted == False)
    # @ToDo: Check AAA (do this as a resource filter?)

    features = db(query).select(locations.wkt,
                                locations.lat,
                                locations.lon,
                                table.ALL)
    output = Rows()
    # @ToDo: provide option to use PostGIS/Spatialite
    # settings = current.deployment_settings
    # if settings.gis.spatialdb and settings.database.db_type == "postgres":
    if lon_min is None:
        # We have no BBOX so go straight to the full geometry check
        for row in features:
            _location = row.gis_location
            wkt = _location.wkt
            if wkt is None:
                # Point feature without stored WKT: synthesise one
                # from lat/lon, or skip if those are missing too
                lat = _location.lat
                lon = _location.lon
                if lat is not None and lon is not None:
                    wkt = self.latlon_to_wkt(lat, lon)
                else:
                    continue
            try:
                shape = wkt_loads(wkt)
                if shape.intersects(polygon):
                    # Save Record
                    output.records.append(row)
            except ReadingError:
                s3_debug(
                    "Error reading wkt of location with id",
                    value=row.id
                )
    else:
        # 1st check for Features included within the bbox (faster)
        def in_bbox(row):
            _location = row.gis_location
            return (_location.lon > lon_min) & \
                   (_location.lon < lon_max) & \
                   (_location.lat > lat_min) & \
                   (_location.lat < lat_max)
        for row in features.find(lambda row: in_bbox(row)):
            # Search within this subset with a full geometry check
            # Uses Shapely.
            _location = row.gis_location
            wkt = _location.wkt
            if wkt is None:
                # As above: fall back to a point WKT from lat/lon
                lat = _location.lat
                lon = _location.lon
                if lat is not None and lon is not None:
                    wkt = self.latlon_to_wkt(lat, lon)
                else:
                    continue
            try:
                shape = wkt_loads(wkt)
                if shape.intersects(polygon):
                    # Save Record
                    output.records.append(row)
            except ReadingError:
                s3_debug(
                    "Error reading wkt of location with id",
                    value = row.id,
                )

    return output
# -------------------------------------------------------------------------
def get_features_in_radius(self, lat, lon, radius, tablename=None, category=None):
    """
    Returns Features within a Radius (in km) of a LatLon Location

    Unused

    @param lat: latitude of the centre point (decimal degrees)
    @param lon: longitude of the centre point (decimal degrees)
    @param radius: search radius in km
    @param tablename: optional resource table joined to gis_location
                      via its location_id
    @param category: unused  # NOTE(review): accepted but never read
    """
    import math

    db = current.db
    settings = current.deployment_settings

    if settings.gis.spatialdb and settings.database.db_type == "postgres":
        # Use PostGIS routine
        # The ST_DWithin function call will automatically include a bounding box comparison that will make use of any indexes that are available on the geometries.
        # @ToDo: Support optional Category (make this a generic filter?)
        import psycopg2
        import psycopg2.extras

        dbname = settings.database.database
        username = settings.database.username
        password = settings.database.password
        host = settings.database.host
        port = settings.database.port or "5432"

        # Convert km to degrees (since we're using the_geom not the_geog)
        radius = math.degrees(float(radius) / RADIUS_EARTH)

        connection = psycopg2.connect("dbname=%s user=%s password=%s host=%s port=%s" % (dbname, username, password, host, port))
        cursor = connection.cursor(cursor_factory=psycopg2.extras.DictCursor)
        info_string = "SELECT column_name, udt_name FROM information_schema.columns WHERE table_name = 'gis_location' or table_name = '%s';" % tablename
        cursor.execute(info_string)
        # @ToDo: Look at more optimal queries for just those fields we need
        # NOTE(review): tablename/lat/lon are interpolated straight
        # into SQL - safe only if they never come from user input;
        # consider parameterised queries if that changes
        if tablename:
            # Lookup the resource
            query_string = cursor.mogrify("SELECT * FROM gis_location, %s WHERE %s.location_id = gis_location.id and ST_DWithin (ST_GeomFromText ('POINT (%s %s)', 4326), the_geom, %s);" % (tablename, tablename, lat, lon, radius))
        else:
            # Lookup the raw Locations
            query_string = cursor.mogrify("SELECT * FROM gis_location WHERE ST_DWithin (ST_GeomFromText ('POINT (%s %s)', 4326), the_geom, %s);" % (lat, lon, radius))

        cursor.execute(query_string)
        # @ToDo: Export Rows?
        features = []
        for record in cursor:
            d = dict(record.items())
            row = Storage()
            # @ToDo: Optional support for Polygons
            if tablename:
                # Mimic the Rows structure of a joined DAL select
                row.gis_location = Storage()
                row.gis_location.id = d["id"]
                row.gis_location.lat = d["lat"]
                row.gis_location.lon = d["lon"]
                row.gis_location.lat_min = d["lat_min"]
                row.gis_location.lon_min = d["lon_min"]
                row.gis_location.lat_max = d["lat_max"]
                row.gis_location.lon_max = d["lon_max"]
                row[tablename] = Storage()
                row[tablename].id = d["id"]
                row[tablename].name = d["name"]
            else:
                # Flat Storage for raw locations
                row.name = d["name"]
                row.id = d["id"]
                row.lat = d["lat"]
                row.lon = d["lon"]
                row.lat_min = d["lat_min"]
                row.lon_min = d["lon_min"]
                row.lat_max = d["lat_max"]
                row.lon_max = d["lon_max"]
            features.append(row)

        return features

    #elif settings.database.db_type == "mysql":
        # Do the calculation in MySQL to pull back only the relevant rows
        # Raw MySQL Formula from: http://blog.peoplesdns.com/archives/24
        # PI = 3.141592653589793, mysql's pi() function returns 3.141593
        #pi = math.pi
        #query = """SELECT name, lat, lon, acos(SIN( PI()* 40.7383040 /180 )*SIN( PI()*lat/180 ))+(cos(PI()* 40.7383040 /180)*COS( PI()*lat/180) *COS(PI()*lon/180-PI()* -73.99319 /180))* 3963.191
        #AS distance
        #FROM gis_location
        #WHERE 1=1
        #AND 3963.191 * ACOS( (SIN(PI()* 40.7383040 /180)*SIN(PI() * lat/180)) + (COS(PI()* 40.7383040 /180)*cos(PI()*lat/180)*COS(PI() * lon/180-PI()* -73.99319 /180))) < = 1.5
        #ORDER BY 3963.191 * ACOS((SIN(PI()* 40.7383040 /180)*SIN(PI()*lat/180)) + (COS(PI()* 40.7383040 /180)*cos(PI()*lat/180)*COS(PI() * lon/180-PI()* -73.99319 /180)))"""
        # db.executesql(query)

    else:
        # Calculate in Python
        # Pull back all the rows within a square bounding box (faster than checking all features manually)
        # Then check each feature within this subset
        # http://janmatuschek.de/LatitudeLongitudeBoundingCoordinates
        # @ToDo: Support optional Category (make this a generic filter?)

        # shortcuts
        radians = math.radians
        degrees = math.degrees

        MIN_LAT = radians(-90)      # -PI/2
        MAX_LAT = radians(90)       # PI/2
        MIN_LON = radians(-180)     # -PI
        MAX_LON = radians(180)      # PI

        # Convert to radians for the calculation
        r = float(radius) / RADIUS_EARTH
        radLat = radians(lat)
        radLon = radians(lon)

        # Calculate the bounding box
        minLat = radLat - r
        maxLat = radLat + r

        if (minLat > MIN_LAT) and (maxLat < MAX_LAT):
            # Normal case: box does not touch a pole, but the lon
            # range may still wrap across the 180 meridian
            deltaLon = math.asin(math.sin(r) / math.cos(radLat))
            minLon = radLon - deltaLon
            if (minLon < MIN_LON):
                minLon += 2 * math.pi
            maxLon = radLon + deltaLon
            if (maxLon > MAX_LON):
                maxLon -= 2 * math.pi
        else:
            # Special care for Poles & 180 Meridian:
            # http://janmatuschek.de/LatitudeLongitudeBoundingCoordinates#PolesAnd180thMeridian
            minLat = max(minLat, MIN_LAT)
            maxLat = min(maxLat, MAX_LAT)
            minLon = MIN_LON
            maxLon = MAX_LON

        # Convert back to degrees
        minLat = degrees(minLat)
        minLon = degrees(minLon)
        maxLat = degrees(maxLat)
        maxLon = degrees(maxLon)

        # shortcut
        locations = db.gis_location

        # Bounding-box pre-filter; exclude deleted rows and rows
        # without coordinates
        query = (locations.lat > minLat) & (locations.lat < maxLat) & (locations.lon > minLon) & (locations.lon < maxLon)
        deleted = (locations.deleted == False)
        empty = (locations.lat != None) & (locations.lon != None)
        query = deleted & empty & query

        if tablename:
            # Lookup the resource
            table = current.s3db[tablename]
            query = query & (table.location_id == locations.id)
            records = db(query).select(table.ALL,
                                       locations.id,
                                       locations.name,
                                       locations.level,
                                       locations.lat,
                                       locations.lon,
                                       locations.lat_min,
                                       locations.lon_min,
                                       locations.lat_max,
                                       locations.lon_max)
        else:
            # Lookup the raw Locations
            records = db(query).select(locations.id,
                                       locations.name,
                                       locations.level,
                                       locations.lat,
                                       locations.lon,
                                       locations.lat_min,
                                       locations.lon_min,
                                       locations.lat_max,
                                       locations.lon_max)
        features = Rows()
        for record in records:
            # Calculate the Great Circle distance to weed out the
            # bbox corners that lie outside the true radius
            if tablename:
                distance = self.greatCircleDistance(lat,
                                                    lon,
                                                    record.gis_location.lat,
                                                    record.gis_location.lon)
            else:
                distance = self.greatCircleDistance(lat,
                                                    lon,
                                                    record.lat,
                                                    record.lon)
            if distance < radius:
                features.records.append(record)
            else:
                # skip
                continue

        return features
# -------------------------------------------------------------------------
def get_latlon(self, feature_id, filter=False):
    """
    Returns the Lat/Lon for a Feature

    used by display_feature() in gis controller

    @param feature_id: the feature ID
    @param filter: Filter out results based on deployment_settings
                   (currently unused)
    @return: dict(lat=..., lon=...) or None when the id is invalid or
             neither the feature nor any ancestor has a Lat/Lon
    """
    db = current.db
    table = db.gis_location
    feature = db(table.id == feature_id).select(table.id,
                                                table.lat,
                                                table.lon,
                                                table.parent,
                                                table.path,
                                                limitby=(0, 1)).first()
    if feature is None:
        # Invalid feature_id
        # (previously this crashed on the membership test below)
        return None

    # Zero is an allowed value, hence explicit test for None.
    if "lon" in feature and "lat" in feature and \
       (feature.lat is not None) and (feature.lon is not None):
        return dict(lon=feature.lon, lat=feature.lat)

    # Step through ancestors to first with lon, lat.
    parents = self.get_parents(feature.id, feature=feature)
    if parents:
        for row in parents:
            if "lon" in row and "lat" in row and \
               (row.lon is not None) and (row.lat is not None):
                return dict(lon=row.lon, lat=row.lat)

    # No usable Lat/Lon anywhere in the ancestry
    return None
# -------------------------------------------------------------------------
@staticmethod
def get_marker(controller=None,
               function=None,
               ):
    """
    Returns a Marker dict for the given controller/function.

    - called by S3REST: S3Resource.export_tree() for non-geojson resources
    - called by S3Search

    Falls back to the default Marker when no specific marker is
    configured for this controller/function in the current symbology.
    """
    marker = None
    if controller and function:
        # Lookup marker in the gis_feature table
        db = current.db
        s3db = current.s3db
        ftable = s3db.gis_layer_feature
        ltable = s3db.gis_layer_symbology
        mtable = s3db.gis_marker
        try:
            symbology_id = current.response.s3.gis.config.symbology_id
        except:
            # Config not initialised yet
            symbology_id = current.gis.get_config().symbology_id
        query = (ftable.controller == controller) & \
                (ftable.function == function) & \
                (ftable.layer_id == ltable.layer_id) & \
                (ltable.symbology_id == symbology_id) & \
                (ltable.marker_id == mtable.id)
        row = db(query).select(mtable.image,
                               mtable.height,
                               mtable.width,
                               ltable.gps_marker).first()
        if row:
            image = row["gis_marker"]
            marker = dict(image=image.image,
                          height=image.height,
                          width=image.width,
                          gps_marker=row["gis_layer_symbology"].gps_marker
                          )
    if not marker:
        # Default
        marker = Marker().as_dict()

    return marker
# -------------------------------------------------------------------------
@staticmethod
def get_locations_and_popups(resource,
                             layer_id=None
                             ):
    """
    Returns the locations and popup tooltips for a Map Layer
    e.g. Feature Layers or Search results (Feature Resources)

    Called by S3REST: S3Resource.export_tree()

    @param: resource - S3Resource instance (required)
    @param: layer_id - db.gis_layer_feature.id (Feature Layers only)

    @return: dict with keys latlons, wkts, geojsons & tooltips (each
             keyed on the resource's tablename, as consumed by S3XML's
             gis_encode()), or None when the resource can't be mapped
    """

    if DEBUG:
        start = datetime.datetime.now()

    db = current.db
    s3db = current.s3db
    request = current.request
    format = current.auth.permission.format

    ftable = s3db.gis_layer_feature

    layer = None

    if layer_id:
        # Feature Layer called by S3REST: S3Resource.export_tree()
        query = (ftable.id == layer_id)
        layer = db(query).select(ftable.trackable,
                                 ftable.polygons,
                                 ftable.popup_label,
                                 ftable.popup_fields,
                                 limitby=(0, 1)).first()

    else:
        # e.g. Search results loaded as a Feature Resource layer
        # - find the layer configured for this controller/function
        query = (ftable.controller == request.controller) & \
                (ftable.function == request.function)

        layers = db(query).select(ftable.trackable,
                                  ftable.polygons,
                                  ftable.popup_label,
                                  ftable.popup_fields,
                                  )
        if len(layers) > 1:
            # We can't provide details for the whole layer, but need to do a per-record check
            # Suggest creating separate controllers to avoid this problem
            return None
        elif layers:
            layer = layers.first()

    # Layer-level settings, with defaults when no layer is configured
    if layer:
        popup_label = layer.popup_label
        popup_fields = layer.popup_fields
        trackable = layer.trackable
        polygons = layer.polygons
    else:
        popup_label = ""
        popup_fields = "name"
        trackable = False
        polygons = False

    table = resource.table
    tablename = resource.tablename

    tooltips = {}
    if format == "geojson":
        # Build the Popup Tooltips now so that representations can be
        # looked-up in bulk rather than as a separate lookup per record
        label_off = request.vars.get("label_off", None)
        if popup_label and not label_off:
            _tooltip = "(%s)" % current.T(popup_label)
        else:
            _tooltip = ""
        if popup_fields:
            # "fieldA/fieldB/..." -> list of fieldnames
            popup_fields = popup_fields.split("/")
        if popup_fields:
            # Bulk-lookup a {value: representation} dict per field
            # (None marks a virtual field, shown verbatim below)
            represents = {}
            for fieldname in popup_fields:
                if fieldname in table:
                    field = table[fieldname]
                    _represents = GIS.get_representation(field, resource)
                    represents[fieldname] = _represents
                else:
                    # Assume a virtual field
                    represents[fieldname] = None

        for record in resource:
            tooltip = _tooltip
            if popup_fields:
                first = True
                for fieldname in popup_fields:
                    try:
                        value = record[fieldname]
                    except:
                        # Field not in table
                        # This isn't working for some reason :-? AttributeError raised by dal.py & not caught
                        continue
                    # Ignore blank fields
                    if not value:
                        continue
                    field_reps = represents[fieldname]
                    if field_reps:
                        try:
                            represent = field_reps[value]
                        except:
                            # list:string (bulk dict keyed on str())
                            represent = field_reps[str(value)]
                    else:
                        # Virtual Field
                        represent = value
                    if first:
                        # 1st field goes in front of the label
                        tooltip = "%s %s" % (represent, tooltip)
                        first = False
                    elif value:
                        # Further fields on their own lines
                        tooltip = "%s<br />%s" % (tooltip, represent)

            tooltips[record.id] = tooltip

        # NOTE(review): this makes the dict reference itself under the
        # tablename key (record ids & tablename share one dict) -
        # presumably what gis_encode() expects; confirm
        tooltips[tablename] = tooltips

        if DEBUG:
            end = datetime.datetime.now()
            duration = end - start
            duration = '{:.2f}'.format(duration.total_seconds())
            query = (ftable.id == layer_id)
            layer_name = db(query).select(ftable.name,
                                          limitby=(0, 1)).first().name
            _debug("tooltip lookup of layer %s completed in %s seconds" % \
                    (layer_name, duration))

    # Lookup the LatLons now so that it can be done as a single
    # query rather than per record
    if DEBUG:
        start = datetime.datetime.now()
    latlons = {}
    wkts = {}
    geojsons = {}
    gtable = s3db.gis_location
    if trackable:
        # Use S3Track
        ids = resource._ids
        # Ensure IDs in ascending order
        ids.sort()
        try:
            tracker = S3Trackable(table, record_ids=ids)
        except SyntaxError:
            # This table isn't trackable
            pass
        else:
            _latlons = tracker.get_location(_fields=[gtable.lat,
                                                     gtable.lon])
            index = 0
            for id in ids:
                # Results come back in the same (sorted) order as ids
                _location = _latlons[index]
                latlons[id] = (_location.lat, _location.lon)
                index += 1

    if not latlons:
        # Not trackable (or tracking failed): join to gis_location
        # directly via location_id, or via the org_site super-entity
        if "location_id" in table.fields:
            query = (table.id.belongs(resource._ids)) & \
                    (table.location_id == gtable.id)
        elif "site_id" in table.fields:
            stable = s3db.org_site
            query = (table.id.belongs(resource._ids)) & \
                    (table.site_id == stable.site_id) & \
                    (stable.location_id == gtable.id)
        else:
            # Can't display this resource on the Map
            return None

        if polygons:
            if current.deployment_settings.get_gis_spatialdb():
                if format == "geojson":
                    # Do the Simplify & GeoJSON direct from the DB
                    rows = db(query).select(table.id,
                                            gtable.the_geom.st_simplify(0.01).st_asgeojson(precision=4).with_alias("geojson"))
                    for row in rows:
                        geojsons[row[tablename].id] = row["gis_location"].geojson
                else:
                    # Do the Simplify direct from the DB
                    rows = db(query).select(table.id,
                                            gtable.the_geom.st_simplify(0.01).st_astext().with_alias("wkt"))
                    for row in rows:
                        wkts[row[tablename].id] = row["gis_location"].wkt
            else:
                rows = db(query).select(table.id,
                                        gtable.wkt)
                if format == "geojson":
                    for row in rows:
                        # Simplify the polygon to reduce download size
                        geojson = GIS.simplify(row["gis_location"].wkt, output="geojson")
                        if geojson:
                            geojsons[row[tablename].id] = geojson
                else:
                    for row in rows:
                        # Simplify the polygon to reduce download size
                        # & also to work around the recursion limit in libxslt
                        # http://blog.gmane.org/gmane.comp.python.lxml.devel/day=20120309
                        wkt = GIS.simplify(row["gis_location"].wkt)
                        if wkt:
                            wkts[row[tablename].id] = wkt
        else:
            # Points
            rows = db(query).select(table.id,
                                    gtable.path,
                                    gtable.lat,
                                    gtable.lon)
            for row in rows:
                _location = row["gis_location"]
                latlons[row[tablename].id] = (_location.lat, _location.lon)

    _latlons = {}
    _latlons[tablename] = latlons
    _wkts = {}
    _wkts[tablename] = wkts
    _geojsons = {}
    _geojsons[tablename] = geojsons

    if DEBUG:
        # NOTE(review): layer_name is only bound inside the geojson
        # branch above, so this raises NameError for other formats
        # (or when no layer record exists) - confirm DEBUG is only
        # used with geojson Feature Layers
        end = datetime.datetime.now()
        duration = end - start
        duration = '{:.2f}'.format(duration.total_seconds())
        _debug("latlons lookup of layer %s completed in %s seconds" % \
                (layer_name, duration))

    # Used by S3XML's gis_encode()
    return dict(latlons = _latlons,
                wkts = _wkts,
                geojsons = _geojsons,
                tooltips = tooltips,
                )
# -------------------------------------------------------------------------
@staticmethod
def get_representation(field,
                       resource=None,
                       value=None):
    """
    Return a quick representation for a Field based on it's value
    - faster than field.represent(value)

    Used by get_locations_and_popup()

    @param field: the Field to represent
    @param resource: S3Resource - when given, a {value: representation}
                     dict is built in bulk for all records of the
                     resource
    @param value: single value to represent (when no resource is given)

    @ToDo: Move out of S3GIS
    """

    db = current.db
    s3db = current.s3db
    cache = current.cache
    fieldname = field.name
    tablename = field.tablename

    if resource:
        # We can lookup the representations in bulk rather than 1/record
        if DEBUG:
            start = datetime.datetime.now()
        represents = {}
        values = [record[fieldname] for record in resource]
        # Deduplicate including non-hashable types (lists)
        #values = list(set(values))
        seen = set()
        values = [ x for x in values if str(x) not in seen and not seen.add(str(x)) ]
        if fieldname == "type":
            # Hardcoded option dicts for known integer "type" fields
            if tablename == "hrm_human_resource":
                for value in values:
                    represents[value] = s3db.hrm_type_opts.get(value, "")
            elif tablename == "org_office":
                for value in values:
                    represents[value] = s3db.org_office_type_opts.get(value, "")
        elif s3_has_foreign_key(field, m2m=False):
            # Foreign key: derive the referenced tablename from the
            # field type string ("reference <tablename>")
            tablename = field.type[10:]
            if tablename == "pr_person":
                represents = s3_fullname(values)
                # Need to modify this function to be able to handle bulk lookups
                #for value in values:
                #    represents[value] = s3_fullname(value)
            else:
                table = s3db[tablename]
                if "name" in table.fields:
                    # Simple Name lookup faster than full represent
                    rows = db(table.id.belongs(values)).select(table.id,
                                                               table.name)
                    for row in rows:
                        represents[row.id] = row.name
                else:
                    # Do the normal represent
                    for value in values:
                        represents[value] = field.represent(value)
        elif field.type.startswith("list"):
            # Do the normal represent
            # (keyed on str() since lists aren't hashable)
            for value in values:
                represents[str(value)] = field.represent(value)
        else:
            # Fallback representation is the value itself
            for value in values:
                represents[value] = value

        if DEBUG:
            end = datetime.datetime.now()
            duration = end - start
            duration = '{:.2f}'.format(duration.total_seconds())
            _debug("representation of %s completed in %s seconds" % \
                    (fieldname, duration))
        return represents

    else:
        # We look up the represention for just this one value at a time
        # If the field is an integer lookup then returning that isn't much help
        # NOTE(review): if fieldname == "type" but tablename matches
        # neither branch, 'represent' stays unbound and the return
        # raises NameError - confirm only those two tables reach here
        if fieldname == "type":
            if tablename == "hrm_human_resource":
                represent = cache.ram("hrm_type_%s" % value,
                                      lambda: s3db.hrm_type_opts.get(value, ""),
                                      time_expire=60)
            elif tablename == "org_office":
                represent = cache.ram("office_type_%s" % value,
                                      lambda: s3db.org_office_type_opts.get(value, ""),
                                      time_expire=60)
        elif s3_has_foreign_key(field, m2m=False):
            tablename = field.type[10:]
            if tablename == "pr_person":
                # Unlikely to be the same person in multiple popups so no value to caching
                represent = s3_fullname(value)
            else:
                table = s3db[tablename]
                if "name" in table.fields:
                    # Simple Name lookup faster than full represent
                    represent = cache.ram("%s_%s_%s" % (tablename, fieldname, value),
                                          lambda: db(table.id == value).select(table.name,
                                                                               limitby=(0, 1)).first().name,
                                          time_expire=60)
                else:
                    # Do the normal represent
                    represent = cache.ram("%s_%s_%s" % (tablename, fieldname, value),
                                          lambda: field.represent(value),
                                          time_expire=60)
        elif field.type.startswith("list"):
            # Do the normal represent
            represent = cache.ram("%s_%s_%s" % (tablename, fieldname, value),
                                  lambda: field.represent(value),
                                  time_expire=60)
        else:
            # Fallback representation is the value itself
            represent = value

        return represent
# -------------------------------------------------------------------------
@staticmethod
def get_theme_geojson(resource):
    """
    Lookup Theme Layer polygons once per layer and not per-record

    Called by S3REST: S3Resource.export_tree()

    @param resource: the gis_theme_data S3Resource
    @return: dict(geojsons={tablename: {record_id: geojson}}) for
             S3XML's gis_encode()
    """
    db = current.db
    s3db = current.s3db
    tablename = "gis_theme_data"
    table = s3db.gis_theme_data
    gtable = s3db.gis_location
    query = (table.id.belongs(resource._ids)) & \
            (table.location_id == gtable.id)

    geojsons = {}
    if current.deployment_settings.get_gis_spatialdb():
        # Do the Simplify & GeoJSON direct from the DB
        geom = gtable.the_geom.st_simplify(0.01).st_asgeojson(precision=4).with_alias("geojson")
        for row in db(query).select(table.id, geom):
            geojsons[row[tablename].id] = row["gis_location"].geojson
    else:
        # Simplify each polygon in Python to reduce the download size
        for row in db(query).select(table.id, gtable.wkt):
            geojson = GIS.simplify(row["gis_location"].wkt, output="geojson")
            if geojson:
                geojsons[row[tablename].id] = geojson

    # return 'locations'
    return dict(geojsons = {tablename: geojsons})
# -------------------------------------------------------------------------
@staticmethod
def greatCircleDistance(lat1, lon1, lat2, lon2, quick=True):
    """
    Calculate the shortest distance (in km) over the earth's sphere between 2 points

    Formulae from: http://www.movable-type.co.uk/scripts/latlong.html
    (NB We could also use PostGIS functions, where possible, instead of this query)

    @param lat1, lon1: first point (decimal degrees)
    @param lat2, lon2: second point (decimal degrees)
    @param quick: use the Spherical Law of Cosines (fast, accurate to
                  around 1m) instead of the Haversine formula
    @return: distance in km
    """
    import math

    # shortcuts
    cos = math.cos
    sin = math.sin
    radians = math.radians

    if quick:
        # Spherical Law of Cosines (accurate down to around 1m & computationally quick)
        lat1 = radians(lat1)
        lat2 = radians(lat2)
        lon1 = radians(lon1)
        lon2 = radians(lon2)
        x = sin(lat1) * sin(lat2) + cos(lat1) * cos(lat2) * cos(lon2 - lon1)
        # Clamp to [-1, 1]: float rounding can push the value
        # fractionally outside acos's domain for identical or
        # antipodal points, which raised a ValueError before
        distance = math.acos(min(1.0, max(-1.0, x))) * RADIUS_EARTH
        return distance

    else:
        # Haversine
        #asin = math.asin
        sqrt = math.sqrt
        pow = math.pow
        dLat = radians(lat2 - lat1)
        dLon = radians(lon2 - lon1)
        a = pow(sin(dLat / 2), 2) + cos(radians(lat1)) * cos(radians(lat2)) * pow(sin(dLon / 2), 2)
        c = 2 * math.atan2(sqrt(a), sqrt(1 - a))
        #c = 2 * asin(sqrt(a))              # Alternate version
        # Convert radians to kilometers
        distance = RADIUS_EARTH * c
        return distance
# -------------------------------------------------------------------------
@staticmethod
def create_poly(feature):
    """
    Create a .poly file for OpenStreetMap exports
    http://wiki.openstreetmap.org/wiki/Osmosis/Polygon_Filter_File_Format

    @param feature: gis_location record (needs at least name; wkt is
                    looked-up from the db if not present in the record)
    @return: None on success, or an error message string on failure
    """
    from shapely.wkt import loads as wkt_loads

    name = feature.name

    if "wkt" in feature:
        wkt = feature.wkt
    else:
        # WKT not included by default in feature, so retrieve this now
        table = current.s3db.gis_location
        wkt = current.db(table.id == feature.id).select(table.wkt,
                                                        limitby=(0, 1)
                                                        ).first().wkt

    try:
        shape = wkt_loads(wkt)
    except:
        error = "Invalid WKT: %s" % name
        s3_debug(error)
        return error

    geom_type = shape.geom_type
    if geom_type == "MultiPolygon":
        polygons = shape.geoms
    elif geom_type == "Polygon":
        polygons = [shape]
    else:
        error = "Unsupported Geometry: %s, %s" % (name, geom_type)
        s3_debug(error)
        return error
    if os.path.exists(os.path.join(os.getcwd(), "temp")): # use web2py/temp
        TEMP = os.path.join(os.getcwd(), "temp")
    else:
        import tempfile
        TEMP = tempfile.gettempdir()
    filename = "%s.poly" % name
    filepath = os.path.join(TEMP, filename)
    File = open(filepath, "w")
    try:
        File.write("%s\n" % filename)
        count = 1
        for polygon in polygons:
            File.write("%s\n" % count)
            points = polygon.exterior.coords
            for point in points:
                File.write("\t%s\t%s\n" % (point[0], point[1]))
            File.write("END\n")
            # Bugfix: the original "++count" was a no-op (two unary
            # pluses), so every polygon section was labelled "1"
            count += 1
        File.write("END\n")
    finally:
        # Release the file handle even if a write fails
        File.close()
    return None
# -------------------------------------------------------------------------
@staticmethod
def export_admin_areas(countries=[],
levels=["L0", "L1", "L2", "L3"],
format="geojson",
simplify=0.01,
decimals=4,
):
"""
Export admin areas to /static/cache for use by interactive web-mapping services
- designed for use by the Vulnerability Mapping
@param countries: list of ISO2 country codes
@param levels: list of which Lx levels to export
@param format: Only GeoJSON supported for now (may add KML &/or OSM later)
@param simplify: tolerance for the simplification algorithm. False to disable simplification
@param decimals: number of decimal points to include in the coordinates
"""
db = current.db
s3db = current.s3db
table = s3db.gis_location
ifield = table.id
if countries:
ttable = s3db.gis_location_tag
cquery = (table.level == "L0") & \
(ttable.location_id == ifield) & \
(ttable.tag == "ISO2") & \
(ttable.value.belongs(countries))
else:
# All countries
cquery = (table.level == "L0")
if current.deployment_settings.get_gis_spatialdb():
spatial = True
_field = table.the_geom
if simplify:
# Do the Simplify & GeoJSON direct from the DB
field = _field.st_simplify(simplify).st_asgeojson(precision=decimals).with_alias("geojson")
else:
# Do the GeoJSON direct from the DB
field = _field.st_asgeojson(precision=decimals).with_alias("geojson")
else:
spatial = False
field = table.wkt
if simplify:
_simplify = GIS.simplify
else:
from shapely.wkt import loads as wkt_loads
from ..geojson import dumps
folder = os.path.join(current.request.folder, "static", "cache")
features = []
append = features.append
if "L0" in levels:
# Reduce the decimals in output by 1
_decimals = decimals -1
if spatial:
if simplify:
field = _field.st_simplify(simplify).st_asgeojson(precision=_decimals).with_alias("geojson")
else:
field = _field.st_asgeojson(precision=_decimals).with_alias("geojson")
countries = db(cquery).select(ifield,
field,
)
for row in countries:
if spatial:
id = row["gis_location"].id
geojson = row.geojson
elif simplify:
id = row.id
wkt = row.wkt
if wkt:
geojson = _simplify(wkt, tolerance=simplify,
decimals=_decimals,
output="geojson")
else:
name = db(table.id == id).select(table.name,
limitby=(0, 1)).first().name
print >> sys.stderr, "No WKT: L0 %s %s" % (name, id)
continue
else:
id = row.id
shape = wkt_loads(row.wkt)
# Compact Encoding
geojson = dumps(shape, separators=(",", ":"))
if geojson:
f = dict(
type = "Feature",
properties = {"id": id},
geometry = json.loads(geojson)
)
append(f)
if features:
data = dict(
type = "FeatureCollection",
features = features
)
# Output to file
filename = os.path.join(folder, "countries.geojson")
File = open(filename, "w")
File.write(json.dumps(data))
File.close()
q1 = (table.level == "L1") & \
(table.deleted != True)
q2 = (table.level == "L2") & \
(table.deleted != True)
q3 = (table.level == "L3") & \
(table.deleted != True)
q4 = (table.level == "L4") & \
(table.deleted != True)
if "L1" in levels:
if "L0" not in levels:
countries = db(cquery).select(ifield)
if simplify:
# We want greater precision when zoomed-in more
simplify = simplify / 2 # 0.005 with default setting
if spatial:
field = _field.st_simplify(simplify).st_asgeojson(precision=decimals).with_alias("geojson")
for country in countries:
if not spatial or "L0" not in levels:
_id = country.id
else:
_id = country["gis_location"].id
query = q1 & (table.parent == _id)
features = []
append = features.append
rows = db(query).select(ifield,
field)
for row in rows:
if spatial:
id = row["gis_location"].id
geojson = row.geojson
elif simplify:
id = row.id
wkt = row.wkt
if wkt:
geojson = _simplify(wkt, tolerance=simplify,
decimals=decimals,
output="geojson")
else:
name = db(table.id == id).select(table.name,
limitby=(0, 1)).first().name
print >> sys.stderr, "No WKT: L1 %s %s" % (name, id)
continue
else:
id = row.id
shape = wkt_loads(row.wkt)
# Compact Encoding
geojson = dumps(shape, separators=(",", ":"))
if geojson:
f = dict(
type = "Feature",
properties = {"id": id},
geometry = json.loads(geojson)
)
append(f)
if features:
data = dict(
type = "FeatureCollection",
features = features
)
# Output to file
filename = os.path.join(folder, "1_%s.geojson" % _id)
File = open(filename, "w")
File.write(json.dumps(data))
File.close()
else:
s3_debug("No L1 features in %s" % _id)
if "L2" in levels:
if "L0" not in levels and "L1" not in levels:
countries = db(cquery).select(ifield)
if simplify:
# We want greater precision when zoomed-in more
simplify = simplify / 4 # 0.00125 with default setting
if spatial:
field = _field.st_simplify(simplify).st_asgeojson(precision=decimals).with_alias("geojson")
for country in countries:
if not spatial or "L0" not in levels:
id = country.id
else:
id = country["gis_location"].id
query = q1 & (table.parent == id)
l1s = db(query).select(ifield)
for l1 in l1s:
query = q2 & (table.parent == l1.id)
features = []
append = features.append
rows = db(query).select(ifield,
field)
for row in rows:
if spatial:
id = row["gis_location"].id
geojson = row.geojson
elif simplify:
id = row.id
wkt = row.wkt
if wkt:
geojson = _simplify(wkt, tolerance=simplify,
decimals=decimals,
output="geojson")
else:
name = db(table.id == id).select(table.name,
limitby=(0, 1)).first().name
print >> sys.stderr, "No WKT: L2 %s %s" % (name, id)
continue
else:
id = row.id
shape = wkt_loads(row.wkt)
# Compact Encoding
geojson = dumps(shape, separators=(",", ":"))
if geojson:
f = dict(
type = "Feature",
properties = {"id": id},
geometry = json.loads(geojson)
)
append(f)
if features:
data = dict(
type = "FeatureCollection",
features = features
)
# Output to file
filename = os.path.join(folder, "2_%s.geojson" % l1.id)
File = open(filename, "w")
File.write(json.dumps(data))
File.close()
else:
s3_debug("No L2 features in %s" % l1.id)
if "L3" in levels:
if "L0" not in levels and "L1" not in levels and "L2" not in levels:
countries = db(cquery).select(ifield)
if simplify:
# We want greater precision when zoomed-in more
simplify = simplify / 2 # 0.000625 with default setting
if spatial:
field = _field.st_simplify(simplify).st_asgeojson(precision=decimals).with_alias("geojson")
for country in countries:
if not spatial or "L0" not in levels:
id = country.id
else:
id = country["gis_location"].id
query = q1 & (table.parent == id)
l1s = db(query).select(ifield)
for l1 in l1s:
query = q2 & (table.parent == l1.id)
l2s = db(query).select(ifield)
for l2 in l2s:
query = q3 & (table.parent == l2.id)
features = []
append = features.append
rows = db(query).select(ifield,
field)
for row in rows:
if spatial:
id = row["gis_location"].id
geojson = row.geojson
elif simplify:
id = row.id
wkt = row.wkt
if wkt:
geojson = _simplify(wkt, tolerance=simplify,
decimals=decimals,
output="geojson")
else:
name = db(table.id == id).select(table.name,
limitby=(0, 1)).first().name
print >> sys.stderr, "No WKT: L3 %s %s" % (name, id)
continue
else:
id = row.id
shape = wkt_loads(row.wkt)
# Compact Encoding
geojson = dumps(shape, separators=(",", ":"))
if geojson:
f = dict(
type = "Feature",
properties = {"id": id},
geometry = json.loads(geojson)
)
append(f)
if features:
data = dict(
type = "FeatureCollection",
features = features
)
# Output to file
filename = os.path.join(folder, "3_%s.geojson" % l2.id)
File = open(filename, "w")
File.write(json.dumps(data))
File.close()
else:
s3_debug("No L3 features in %s" % l2.id)
if "L4" in levels:
if "L0" not in levels and "L1" not in levels and "L2" not in levels and "L3" not in levels:
countries = db(cquery).select(ifield)
if simplify:
# We want greater precision when zoomed-in more
simplify = simplify / 2 # 0.0003125 with default setting
if spatial:
field = _field.st_simplify(simplify).st_asgeojson(precision=decimals).with_alias("geojson")
for country in countries:
if not spatial or "L0" not in levels:
id = country.id
else:
id = country["gis_location"].id
query = q1 & (table.parent == id)
l1s = db(query).select(ifield)
for l1 in l1s:
query = q2 & (table.parent == l1.id)
l2s = db(query).select(ifield)
for l2 in l2s:
query = q3 & (table.parent == l2.id)
l3s = db(query).select(ifield)
for l3 in l3s:
query = q4 & (table.parent == l3.id)
features = []
append = features.append
rows = db(query).select(ifield,
field)
for row in rows:
if spatial:
id = row["gis_location"].id
geojson = row.geojson
elif simplify:
id = row.id
wkt = row.wkt
if wkt:
geojson = _simplify(wkt, tolerance=simplify,
decimals=decimals,
output="geojson")
else:
name = db(table.id == id).select(table.name,
limitby=(0, 1)).first().name
print >> sys.stderr, "No WKT: L4 %s %s" % (name, id)
continue
else:
id = row.id
shape = wkt_loads(row.wkt)
# Compact Encoding
geojson = dumps(shape, separators=(",", ":"))
if geojson:
f = dict(
type = "Feature",
properties = {"id": id},
geometry = json.loads(geojson)
)
append(f)
if features:
data = dict(
type = "FeatureCollection",
features = features
)
# Output to file
filename = os.path.join(folder, "4_%s.geojson" % l3.id)
File = open(filename, "w")
File.write(json.dumps(data))
File.close()
else:
s3_debug("No L4 features in %s" % l3.id)
# -------------------------------------------------------------------------
def import_admin_areas(self,
source="gadmv1",
countries=[],
levels=["L0", "L1", "L2"]
):
"""
Import Admin Boundaries into the Locations table
@param source - Source to get the data from.
Currently only GADM is supported: http://gadm.org
@param countries - List of ISO2 countrycodes to download data for
defaults to all countries
@param levels - Which levels of the hierarchy to import.
defaults to all 3 supported levels
"""
if source == "gadmv1":
try:
from osgeo import ogr
except:
s3_debug("Unable to import ogr. Please install python-gdal bindings: GDAL-1.8.1+")
return
if "L0" in levels:
self.import_gadm1_L0(ogr, countries=countries)
if "L1" in levels:
self.import_gadm1(ogr, "L1", countries=countries)
if "L2" in levels:
self.import_gadm1(ogr, "L2", countries=countries)
s3_debug("All done!")
elif source == "gadmv1":
try:
from osgeo import ogr
except:
s3_debug("Unable to import ogr. Please install python-gdal bindings: GDAL-1.8.1+")
return
if "L0" in levels:
self.import_gadm2(ogr, "L0", countries=countries)
if "L1" in levels:
self.import_gadm2(ogr, "L1", countries=countries)
if "L2" in levels:
self.import_gadm2(ogr, "L2", countries=countries)
s3_debug("All done!")
else:
s3_debug("Only GADM is currently supported")
return
return
# -------------------------------------------------------------------------
    @staticmethod
    def import_gadm1_L0(ogr, countries=[]):
        """
            Import L0 Admin Boundaries into the Locations table from GADMv1
            - designed to be called from import_admin_areas()
            - assumes that basic prepop has been done, so that no new records
              need to be created: existing L0 rows (matched via their ISO2
              location tag) are updated in-place with WKT geometry

            @param ogr - The OGR Python module
            @param countries - List of ISO2 countrycodes to download data for
                               defaults to all countries
        """
        db = current.db
        s3db = current.s3db
        table = s3db.gis_location
        ttable = s3db.gis_location_tag
        layer = {
            "url" : "http://gadm.org/data/gadm_v1_lev0_shp.zip",
            "zipfile" : "gadm_v1_lev0_shp.zip",
            "shapefile" : "gadm1_lev0",
            "codefield" : "ISO2", # This field is used to uniquely identify the L0 for updates
            "code2field" : "ISO" # This field is used to uniquely identify the L0 for parenting the L1s
        }
        # Copy the current working directory to revert back to later
        old_working_directory = os.getcwd()
        # Create the working directory
        if os.path.exists(os.path.join(os.getcwd(), "temp")): # use web2py/temp/GADMv1 as a cache
            TEMP = os.path.join(os.getcwd(), "temp")
        else:
            import tempfile
            TEMP = tempfile.gettempdir()
        tempPath = os.path.join(TEMP, "GADMv1")
        try:
            os.mkdir(tempPath)
        except OSError:
            # Folder already exists - reuse
            pass
        # Set the current working directory
        # (all subsequent relative file operations happen inside tempPath)
        os.chdir(tempPath)
        layerName = layer["shapefile"]
        # Check if file has already been downloaded
        fileName = layer["zipfile"]
        if not os.path.isfile(fileName):
            # Download the file
            from gluon.tools import fetch
            url = layer["url"]
            s3_debug("Downloading %s" % url)
            try:
                file = fetch(url)
            except urllib2.URLError, exception:
                s3_debug(exception)
                # NOTE(review): returns without chdir back to
                # old_working_directory, unlike import_gadm1() - confirm
                # whether intentional
                return
            fp = StringIO(file)
        else:
            s3_debug("Using existing file %s" % fileName)
            fp = open(fileName)
        # Unzip it
        # Extract each shapefile component to the working directory
        s3_debug("Unzipping %s" % layerName)
        import zipfile
        myfile = zipfile.ZipFile(fp)
        for ext in ["dbf", "prj", "sbn", "sbx", "shp", "shx"]:
            fileName = "%s.%s" % (layerName, ext)
            file = myfile.read(fileName)
            f = open(fileName, "w")
            f.write(file)
            f.close()
        myfile.close()
        # Use OGR to read Shapefile
        s3_debug("Opening %s.shp" % layerName)
        ds = ogr.Open( "%s.shp" % layerName )
        if ds is None:
            s3_debug("Open failed.\n")
            return
        lyr = ds.GetLayerByName( layerName )
        lyr.ResetReading()
        codeField = layer["codefield"]
        code2Field = layer["code2field"]
        for feat in lyr:
            code = feat.GetField(codeField)
            if not code:
                # Skip the entries which aren't countries
                continue
            if countries and code not in countries:
                # Skip the countries which we're not interested in
                continue
            geom = feat.GetGeometryRef()
            if geom is not None:
                if geom.GetGeometryType() == ogr.wkbPoint:
                    # Point geometries are deliberately ignored for L0
                    pass
                else:
                    # Locate the existing L0 record via its ISO2 tag
                    query = (table.id == ttable.location_id) & \
                            (ttable.tag == "ISO2") & \
                            (ttable.value == code)
                    wkt = geom.ExportToWkt()
                    # Map the WKT prefix to the gis_feature_type code
                    # NOTE(review): no branch for plain "POINT" WKT - if one
                    # slipped past the wkbPoint check above,
                    # gis_feature_type would be unbound here; confirm
                    if wkt.startswith("LINESTRING"):
                        gis_feature_type = 2
                    elif wkt.startswith("POLYGON"):
                        gis_feature_type = 3
                    elif wkt.startswith("MULTIPOINT"):
                        gis_feature_type = 4
                    elif wkt.startswith("MULTILINESTRING"):
                        gis_feature_type = 5
                    elif wkt.startswith("MULTIPOLYGON"):
                        gis_feature_type = 6
                    elif wkt.startswith("GEOMETRYCOLLECTION"):
                        gis_feature_type = 7
                    code2 = feat.GetField(code2Field)
                    #area = feat.GetField("Shape_Area")
                    try:
                        # Update geometry on the existing L0 record and tag
                        # it with its ISO3 code for parenting the L1s
                        id = db(query).select(table.id,
                                              limitby=(0, 1)).first().id
                        query = (table.id == id)
                        db(query).update(gis_feature_type=gis_feature_type,
                                         wkt=wkt)
                        ttable.insert(location_id = id,
                                      tag = "ISO3",
                                      value = code2)
                        #ttable.insert(location_id = location_id,
                        #              tag = "area",
                        #              value = area)
                    except db._adapter.driver.OperationalError, exception:
                        # e.g. WKT too large for the DB column
                        s3_debug(exception)
            else:
                s3_debug("No geometry\n")
        # Close the shapefile
        ds.Destroy()
        db.commit()
        # Revert back to the working directory as before.
        os.chdir(old_working_directory)
        return
# -------------------------------------------------------------------------
    def import_gadm1(self, ogr, level="L1", countries=[]):
        """
            Import L1 Admin Boundaries into the Locations table from GADMv1
            - designed to be called from import_admin_areas()
            - assumes a fresh database with just Countries imported

            @param ogr - The OGR Python module
            @param level - "L1" or "L2"
            @param countries - List of ISO2 countrycodes to download data for
                               defaults to all countries

            Pipeline: download & unzip shapefile -> convert attributes to CSV
            via OGR (to control text encoding) -> walk CSV + SHP in lockstep,
            inserting one gis_location per feature, parented via location tags.
        """
        if level == "L1":
            layer = {
                "url" : "http://gadm.org/data/gadm_v1_lev1_shp.zip",
                "zipfile" : "gadm_v1_lev1_shp.zip",
                "shapefile" : "gadm1_lev1",
                "namefield" : "NAME_1",
                # Uniquely identify the L1 for updates
                "sourceCodeField" : "ID_1",
                "edenCodeField" : "GADM1",
                # Uniquely identify the L0 for parenting the L1s
                "parent" : "L0",
                "parentSourceCodeField" : "ISO",
                "parentEdenCodeField" : "ISO3",
            }
        elif level == "L2":
            layer = {
                "url" : "http://biogeo.ucdavis.edu/data/gadm/gadm_v1_lev2_shp.zip",
                "zipfile" : "gadm_v1_lev2_shp.zip",
                "shapefile" : "gadm_v1_lev2",
                "namefield" : "NAME_2",
                # Uniquely identify the L2 for updates
                "sourceCodeField" : "ID_2",
                "edenCodeField" : "GADM2",
                # Uniquely identify the L0 for parenting the L1s
                "parent" : "L1",
                "parentSourceCodeField" : "ID_1",
                "parentEdenCodeField" : "GADM1",
            }
        else:
            s3_debug("Level %s not supported!" % level)
            return
        import csv
        import shutil
        import zipfile
        db = current.db
        s3db = current.s3db
        cache = s3db.cache
        table = s3db.gis_location
        ttable = s3db.gis_location_tag
        # Some rows carry very large WKT/attribute payloads
        csv.field_size_limit(2**20 * 100) # 100 megs
        # Not all the data is encoded like this
        # (unable to determine encoding - appears to be damaged in source):
        # Azerbaijan L1
        # Vietnam L1 & L2
        ENCODING = "cp1251"
        # from http://docs.python.org/library/csv.html#csv-examples
        def latin_csv_reader(unicode_csv_data, dialect=csv.excel, **kwargs):
            # Decode each CSV cell from the source encoding to unicode
            for row in csv.reader(unicode_csv_data):
                yield [unicode(cell, ENCODING) for cell in row]
        def latin_dict_reader(data, dialect=csv.excel, **kwargs):
            # DictReader-alike built on latin_csv_reader: first row = headers
            reader = latin_csv_reader(data, dialect=dialect, **kwargs)
            headers = reader.next()
            for r in reader:
                yield dict(zip(headers, r))
        # Copy the current working directory to revert back to later
        old_working_directory = os.getcwd()
        # Create the working directory
        if os.path.exists(os.path.join(os.getcwd(), "temp")): # use web2py/temp/GADMv1 as a cache
            TEMP = os.path.join(os.getcwd(), "temp")
        else:
            import tempfile
            TEMP = tempfile.gettempdir()
        tempPath = os.path.join(TEMP, "GADMv1")
        try:
            os.mkdir(tempPath)
        except OSError:
            # Folder already exists - reuse
            pass
        # Set the current working directory
        os.chdir(tempPath)
        # Remove any existing CSV folder to allow the new one to be created
        try:
            shutil.rmtree("CSV")
        except OSError:
            # Folder doesn't exist, so should be creatable
            pass
        layerName = layer["shapefile"]
        # Check if file has already been downloaded
        fileName = layer["zipfile"]
        if not os.path.isfile(fileName):
            # Download the file
            from gluon.tools import fetch
            url = layer["url"]
            s3_debug("Downloading %s" % url)
            try:
                file = fetch(url)
            except urllib2.URLError, exception:
                s3_debug(exception)
                # Revert back to the working directory as before.
                os.chdir(old_working_directory)
                return
            fp = StringIO(file)
        else:
            s3_debug("Using existing file %s" % fileName)
            fp = open(fileName)
        # Unzip it
        s3_debug("Unzipping %s" % layerName)
        myfile = zipfile.ZipFile(fp)
        for ext in ["dbf", "prj", "sbn", "sbx", "shp", "shx"]:
            fileName = "%s.%s" % (layerName, ext)
            file = myfile.read(fileName)
            f = open(fileName, "w")
            f.write(file)
            f.close()
        myfile.close()
        # Convert to CSV
        s3_debug("Converting %s.shp to CSV" % layerName)
        # Simplified version of generic Shapefile Importer:
        # http://svn.osgeo.org/gdal/trunk/gdal/swig/python/samples/ogr2ogr.py
        bSkipFailures = False
        nGroupTransactions = 200
        nFIDToFetch = ogr.NullFID
        inputFileName = "%s.shp" % layerName
        inputDS = ogr.Open(inputFileName, False)
        outputFileName = "CSV"
        outputDriver = ogr.GetDriverByName("CSV")
        outputDS = outputDriver.CreateDataSource(outputFileName, options=[])
        # GADM only has 1 layer/source
        inputLayer = inputDS.GetLayer(0)
        inputFDefn = inputLayer.GetLayerDefn()
        # Create the output Layer
        outputLayer = outputDS.CreateLayer(layerName)
        # Copy all Fields
        papszFieldTypesToString = []
        inputFieldCount = inputFDefn.GetFieldCount()
        # panMap[i] = index of input field i in the output layer (-1 = unmapped)
        panMap = [-1 for i in range(inputFieldCount)]
        outputFDefn = outputLayer.GetLayerDefn()
        nDstFieldCount = 0
        if outputFDefn is not None:
            nDstFieldCount = outputFDefn.GetFieldCount()
        for iField in range(inputFieldCount):
            inputFieldDefn = inputFDefn.GetFieldDefn(iField)
            oFieldDefn = ogr.FieldDefn(inputFieldDefn.GetNameRef(),
                                       inputFieldDefn.GetType())
            oFieldDefn.SetWidth(inputFieldDefn.GetWidth())
            oFieldDefn.SetPrecision(inputFieldDefn.GetPrecision())
            # The field may have been already created at layer creation
            iDstField = -1;
            if outputFDefn is not None:
                iDstField = outputFDefn.GetFieldIndex(oFieldDefn.GetNameRef())
            if iDstField >= 0:
                panMap[iField] = iDstField
            elif outputLayer.CreateField( oFieldDefn ) == 0:
                # now that we've created a field, GetLayerDefn() won't return NULL
                if outputFDefn is None:
                    outputFDefn = outputLayer.GetLayerDefn()
                panMap[iField] = nDstFieldCount
                nDstFieldCount = nDstFieldCount + 1
        # Transfer features
        # (batched into transactions of nGroupTransactions features)
        nFeaturesInTransaction = 0
        iSrcZField = -1
        inputLayer.ResetReading()
        if nGroupTransactions > 0:
            outputLayer.StartTransaction()
        while True:
            poDstFeature = None
            if nFIDToFetch != ogr.NullFID:
                # Only fetch feature on first pass.
                if nFeaturesInTransaction == 0:
                    poFeature = inputLayer.GetFeature(nFIDToFetch)
                else:
                    poFeature = None
            else:
                poFeature = inputLayer.GetNextFeature()
            if poFeature is None:
                break
            nParts = 0
            nIters = 1
            for iPart in range(nIters):
                nFeaturesInTransaction = nFeaturesInTransaction + 1
                if nFeaturesInTransaction == nGroupTransactions:
                    outputLayer.CommitTransaction()
                    outputLayer.StartTransaction()
                    nFeaturesInTransaction = 0
                poDstFeature = ogr.Feature(outputLayer.GetLayerDefn())
                if poDstFeature.SetFromWithMap(poFeature, 1, panMap) != 0:
                    if nGroupTransactions > 0:
                        outputLayer.CommitTransaction()
                    s3_debug("Unable to translate feature %d from layer %s" % (poFeature.GetFID() , inputFDefn.GetName() ))
                    # Revert back to the working directory as before.
                    os.chdir(old_working_directory)
                    return
                poDstGeometry = poDstFeature.GetGeometryRef()
                if poDstGeometry is not None:
                    if nParts > 0:
                        # For -explodecollections, extract the iPart(th) of the geometry
                        poPart = poDstGeometry.GetGeometryRef(iPart).Clone()
                        poDstFeature.SetGeometryDirectly(poPart)
                        poDstGeometry = poPart
                if outputLayer.CreateFeature( poDstFeature ) != 0 and not bSkipFailures:
                    if nGroupTransactions > 0:
                        outputLayer.RollbackTransaction()
                    # Revert back to the working directory as before.
                    os.chdir(old_working_directory)
                    return
        if nGroupTransactions > 0:
            outputLayer.CommitTransaction()
        # Cleanup
        outputDS.Destroy()
        inputDS.Destroy()
        # Move the generated CSV out of the CSV/ folder and remove the folder
        fileName = "%s.csv" % layerName
        filePath = os.path.join("CSV", fileName)
        os.rename(filePath, fileName)
        os.removedirs("CSV")
        # Use OGR to read SHP for geometry
        s3_debug("Opening %s.shp" % layerName)
        ds = ogr.Open( "%s.shp" % layerName )
        if ds is None:
            s3_debug("Open failed.\n")
            # Revert back to the working directory as before.
            os.chdir(old_working_directory)
            return
        lyr = ds.GetLayerByName(layerName)
        lyr.ResetReading()
        # Use CSV for Name
        # (CSV row i corresponds to SHP feature i - walked in lockstep below)
        s3_debug("Opening %s.csv" % layerName)
        rows = latin_dict_reader(open("%s.csv" % layerName))
        nameField = layer["namefield"]
        sourceCodeField = layer["sourceCodeField"]
        edenCodeField = layer["edenCodeField"]
        parentSourceCodeField = layer["parentSourceCodeField"]
        parentLevel = layer["parent"]
        parentEdenCodeField = layer["parentEdenCodeField"]
        parentCodeQuery = (ttable.tag == parentEdenCodeField)
        count = 0
        for row in rows:
            # Read Attributes
            feat = lyr[count]
            parentCode = feat.GetField(parentSourceCodeField)
            query = (table.level == parentLevel) & \
                    parentCodeQuery & \
                    (ttable.value == parentCode)
            parent = db(query).select(table.id,
                                      ttable.value,
                                      limitby=(0, 1),
                                      cache=cache).first()
            if not parent:
                # Skip locations for which we don't have a valid parent
                s3_debug("Skipping - cannot find parent with key: %s, value: %s" % (parentEdenCodeField, parentCode))
                count += 1
                continue
            if countries:
                # Skip the countries which we're not interested in
                if level == "L1":
                    if parent["gis_location_tag"].value not in countries:
                        #s3_debug("Skipping %s as not in countries list" % parent["gis_location_tag"].value)
                        count += 1
                        continue
                else:
                    # Check grandparent
                    country = self.get_parent_country(parent.id, key_type="code")
                    if country not in countries:
                        count += 1
                        continue
            # This is got from CSV in order to be able to handle the encoding
            name = row.pop(nameField)
            # NOTE(review): the encoded result is discarded - str.encode does
            # not mutate in place; presumably this was meant as an
            # encodability check - confirm
            name.encode("utf8")
            code = feat.GetField(sourceCodeField)
            area = feat.GetField("Shape_Area")
            geom = feat.GetGeometryRef()
            if geom is not None:
                if geom.GetGeometryType() == ogr.wkbPoint:
                    # NOTE(review): OGR points are (x=lon, y=lat), so
                    # lat=GetX()/lon=GetY() looks swapped - verify against
                    # imported data
                    lat = geom.GetX()
                    lon = geom.GetY()
                    id = table.insert(name=name,
                                      level=level,
                                      gis_feature_type=1,
                                      lat=lat,
                                      lon=lon,
                                      parent=parent.id)
                    ttable.insert(location_id = id,
                                  tag = edenCodeField,
                                  value = code)
                    # ttable.insert(location_id = id,
                    #               tag = "area",
                    #               value = area)
                else:
                    wkt = geom.ExportToWkt()
                    # Map the WKT prefix to the gis_feature_type code
                    if wkt.startswith("LINESTRING"):
                        gis_feature_type = 2
                    elif wkt.startswith("POLYGON"):
                        gis_feature_type = 3
                    elif wkt.startswith("MULTIPOINT"):
                        gis_feature_type = 4
                    elif wkt.startswith("MULTILINESTRING"):
                        gis_feature_type = 5
                    elif wkt.startswith("MULTIPOLYGON"):
                        gis_feature_type = 6
                    elif wkt.startswith("GEOMETRYCOLLECTION"):
                        gis_feature_type = 7
                    id = table.insert(name=name,
                                      level=level,
                                      gis_feature_type=gis_feature_type,
                                      wkt=wkt,
                                      parent=parent.id)
                    ttable.insert(location_id = id,
                                  tag = edenCodeField,
                                  value = code)
                    # ttable.insert(location_id = id,
                    #               tag = "area",
                    #               value = area)
            else:
                s3_debug("No geometry\n")
            count += 1
        # Close the shapefile
        ds.Destroy()
        db.commit()
        s3_debug("Updating Location Tree...")
        try:
            self.update_location_tree()
        except MemoryError:
            # If doing all L2s, it can break memory limits
            # @ToDo: Check now that we're doing by level
            s3_debug("Memory error when trying to update_location_tree()!")
        db.commit()
        # Revert back to the working directory as before.
        os.chdir(old_working_directory)
        return
# -------------------------------------------------------------------------
    @staticmethod
    def import_gadm2(ogr, level="L0", countries=[]):
        """
            Import Admin Boundaries into the Locations table from GADMv2
            - designed to be called from import_admin_areas()
            - assumes that basic prepop has been done, so that no new L0 records need to be created

            @param ogr - The OGR Python module
            @param level - "L0", "L1" or "L2"
            @param countries - List of ISO2 countrycodes to download data for
                               defaults to all countries

            @ToDo: Complete this
                - not currently possible to get all data from the 1 file easily
                - no ISO2
                - needs updating for gis_location_tag model
                - only the lowest available levels accessible
                - use GADMv1 for L0, L1, L2 & GADMv2 for specific lower?

            NOTE(review): this function is incomplete (see the FIXMEs below) -
            the record-matching query is commented out, so the update inside
            the feature loop would raise NameError on 'query' if executed.
        """
        if level == "L0":
            codeField = "ISO2" # This field is used to uniquely identify the L0 for updates
            code2Field = "ISO" # This field is used to uniquely identify the L0 for parenting the L1s
        elif level == "L1":
            nameField = "NAME_1"
            codeField = "ID_1" # This field is used to uniquely identify the L1 for updates
            code2Field = "ISO" # This field is used to uniquely identify the L0 for parenting the L1s
            parent = "L0"
            parentCode = "code2"
        elif level == "L2":
            nameField = "NAME_2"
            codeField = "ID_2" # This field is used to uniquely identify the L2 for updates
            code2Field = "ID_1" # This field is used to uniquely identify the L1 for parenting the L2s
            parent = "L1"
            parentCode = "code"
        else:
            s3_debug("Level %s not supported!" % level)
            return
        # NOTE(review): nameField/parent/parentCode are currently unused
        # (the import logic below is incomplete)
        db = current.db
        s3db = current.s3db
        table = s3db.gis_location
        url = "http://gadm.org/data2/gadm_v2_shp.zip"
        # NB this local name is rebound by 'import zipfile' further down
        zipfile = "gadm_v2_shp.zip"
        shapefile = "gadm2"
        # Copy the current working directory to revert back to later
        old_working_directory = os.getcwd()
        # Create the working directory
        if os.path.exists(os.path.join(os.getcwd(), "temp")): # use web2py/temp/GADMv2 as a cache
            TEMP = os.path.join(os.getcwd(), "temp")
        else:
            import tempfile
            TEMP = tempfile.gettempdir()
        tempPath = os.path.join(TEMP, "GADMv2")
        try:
            os.mkdir(tempPath)
        except OSError:
            # Folder already exists - reuse
            pass
        # Set the current working directory
        os.chdir(tempPath)
        layerName = shapefile
        # Check if file has already been downloaded
        fileName = zipfile
        if not os.path.isfile(fileName):
            # Download the file
            from gluon.tools import fetch
            s3_debug("Downloading %s" % url)
            try:
                file = fetch(url)
            except urllib2.URLError, exception:
                s3_debug(exception)
                return
            fp = StringIO(file)
        else:
            s3_debug("Using existing file %s" % fileName)
            fp = open(fileName)
        # Unzip it
        s3_debug("Unzipping %s" % layerName)
        import zipfile
        myfile = zipfile.ZipFile(fp)
        for ext in ["dbf", "prj", "sbn", "sbx", "shp", "shx"]:
            fileName = "%s.%s" % (layerName, ext)
            file = myfile.read(fileName)
            f = open(fileName, "w")
            f.write(file)
            f.close()
        myfile.close()
        # Use OGR to read Shapefile
        s3_debug("Opening %s.shp" % layerName)
        ds = ogr.Open("%s.shp" % layerName)
        if ds is None:
            s3_debug("Open failed.\n")
            return
        lyr = ds.GetLayerByName(layerName)
        lyr.ResetReading()
        for feat in lyr:
            code = feat.GetField(codeField)
            if not code:
                # Skip the entries which aren't countries
                continue
            if countries and code not in countries:
                # Skip the countries which we're not interested in
                continue
            geom = feat.GetGeometryRef()
            if geom is not None:
                if geom.GetGeometryType() == ogr.wkbPoint:
                    pass
                else:
                    ## FIXME
                    ##query = (table.code == code)
                    wkt = geom.ExportToWkt()
                    # Map the WKT prefix to the gis_feature_type code
                    if wkt.startswith("LINESTRING"):
                        gis_feature_type = 2
                    elif wkt.startswith("POLYGON"):
                        gis_feature_type = 3
                    elif wkt.startswith("MULTIPOINT"):
                        gis_feature_type = 4
                    elif wkt.startswith("MULTILINESTRING"):
                        gis_feature_type = 5
                    elif wkt.startswith("MULTIPOLYGON"):
                        gis_feature_type = 6
                    elif wkt.startswith("GEOMETRYCOLLECTION"):
                        gis_feature_type = 7
                    code2 = feat.GetField(code2Field)
                    area = feat.GetField("Shape_Area")
                    try:
                        ## FIXME
                        # 'query' is undefined here (see FIXME above), so this
                        # would raise NameError, which is NOT caught below
                        db(query).update(gis_feature_type=gis_feature_type,
                                         wkt=wkt)
                                         #code2=code2,
                                         #area=area
                    except db._adapter.driver.OperationalError, exception:
                        s3_debug(exception)
            else:
                s3_debug("No geometry\n")
        # Close the shapefile
        ds.Destroy()
        db.commit()
        # Revert back to the working directory as before.
        os.chdir(old_working_directory)
        return
# -------------------------------------------------------------------------
def import_geonames(self, country, level=None):
"""
Import Locations from the Geonames database
@param country: the 2-letter country code
@param level: the ADM level to import
Designed to be run from the CLI
Levels should be imported sequentially.
It is assumed that L0 exists in the DB already
L1-L3 may have been imported from Shapefiles with Polygon info
Geonames can then be used to populate the lower levels of hierarchy
"""
import codecs
from shapely.geometry import point
from shapely.geos import ReadingError
from shapely.wkt import loads as wkt_loads
db = current.db
s3db = current.s3db
cache = s3db.cache
request = current.request
settings = current.deployment_settings
table = s3db.gis_location
ttable = s3db.gis_location_tag
url = "http://download.geonames.org/export/dump/" + country + ".zip"
cachepath = os.path.join(request.folder, "cache")
filename = country + ".txt"
filepath = os.path.join(cachepath, filename)
if os.access(filepath, os.R_OK):
cached = True
else:
cached = False
if not os.access(cachepath, os.W_OK):
s3_debug("Folder not writable", cachepath)
return
if not cached:
# Download File
from gluon.tools import fetch
try:
f = fetch(url)
except (urllib2.URLError,):
e = sys.exc_info()[1]
s3_debug("URL Error", e)
return
except (urllib2.HTTPError,):
e = sys.exc_info()[1]
s3_debug("HTTP Error", e)
return
# Unzip File
if f[:2] == "PK":
# Unzip
fp = StringIO(f)
import zipfile
myfile = zipfile.ZipFile(fp)
try:
# Python 2.6+ only :/
# For now, 2.5 users need to download/unzip manually to cache folder
myfile.extract(filename, cachepath)
myfile.close()
except IOError:
s3_debug("Zipfile contents don't seem correct!")
myfile.close()
return
f = codecs.open(filepath, encoding="utf-8")
# Downloaded file is worth keeping
#os.remove(filepath)
if level == "L1":
fc = "ADM1"
parent_level = "L0"
elif level == "L2":
fc = "ADM2"
parent_level = "L1"
elif level == "L3":
fc = "ADM3"
parent_level = "L2"
elif level == "L4":
fc = "ADM4"
parent_level = "L3"
else:
# 5 levels of hierarchy or 4?
# @ToDo make more extensible still
gis_location_hierarchy = self.get_location_hierarchy()
try:
label = gis_location_hierarchy["L5"]
level = "L5"
parent_level = "L4"
except:
# ADM4 data in Geonames isn't always good (e.g. PK bad)
level = "L4"
parent_level = "L3"
finally:
fc = "PPL"
deleted = (table.deleted == False)
query = deleted & (table.level == parent_level)
# Do the DB query once (outside loop)
all_parents = db(query).select(table.wkt,
table.lon_min,
table.lon_max,
table.lat_min,
table.lat_max,
table.id)
if not all_parents:
# No locations in the parent level found
# - use the one higher instead
parent_level = "L" + str(int(parent_level[1:]) + 1)
query = deleted & (table.level == parent_level)
all_parents = db(query).select(table.wkt,
table.lon_min,
table.lon_max,
table.lat_min,
table.lat_max,
table.id)
# Parse File
current_row = 0
for line in f:
current_row += 1
# Format of file: http://download.geonames.org/export/dump/readme.txt
geonameid,
name,
asciiname,
alternatenames,
lat,
lon,
feature_class,
feature_code,
country_code,
cc2,
admin1_code,
admin2_code,
admin3_code,
admin4_code,
population,
elevation,
gtopo30,
timezone,
modification_date = line.split("\t")
if feature_code == fc:
# Add WKT
lat = float(lat)
lon = float(lon)
wkt = self.latlon_to_wkt(lat, lon)
shape = point.Point(lon, lat)
# Add Bounds
lon_min = lon_max = lon
lat_min = lat_max = lat
# Locate Parent
parent = ""
# 1st check for Parents whose bounds include this location (faster)
def in_bbox(row):
return (row.lon_min < lon_min) & \
(row.lon_max > lon_max) & \
(row.lat_min < lat_min) & \
(row.lat_max > lat_max)
for row in all_parents.find(lambda row: in_bbox(row)):
# Search within this subset with a full geometry check
# Uses Shapely.
# @ToDo provide option to use PostGIS/Spatialite
try:
parent_shape = wkt_loads(row.wkt)
if parent_shape.intersects(shape):
parent = row.id
# Should be just a single parent
break
except ReadingError:
s3_debug("Error reading wkt of location with id", row.id)
# Add entry to database
new_id = table.insert(name=name,
level=level,
parent=parent,
lat=lat,
lon=lon,
wkt=wkt,
lon_min=lon_min,
lon_max=lon_max,
lat_min=lat_min,
lat_max=lat_max)
ttable.insert(location_id=new_id,
tag="geonames",
value=geonames_id)
else:
continue
s3_debug("All done!")
return
# -------------------------------------------------------------------------
@staticmethod
def latlon_to_wkt(lat, lon):
"""
Convert a LatLon to a WKT string
>>> s3gis.latlon_to_wkt(6, 80)
'POINT(80 6)'
"""
WKT = "POINT(%f %f)" % (lon, lat)
return WKT
# -------------------------------------------------------------------------
@staticmethod
def parse_location(wkt, lon=None, lat=None):
"""
Parses a location from wkt, returning wkt, lat, lon, bounding box and type.
For points, wkt may be None if lat and lon are provided; wkt will be generated.
For lines and polygons, the lat, lon returned represent the shape's centroid.
Centroid and bounding box will be None if Shapely is not available.
"""
if not wkt:
if not lon is not None and lat is not None:
raise RuntimeError, "Need wkt or lon+lat to parse a location"
wkt = "POINT(%f %f)" % (lon, lat)
geom_type = GEOM_TYPES["point"]
bbox = (lon, lat, lon, lat)
else:
try:
from shapely.wkt import loads as wkt_loads
SHAPELY = True
except:
SHAPELY = False
if SHAPELY:
shape = wkt_loads(wkt)
centroid = shape.centroid
lat = centroid.y
lon = centroid.x
geom_type = GEOM_TYPES[shape.type.lower()]
bbox = shape.bounds
else:
lat = None
lon = None
geom_type = GEOM_TYPES[wkt.split("(")[0].lower()]
bbox = None
res = {"wkt": wkt, "lat": lat, "lon": lon, "gis_feature_type": geom_type}
if bbox:
res["lon_min"], res["lat_min"], res["lon_max"], res["lat_max"] = bbox
return res
# -------------------------------------------------------------------------
    def update_location_tree(self, feature=None):
        """
            Update GIS Locations' Materialized path, Lx locations & Lat/Lon

            @param feature: a feature dict to update the tree for
                            - if not provided then update the whole tree
                            (assumed to carry at least an "id"; other keys
                            are looked up from the db when absent — TODO
                            confirm against callers)

            returns the path of the feature

            Called onaccept for locations (async, where-possible)

            Processing is level-by-level (L0 first) so that parents are
            guaranteed to be correct before their children are visited.
        """

        if not feature:
            # Do the whole database
            # Do in chunks to save memory and also do in correct order
            db = current.db
            table = db.gis_location
            fields = [table.id, table.name, table.gis_feature_type,
                      table.L0, table.L1, table.L2, table.L3, table.L4,
                      table.lat, table.lon, table.wkt, table.inherited,
                      table.path, table.parent]
            # Bind to locals: avoids repeated attribute lookups in the loop
            update_location_tree = self.update_location_tree
            wkt_centroid = self.wkt_centroid
            for level in ["L0", "L1", "L2", "L3", "L4", "L5", None]:
                features = db(table.level == level).select(*fields)
                for feature in features:
                    feature["level"] = level
                    update_location_tree(feature)
                    # Also do the Bounds/Centroid/WKT
                    # (wkt_centroid is an onvalidation callback, so feed it a
                    # fake form with this feature as its vars)
                    form = Storage()
                    form.vars = feature
                    form.errors = Storage()
                    wkt_centroid(form)
                    _vars = form.vars
                    if "lat_max" in _vars:
                        db(table.id == feature.id).update(gis_feature_type = _vars.gis_feature_type,
                                                          lat = _vars.lat,
                                                          lon = _vars.lon,
                                                          wkt = _vars.wkt,
                                                          lat_max = _vars.lat_max,
                                                          lat_min = _vars.lat_min,
                                                          lon_min = _vars.lon_min,
                                                          lon_max = _vars.lon_max)
            return

        # NOTE: "id" shadows the builtin; kept as-is for compatibility.
        # False when the key is absent, otherwise the id as a string.
        id = "id" in feature and str(feature["id"])
        if not id:
            # Nothing we can do
            raise ValueError

        # L0
        db = current.db
        table = db.gis_location
        # .get(..., False) distinguishes "key absent" (False) from a stored
        # NULL (None), so we only re-read the record when data is missing
        name = feature.get("name", False)
        level = feature.get("level", False)
        path = feature.get("path", False)
        L0 = feature.get("L0", False)
        if level == "L0":
            if name:
                if path == id and L0 == name:
                    # No action required
                    return path
                else:
                    # An L0 is its own root: path is just its id
                    db(table.id == id).update(L0=name,
                                              path=id)
            else:
                # Look this up
                feature = db(table.id == id).select(table.name,
                                                    table.path,
                                                    table.L0,
                                                    limitby=(0, 1)).first()
                if feature:
                    name = feature["name"]
                    path = feature["path"]
                    L0 = feature["L0"]
                    if path == id and L0 == name:
                        # No action required
                        return path
                    else:
                        db(table.id == id).update(L0=name,
                                                  path=id)
            return id

        # L1
        parent = feature.get("parent", False)
        L1 = feature.get("L1", False)
        lat = feature.get("lat", False)
        lon = feature.get("lon", False)
        inherited = feature.get("inherited", None)
        if level == "L1":
            if name is False or lat is False or lon is False or inherited is None or \
               parent is False or path is False or L0 is False or L1 is False:
                # Get the whole feature
                feature = db(table.id == id).select(table.name,
                                                    table.parent,
                                                    table.path,
                                                    table.lat,
                                                    table.lon,
                                                    table.inherited,
                                                    table.L0,
                                                    table.L1,
                                                    limitby=(0, 1)).first()
                name = feature.name
                parent = feature.parent
                path = feature.path
                lat = feature.lat
                lon = feature.lon
                inherited = feature.inherited
                L0 = feature.L0
                L1 = feature.L1

            if parent:
                # Materialized path: parent-id/own-id
                _path = "%s/%s" % (parent, id)
                _L0 = db(table.id == parent).select(table.name,
                                                    table.lat,
                                                    table.lon,
                                                    limitby=(0, 1),
                                                    cache=current.s3db.cache).first()
                L0_name = _L0.name
                L0_lat = _L0.lat
                L0_lon = _L0.lon
            else:
                # Orphan L1: acts as its own root
                _path = id
                L0_name = None
                L0_lat = None
                L0_lon = None

            if path == _path and L1 == name and L0 == L0_name:
                if inherited and lat == L0_lat and lon == L0_lon:
                    # No action required
                    return path
                elif inherited or lat is None or lon is None:
                    # Refresh inherited coordinates from the parent
                    db(table.id == id).update(inherited=True,
                                              lat=L0_lat,
                                              lon=L0_lon)
            elif inherited and lat == L0_lat and lon == L0_lon:
                # Path/names changed but inherited coords still match:
                # update the names only.
                # NOTE(review): this branch returns without cascading to
                # descendants, unlike the branches below — presumably because
                # coords are unchanged; confirm that path changes need no
                # cascade here.
                db(table.id == id).update(path=_path,
                                          L0=L0_name,
                                          L1=name)
                return _path
            elif inherited or lat is None or lon is None:
                db(table.id == id).update(path=_path,
                                          L0=L0_name,
                                          L1=name,
                                          inherited=True,
                                          lat=L0_lat,
                                          lon=L0_lon)
            else:
                db(table.id == id).update(path=_path,
                                          inherited=False,
                                          L0=L0_name,
                                          L1=name)
            # Ensure that any locations which inherit their latlon from this one get updated
            query = (table.parent == id) & \
                    (table.inherited == True)
            fields = [table.id, table.name, table.level, table.path, table.parent,
                      table.L0, table.L1, table.L2, table.L3, table.L4,
                      table.lat, table.lon, table.inherited]
            rows = db(query).select(*fields)
            for row in rows:
                self.update_location_tree(row)
            return _path

        # L2
        L2 = feature.get("L2", False)
        if level == "L2":
            if name is False or lat is False or lon is False or inherited is None or \
               parent is False or path is False or L0 is False or L1 is False or \
               L2 is False:
                # Get the whole feature
                feature = db(table.id == id).select(table.name,
                                                    table.parent,
                                                    table.path,
                                                    table.lat,
                                                    table.lon,
                                                    table.inherited,
                                                    table.L0,
                                                    table.L1,
                                                    table.L2,
                                                    limitby=(0, 1)).first()
                name = feature.name
                parent = feature.parent
                path = feature.path
                lat = feature.lat
                lon = feature.lon
                inherited = feature.inherited
                L0 = feature.L0
                L1 = feature.L1
                L2 = feature.L2

            if parent:
                # The parent may be an L1 or (for countries without L1 data)
                # an L0 directly
                Lx = db(table.id == parent).select(table.name,
                                                   table.level,
                                                   table.parent,
                                                   table.lat,
                                                   table.lon,
                                                   limitby=(0, 1),
                                                   cache=current.s3db.cache).first()
                if Lx.level == "L1":
                    L1_name = Lx.name
                    _parent = Lx.parent
                    if _parent:
                        _path = "%s/%s/%s" % (_parent, parent, id)
                        L0_name = db(table.id == _parent).select(table.name,
                                                                 limitby=(0, 1),
                                                                 cache=current.s3db.cache).first().name
                    else:
                        _path = "%s/%s" % (parent, id)
                        L0_name = None
                elif Lx.level == "L0":
                    _path = "%s/%s" % (parent, id)
                    L0_name = Lx.name
                    L1_name = None
                else:
                    # Parent at an unexpected level
                    raise ValueError
                Lx_lat = Lx.lat
                Lx_lon = Lx.lon
            else:
                _path = id
                L0_name = None
                L1_name = None
                Lx_lat = None
                Lx_lon = None

            if path == _path and L2 == name and L0 == L0_name and \
               L1 == L1_name:
                if inherited and lat == Lx_lat and lon == Lx_lon:
                    # No action required
                    return path
                elif inherited or lat is None or lon is None:
                    db(table.id == id).update(inherited=True,
                                              lat=Lx_lat,
                                              lon=Lx_lon)
            elif inherited and lat == Lx_lat and lon == Lx_lon:
                # Names/path changed but inherited coords still match
                db(table.id == id).update(path=_path,
                                          L0=L0_name,
                                          L1=L1_name,
                                          L2=name,
                                          )
                return _path
            elif inherited or lat is None or lon is None:
                db(table.id == id).update(path=_path,
                                          L0=L0_name,
                                          L1=L1_name,
                                          L2=name,
                                          inherited=True,
                                          lat=Lx_lat,
                                          lon=Lx_lon)
            else:
                db(table.id == id).update(path=_path,
                                          inherited=False,
                                          L0=L0_name,
                                          L1=L1_name,
                                          L2=name)
            # Ensure that any locations which inherit their latlon from this one get updated
            query = (table.parent == id) & \
                    (table.inherited == True)
            fields = [table.id, table.name, table.level, table.path, table.parent,
                      table.L0, table.L1, table.L2, table.L3, table.L4,
                      table.lat, table.lon, table.inherited]
            rows = db(query).select(*fields)
            for row in rows:
                self.update_location_tree(row)
            return _path

        # L3
        L3 = feature.get("L3", False)
        if level == "L3":
            if name is False or lat is False or lon is False or inherited is None or \
               parent is False or path is False or L0 is False or L1 is False or \
               L2 is False or L3 is False:
                # Get the whole feature
                feature = db(table.id == id).select(table.name,
                                                    table.parent,
                                                    table.path,
                                                    table.lat,
                                                    table.lon,
                                                    table.inherited,
                                                    table.L0,
                                                    table.L1,
                                                    table.L2,
                                                    table.L3,
                                                    limitby=(0, 1)).first()
                name = feature.name
                parent = feature.parent
                path = feature.path
                lat = feature.lat
                lon = feature.lon
                inherited = feature.inherited
                L0 = feature.L0
                L1 = feature.L1
                L2 = feature.L2
                L3 = feature.L3

            if parent:
                Lx = db(table.id == parent).select(table.id,
                                                   table.name,
                                                   table.level,
                                                   table.L0,
                                                   table.L1,
                                                   table.path,
                                                   table.lat,
                                                   table.lon,
                                                   limitby=(0, 1),
                                                   cache=current.s3db.cache).first()
                if Lx.level == "L2":
                    L0_name = Lx.L0
                    L1_name = Lx.L1
                    L2_name = Lx.name
                    _path = Lx.path
                    if _path and L0_name and L1_name:
                        # Parent's tree data is complete: just append own id
                        _path = "%s/%s" % (_path, id)
                    else:
                        # This feature needs to be updated
                        _path = self.update_location_tree(Lx)
                        _path = "%s/%s" % (_path, id)
                        # Query again
                        Lx = db(table.id == parent).select(table.L0,
                                                           table.L1,
                                                           table.lat,
                                                           table.lon,
                                                           limitby=(0, 1),
                                                           cache=current.s3db.cache).first()
                        L0_name = Lx.L0
                        L1_name = Lx.L1
                elif Lx.level == "L1":
                    L0_name = Lx.L0
                    L1_name = Lx.name
                    L2_name = None
                    _path = Lx.path
                    if _path and L0_name:
                        _path = "%s/%s" % (_path, id)
                    else:
                        # This feature needs to be updated
                        _path = self.update_location_tree(Lx)
                        _path = "%s/%s" % (_path, id)
                        # Query again
                        Lx = db(table.id == parent).select(table.L0,
                                                           table.lat,
                                                           table.lon,
                                                           limitby=(0, 1),
                                                           cache=current.s3db.cache).first()
                        L0_name = Lx.L0
                elif Lx.level == "L0":
                    _path = "%s/%s" % (parent, id)
                    L0_name = Lx.name
                    L1_name = None
                    L2_name = None
                else:
                    # NOTE(review): unlike the L2/L4 sections (which raise
                    # ValueError), this one logs & bails out with None
                    s3_debug("S3GIS: Invalid level '%s'" % Lx.level)
                    return
                Lx_lat = Lx.lat
                Lx_lon = Lx.lon
            else:
                _path = id
                L0_name = None
                L1_name = None
                L2_name = None
                Lx_lat = None
                Lx_lon = None

            if path == _path and L3 == name and L0 == L0_name and \
               L1 == L1_name and L2 == L2_name:
                if inherited and lat == Lx_lat and lon == Lx_lon:
                    # No action required
                    return path
                elif inherited or lat is None or lon is None:
                    db(table.id == id).update(inherited=True,
                                              lat=Lx_lat,
                                              lon=Lx_lon)
            elif inherited and lat == Lx_lat and lon == Lx_lon:
                db(table.id == id).update(path=_path,
                                          L0=L0_name,
                                          L1=L1_name,
                                          L2=L2_name,
                                          L3=name,
                                          )
                return _path
            elif inherited or lat is None or lon is None:
                db(table.id == id).update(path=_path,
                                          L0=L0_name,
                                          L1=L1_name,
                                          L2=L2_name,
                                          L3=name,
                                          inherited=True,
                                          lat=Lx_lat,
                                          lon=Lx_lon)
            else:
                db(table.id == id).update(path=_path,
                                          inherited=False,
                                          L0=L0_name,
                                          L1=L1_name,
                                          L2=L2_name,
                                          L3=name)
            # Ensure that any locations which inherit their latlon from this one get updated
            query = (table.parent == id) & \
                    (table.inherited == True)
            fields = [table.id, table.name, table.level, table.path, table.parent,
                      table.L0, table.L1, table.L2, table.L3, table.L4,
                      table.lat, table.lon, table.inherited]
            rows = db(query).select(*fields)
            for row in rows:
                self.update_location_tree(row)
            return _path

        # L4
        L4 = feature.get("L4", False)
        if level == "L4":
            if name is False or lat is False or lon is False or inherited is None or \
               parent is False or path is False or L0 is False or L1 is False or \
               L2 is False or L3 is False or \
               L4 is False:
                # Get the whole feature
                feature = db(table.id == id).select(table.name,
                                                    table.parent,
                                                    table.path,
                                                    table.lat,
                                                    table.lon,
                                                    table.inherited,
                                                    table.L0,
                                                    table.L1,
                                                    table.L2,
                                                    table.L3,
                                                    table.L4,
                                                    limitby=(0, 1)).first()
                name = feature.name
                parent = feature.parent
                path = feature.path
                lat = feature.lat
                lon = feature.lon
                inherited = feature.inherited
                L0 = feature.L0
                L1 = feature.L1
                L2 = feature.L2
                L3 = feature.L3
                L4 = feature.L4

            if parent:
                Lx = db(table.id == parent).select(table.id,
                                                   table.name,
                                                   table.level,
                                                   table.L0,
                                                   table.L1,
                                                   table.L2,
                                                   table.path,
                                                   table.lat,
                                                   table.lon,
                                                   limitby=(0, 1),
                                                   cache=current.s3db.cache).first()
                if Lx.level == "L3":
                    L0_name = Lx.L0
                    L1_name = Lx.L1
                    L2_name = Lx.L2
                    L3_name = Lx.name
                    _path = Lx.path
                    if _path and L0_name and L1_name and L2_name:
                        _path = "%s/%s" % (_path, id)
                    else:
                        # This feature needs to be updated
                        _path = self.update_location_tree(Lx)
                        _path = "%s/%s" % (_path, id)
                        # Query again
                        Lx = db(table.id == parent).select(table.L0,
                                                           table.L1,
                                                           table.L2,
                                                           table.lat,
                                                           table.lon,
                                                           limitby=(0, 1),
                                                           cache=current.s3db.cache).first()
                        L0_name = Lx.L0
                        L1_name = Lx.L1
                        L2_name = Lx.L2
                elif Lx.level == "L2":
                    L0_name = Lx.L0
                    L1_name = Lx.L1
                    L2_name = Lx.name
                    L3_name = None
                    _path = Lx.path
                    if _path and L0_name and L1_name:
                        _path = "%s/%s" % (_path, id)
                    else:
                        # This feature needs to be updated
                        _path = self.update_location_tree(Lx)
                        _path = "%s/%s" % (_path, id)
                        # Query again
                        Lx = db(table.id == parent).select(table.L0,
                                                           table.L1,
                                                           table.lat,
                                                           table.lon,
                                                           limitby=(0, 1),
                                                           cache=current.s3db.cache).first()
                        L0_name = Lx.L0
                        L1_name = Lx.L1
                elif Lx.level == "L1":
                    L0_name = Lx.L0
                    L1_name = Lx.name
                    L2_name = None
                    L3_name = None
                    _path = Lx.path
                    if _path and L0_name:
                        _path = "%s/%s" % (_path, id)
                    else:
                        # This feature needs to be updated
                        _path = self.update_location_tree(Lx)
                        _path = "%s/%s" % (_path, id)
                        # Query again
                        Lx = db(table.id == parent).select(table.L0,
                                                           table.lat,
                                                           table.lon,
                                                           limitby=(0, 1),
                                                           cache=current.s3db.cache).first()
                        L0_name = Lx.L0
                elif Lx.level == "L0":
                    _path = "%s/%s" % (parent, id)
                    L0_name = Lx.name
                    L1_name = None
                    L2_name = None
                    L3_name = None
                else:
                    raise ValueError
                Lx_lat = Lx.lat
                Lx_lon = Lx.lon
            else:
                _path = id
                L0_name = None
                L1_name = None
                L2_name = None
                L3_name = None
                Lx_lat = None
                Lx_lon = None

            if path == _path and L4 == name and L0 == L0_name and \
               L1 == L1_name and L2 == L2_name and \
               L3 == L3_name:
                if inherited and lat == Lx_lat and lon == Lx_lon:
                    # No action required
                    return path
                elif inherited or lat is None or lon is None:
                    db(table.id == id).update(inherited=True,
                                              lat=Lx_lat,
                                              lon=Lx_lon)
            elif inherited and lat == Lx_lat and lon == Lx_lon:
                db(table.id == id).update(path=_path,
                                          L0=L0_name,
                                          L1=L1_name,
                                          L2=L2_name,
                                          L3=L3_name,
                                          L4=name,
                                          )
                return _path
            elif inherited or lat is None or lon is None:
                db(table.id == id).update(path=_path,
                                          L0=L0_name,
                                          L1=L1_name,
                                          L2=L2_name,
                                          L3=L3_name,
                                          L4=name,
                                          inherited=True,
                                          lat=Lx_lat,
                                          lon=Lx_lon)
            else:
                db(table.id == id).update(path=_path,
                                          inherited=False,
                                          L0=L0_name,
                                          L1=L1_name,
                                          L2=L2_name,
                                          L3=L3_name,
                                          L4=name)
            # Ensure that any locations which inherit their latlon from this one get updated
            query = (table.parent == id) & \
                    (table.inherited == True)
            fields = [table.id, table.name, table.level, table.path, table.parent,
                      table.L0, table.L1, table.L2, table.L3, table.L4,
                      table.lat, table.lon, table.inherited]
            rows = db(query).select(*fields)
            for row in rows:
                self.update_location_tree(row)
            return _path

        # @ToDo: L5

        # Specific Location
        # - or unspecified (which we should avoid happening)
        if name is False or lat is False or lon is False or inherited is None or \
           parent is False or path is False or L0 is False or L1 is False or \
           L2 is False or L3 is False or \
           L4 is False:
            # Get the whole feature
            feature = db(table.id == id).select(table.name,
                                                table.level,
                                                table.parent,
                                                table.path,
                                                table.lat,
                                                table.lon,
                                                table.inherited,
                                                table.L0,
                                                table.L1,
                                                table.L2,
                                                table.L3,
                                                table.L4,
                                                limitby=(0, 1)).first()
            name = feature.name
            parent = feature.parent
            path = feature.path
            lat = feature.lat
            lon = feature.lon
            inherited = feature.inherited
            L0 = feature.L0
            L1 = feature.L1
            L2 = feature.L2
            L3 = feature.L3
            L4 = feature.L4

        if parent:
            Lx = db(table.id == parent).select(table.id,
                                               table.name,
                                               table.level,
                                               table.L0,
                                               table.L1,
                                               table.L2,
                                               table.L3,
                                               table.path,
                                               table.lat,
                                               table.lon,
                                               limitby=(0, 1),
                                               cache=current.s3db.cache).first()
            if Lx.level == "L4":
                L0_name = Lx.L0
                L1_name = Lx.L1
                L2_name = Lx.L2
                L3_name = Lx.L3
                L4_name = Lx.name
                _path = Lx.path
                if _path and L0_name and L1_name and L2_name and L3_name:
                    _path = "%s/%s" % (_path, id)
                else:
                    # This feature needs to be updated
                    _path = self.update_location_tree(Lx)
                    _path = "%s/%s" % (_path, id)
                    # Query again
                    Lx = db(table.id == parent).select(table.L0,
                                                       table.L1,
                                                       table.L2,
                                                       table.L3,
                                                       table.lat,
                                                       table.lon,
                                                       limitby=(0, 1),
                                                       cache=current.s3db.cache).first()
                    L0_name = Lx.L0
                    L1_name = Lx.L1
                    L2_name = Lx.L2
                    L3_name = Lx.L3
            elif Lx.level == "L3":
                L0_name = Lx.L0
                L1_name = Lx.L1
                L2_name = Lx.L2
                L3_name = Lx.name
                L4_name = None
                _path = Lx.path
                if _path and L0_name and L1_name and L2_name:
                    _path = "%s/%s" % (_path, id)
                else:
                    # This feature needs to be updated
                    _path = self.update_location_tree(Lx)
                    _path = "%s/%s" % (_path, id)
                    # Query again
                    Lx = db(table.id == parent).select(table.L0,
                                                       table.L1,
                                                       table.L2,
                                                       table.lat,
                                                       table.lon,
                                                       limitby=(0, 1),
                                                       cache=current.s3db.cache).first()
                    L0_name = Lx.L0
                    L1_name = Lx.L1
                    L2_name = Lx.L2
            elif Lx.level == "L2":
                L0_name = Lx.L0
                L1_name = Lx.L1
                L2_name = Lx.name
                L3_name = None
                L4_name = None
                _path = Lx.path
                if _path and L0_name and L1_name:
                    _path = "%s/%s" % (_path, id)
                else:
                    # This feature needs to be updated
                    _path = self.update_location_tree(Lx)
                    _path = "%s/%s" % (_path, id)
                    # Query again
                    Lx = db(table.id == parent).select(table.L0,
                                                       table.L1,
                                                       table.lat,
                                                       table.lon,
                                                       limitby=(0, 1),
                                                       cache=current.s3db.cache).first()
                    L0_name = Lx.L0
                    L1_name = Lx.L1
            elif Lx.level == "L1":
                L0_name = Lx.L0
                L1_name = Lx.name
                L2_name = None
                L3_name = None
                L4_name = None
                _path = Lx.path
                if _path and L0_name:
                    _path = "%s/%s" % (_path, id)
                else:
                    # This feature needs to be updated
                    _path = self.update_location_tree(Lx)
                    _path = "%s/%s" % (_path, id)
                    # Query again
                    Lx = db(table.id == parent).select(table.L0,
                                                       table.lat,
                                                       table.lon,
                                                       limitby=(0, 1),
                                                       cache=current.s3db.cache).first()
                    L0_name = Lx.L0
            elif Lx.level == "L0":
                _path = "%s/%s" % (parent, id)
                L0_name = Lx.name
                L1_name = None
                L2_name = None
                L3_name = None
                L4_name = None
            else:
                raise ValueError
            Lx_lat = Lx.lat
            Lx_lon = Lx.lon
        else:
            _path = id
            # NOTE(review): assumes feature has a "level" attribute here —
            # when the caller passed a dict without it, the earlier db
            # re-read has populated it; confirm for dict callers
            if feature.level == "L0":
                L0_name = name
            else:
                L0_name = None
            L1_name = None
            L2_name = None
            L3_name = None
            L4_name = None
            Lx_lat = None
            Lx_lon = None

        if path == _path and L0 == L0_name and \
           L1 == L1_name and L2 == L2_name and \
           L3 == L3_name and L4 == L4_name:
            if inherited and lat == Lx_lat and lon == Lx_lon:
                # No action required
                return path
            elif inherited or lat is None or lon is None:
                db(table.id == id).update(inherited=True,
                                          lat=Lx_lat,
                                          lon=Lx_lon)
        elif inherited and lat == Lx_lat and lon == Lx_lon:
            db(table.id == id).update(path=_path,
                                      L0=L0_name,
                                      L1=L1_name,
                                      L2=L2_name,
                                      L3=L3_name,
                                      L4=L4_name,
                                      )
        elif inherited or lat is None or lon is None:
            db(table.id == id).update(path=_path,
                                      L0=L0_name,
                                      L1=L1_name,
                                      L2=L2_name,
                                      L3=L3_name,
                                      L4=L4_name,
                                      inherited=True,
                                      lat=Lx_lat,
                                      lon=Lx_lon)
        else:
            db(table.id == id).update(path=_path,
                                      inherited=False,
                                      L0=L0_name,
                                      L1=L1_name,
                                      L2=L2_name,
                                      L3=L3_name,
                                      L4=L4_name)
        # Specific locations have no inheriting descendants, so no cascade
        return _path
# -------------------------------------------------------------------------
    @staticmethod
    def wkt_centroid(form):
        """
            OnValidation callback:
            If a WKT is defined: validate the format,
                calculate the LonLat of the Centroid, and set bounds
            Else if a LonLat is defined: calculate the WKT for the Point.

            Uses Shapely.
            @ToDo: provide an option to use PostGIS/Spatialite

            @param form: the form being validated; reads/writes form.vars
                         (lat, lon, wkt, gis_feature_type, bounds fields)
                         and writes form.errors on invalid input
        """
        messages = current.messages
        # NOTE: "vars" shadows the builtin; kept for file consistency
        vars = form.vars

        if vars.gis_feature_type == "1":
            # Point
            if (vars.lon is None and vars.lat is None) or \
               (vars.lon == "" and vars.lat == ""):
                # No Geometry available
                # Don't clobber existing records (e.g. in Prepop)
                #vars.gis_feature_type = "0"
                # Cannot create WKT, so Skip
                return
            elif vars.lat is None or vars.lat == "":
                form.errors["lat"] = messages.lat_empty
            elif vars.lon is None or vars.lon == "":
                form.errors["lon"] = messages.lon_empty
            else:
                vars.wkt = "POINT(%(lon)s %(lat)s)" % vars
                # A point's bounds are the point itself; only fill in
                # bounds which are not already set
                if "lon_min" not in vars or vars.lon_min is None:
                    vars.lon_min = vars.lon
                if "lon_max" not in vars or vars.lon_max is None:
                    vars.lon_max = vars.lon
                if "lat_min" not in vars or vars.lat_min is None:
                    vars.lat_min = vars.lat
                if "lat_max" not in vars or vars.lat_max is None:
                    vars.lat_max = vars.lat

        elif vars.wkt:
            # Parse WKT for LineString, Polygon, etc
            from shapely.wkt import loads as wkt_loads
            try:
                shape = wkt_loads(vars.wkt)
            except:
                try:
                    # Perhaps this is really a LINESTRING (e.g. OSM import of an unclosed Way)
                    # [8:-1] strips the "POLYGON((" prefix's first 8 chars and
                    # the trailing ")" to re-wrap the coords as a LINESTRING
                    linestring = "LINESTRING%s" % vars.wkt[8:-1]
                    shape = wkt_loads(linestring)
                    vars.wkt = linestring
                except:
                    form.errors["wkt"] = messages.invalid_wkt
                    return
            # Map Shapely geometry type -> gis_feature_type code
            gis_feature_type = shape.type
            if gis_feature_type == "Point":
                vars.gis_feature_type = 1
            elif gis_feature_type == "LineString":
                vars.gis_feature_type = 2
            elif gis_feature_type == "Polygon":
                vars.gis_feature_type = 3
            elif gis_feature_type == "MultiPoint":
                vars.gis_feature_type = 4
            elif gis_feature_type == "MultiLineString":
                vars.gis_feature_type = 5
            elif gis_feature_type == "MultiPolygon":
                vars.gis_feature_type = 6
            elif gis_feature_type == "GeometryCollection":
                vars.gis_feature_type = 7
            try:
                # lat/lon of a non-point geometry is its centroid
                centroid_point = shape.centroid
                vars.lon = centroid_point.x
                vars.lat = centroid_point.y
                bounds = shape.bounds
                vars.lon_min = bounds[0]
                vars.lat_min = bounds[1]
                vars.lon_max = bounds[2]
                vars.lat_max = bounds[3]
            except:
                form.errors.gis_feature_type = messages.centroid_error

            if current.deployment_settings.get_gis_spatialdb():
                # Also populate the spatial field
                vars.the_geom = vars.wkt

        elif (vars.lon is None and vars.lat is None) or \
             (vars.lon == "" and vars.lat == ""):
            # No Geometry available
            # Don't clobber existing records (e.g. in Prepop)
            #vars.gis_feature_type = "0"
            # Cannot create WKT, so Skip
            return
        else:
            # Point
            vars.gis_feature_type = "1"
            if vars.lat is None or vars.lat == "":
                form.errors["lat"] = messages.lat_empty
            elif vars.lon is None or vars.lon == "":
                form.errors["lon"] = messages.lon_empty
            else:
                vars.wkt = "POINT(%(lon)s %(lat)s)" % vars
                if "lon_min" not in vars or vars.lon_min is None:
                    vars.lon_min = vars.lon
                if "lon_max" not in vars or vars.lon_max is None:
                    vars.lon_max = vars.lon
                if "lat_min" not in vars or vars.lat_min is None:
                    vars.lat_min = vars.lat
                if "lat_max" not in vars or vars.lat_max is None:
                    vars.lat_max = vars.lat

        return
# -------------------------------------------------------------------------
@staticmethod
def query_features_by_bbox(lon_min, lat_min, lon_max, lat_max):
"""
Returns a query of all Locations inside the given bounding box
"""
table = current.s3db.gis_location
query = (table.lat_min <= lat_max) & \
(table.lat_max >= lat_min) & \
(table.lon_min <= lon_max) & \
(table.lon_max >= lon_min)
return query
# -------------------------------------------------------------------------
@staticmethod
def get_features_by_bbox(lon_min, lat_min, lon_max, lat_max):
"""
Returns Rows of Locations whose shape intersects the given bbox.
"""
query = current.gis.query_features_by_bbox(lon_min,
lat_min,
lon_max,
lat_max)
return current.db(query).select()
# -------------------------------------------------------------------------
@staticmethod
def get_features_by_shape(shape):
"""
Returns Rows of locations which intersect the given shape.
Relies on Shapely for wkt parsing and intersection.
@ToDo: provide an option to use PostGIS/Spatialite
"""
from shapely.geos import ReadingError
from shapely.wkt import loads as wkt_loads
table = current.s3db.gis_location
in_bbox = current.gis.query_features_by_bbox(*shape.bounds)
has_wkt = (table.wkt != None) & (table.wkt != "")
for loc in current.db(in_bbox & has_wkt).select():
try:
location_shape = wkt_loads(loc.wkt)
if location_shape.intersects(shape):
yield loc
except ReadingError:
s3_debug("Error reading wkt of location with id", loc.id)
# -------------------------------------------------------------------------
@staticmethod
def get_features_by_latlon(lat, lon):
"""
Returns a generator of locations whose shape intersects the given LatLon.
Relies on Shapely.
@todo: provide an option to use PostGIS/Spatialite
"""
from shapely.geometry import point
return current.gis.get_features_by_shape(point.Point(lon, lat))
# -------------------------------------------------------------------------
@staticmethod
def get_features_by_feature(feature):
"""
Returns all Locations whose geometry intersects the given feature.
Relies on Shapely.
@ToDo: provide an option to use PostGIS/Spatialite
"""
from shapely.wkt import loads as wkt_loads
shape = wkt_loads(feature.wkt)
return current.gis.get_features_by_shape(shape)
# -------------------------------------------------------------------------
@staticmethod
def set_all_bounds():
"""
Sets bounds for all locations without them.
If shapely is present, and a location has wkt, bounds of the geometry
are used. Otherwise, the (lat, lon) are used as bounds.
"""
try:
from shapely.wkt import loads as wkt_loads
SHAPELY = True
except:
SHAPELY = False
db = current.db
table = current.s3db.gis_location
# Query to find all locations without bounds set
no_bounds = (table.lon_min == None) & \
(table.lat_min == None) & \
(table.lon_max == None) & \
(table.lat_max == None) & \
(table.lat != None) & \
(table.lon != None)
if SHAPELY:
# Refine to those locations with a WKT field
wkt_no_bounds = no_bounds & (table.wkt != None) & (table.wkt != "")
for location in db(wkt_no_bounds).select(table.wkt):
try :
shape = wkt_loads(location.wkt)
except:
s3_debug("Error reading WKT", location.wkt)
continue
bounds = shape.bounds
table[location.id] = dict(
lon_min = bounds[0],
lat_min = bounds[1],
lon_max = bounds[2],
lat_max = bounds[3],
)
# Anything left, we assume is a Point, so set the bounds to be the same
db(no_bounds).update(lon_min=table.lon,
lat_min=table.lat,
lon_max=table.lon,
lat_max=table.lat)
# -------------------------------------------------------------------------
@staticmethod
def simplify(wkt,
tolerance=0.01,
preserve_topology=True,
output="wkt",
decimals=4
):
"""
Simplify a complex Polygon using the Douglas-Peucker algorithm
- NB This uses Python, better performance will be gained by doing
this direct from the database if you are using PostGIS:
ST_Simplify() is available as
db(query).select(table.the_geom.st_simplify(tolerance).st_astext().with_alias('wkt')).first().wkt
db(query).select(table.the_geom.st_simplify(tolerance).st_asgeojson().with_alias('geojson')).first().geojson
@param wkt: the WKT string to be simplified (usually coming from a gis_location record)
@param tolerance: how aggressive a simplification to perform
@param preserve_topology: whether the simplified geometry should be maintained
@param output: whether to output as WKT or GeoJSON format
@param decimals: the number of decimal places to include in the output
"""
from shapely.geometry import Point, LineString, Polygon, MultiPolygon
from shapely.wkt import loads as wkt_loads
try:
# Enable C-based speedups available from 1.2.10+
from shapely import speedups
speedups.enable()
except:
s3_debug("S3GIS", "Upgrade Shapely for Performance enhancements")
try:
shape = wkt_loads(wkt)
except:
wkt = wkt[10] if wkt else wkt
s3_debug("Invalid Shape: %s" % wkt)
return None
shape = shape.simplify(tolerance, preserve_topology)
# Limit the number of decimal places
formatter = ".%sf" % decimals
def shrink_polygon(shape):
""" Helper Function """
points = shape.exterior.coords
coords = []
cappend = coords.append
for point in points:
x = float(format(point[0], formatter))
y = float(format(point[1], formatter))
cappend((x, y))
return Polygon(LineString(coords))
geom_type = shape.geom_type
if geom_type == "MultiPolygon":
polygons = shape.geoms
p = []
pappend = p.append
for polygon in polygons:
pappend(shrink_polygon(polygon))
shape = MultiPolygon([s for s in p])
elif geom_type == "Polygon":
shape = shrink_polygon(shape)
elif geom_type == "LineString":
points = line.coords
for point in points:
x = float(format(point[0], formatter))
y = float(format(point[1], formatter))
cappend((x, y))
shape = LineString(coords)
elif geom_type == "Point":
x = float(format(shape.x, formatter))
y = float(format(shape.y, formatter))
shape = Point(x, y)
else:
s3_debug("Cannot yet shrink Geometry: %s" % geom_type)
# Output
if output == "wkt":
output = shape.to_wkt()
elif output == "geojson":
from ..geojson import dumps
# Compact Encoding
output = dumps(shape, separators=(",", ":"))
return output
# -------------------------------------------------------------------------
def show_map( self,
height = None,
width = None,
bbox = {},
lat = None,
lon = None,
zoom = None,
projection = None,
add_feature = False,
add_feature_active = False,
add_polygon = False,
add_polygon_active = False,
features = [],
feature_queries = [],
feature_resources = [],
wms_browser = {},
catalogue_layers = False,
legend = False,
toolbar = False,
search = False,
googleEarth = False,
googleStreetview = False,
mouse_position = "normal",
print_tool = {},
mgrs = {},
window = False,
window_hide = False,
closable = True,
maximizable = True,
collapsed = False,
location_selector = False,
plugins = None,
):
"""
Returns the HTML to display a map
Normally called in the controller as: map = gis.show_map()
In the view, put: {{=XML(map)}}
@param height: Height of viewport (if not provided then the default deployment setting is used)
@param width: Width of viewport (if not provided then the default deployment setting is used)
@param bbox: default Bounding Box of viewport (if not provided then the Lat/Lon/Zoom are used) (Dict):
{
"max_lat" : float,
"max_lon" : float,
"min_lat" : float,
"min_lon" : float
}
@param lat: default Latitude of viewport (if not provided then the default setting from the Map Service Catalogue is used)
@param lon: default Longitude of viewport (if not provided then the default setting from the Map Service Catalogue is used)
@param zoom: default Zoom level of viewport (if not provided then the default setting from the Map Service Catalogue is used)
@param projection: EPSG code for the Projection to use (if not provided then the default setting from the Map Service Catalogue is used)
@param add_feature: Whether to include a DrawFeature control to allow adding a marker to the map
@param add_feature_active: Whether the DrawFeature control should be active by default
@param add_polygon: Whether to include a DrawFeature control to allow drawing a polygon over the map
@param add_polygon_active: Whether the DrawFeature control should be active by default
@param features: Simple Features to overlay on Map (no control over appearance & not interactive)
[{
"lat": lat,
"lon": lon
}]
@param feature_queries: Feature Queries to overlay onto the map & their options (List of Dicts):
[{
"name" : T("MyLabel"), # A string: the label for the layer
"query" : query, # A gluon.sql.Rows of gis_locations, which can be from a simple query or a Join.
# Extra fields can be added for 'popup_url', 'popup_label' & either
# 'marker' (url/height/width) or 'shape' (with optional 'colour' & 'size')
"active" : True, # Is the feed displayed upon load or needs ticking to load afterwards?
"marker" : None, # Optional: A per-Layer marker query or marker_id for the icon used to display the feature
"opacity" : 1, # Optional
"cluster_distance", # Optional
"cluster_threshold" # Optional
}]
@param feature_resources: REST URLs for (filtered) resources to overlay onto the map & their options (List of Dicts):
[{
"name" : T("MyLabel"), # A string: the label for the layer
"id" : "search", # A string: the id for the layer (for manipulation by JavaScript)
"url" : "/eden/module/resource.geojson?filter", # A URL to load the resource
"active" : True, # Is the feed displayed upon load or needs ticking to load afterwards?
"marker" : None, # Optional: A per-Layer marker dict for the icon used to display the feature
"opacity" : 1, # Optional
"cluster_distance", # Optional
"cluster_threshold" # Optional
}]
@param wms_browser: WMS Server's GetCapabilities & options (dict)
{
"name": T("MyLabel"), # Name for the Folder in LayerTree
"url": string # URL of GetCapabilities
}
@param catalogue_layers: Show all the enabled Layers from the GIS Catalogue
Defaults to False: Just show the default Base layer
@param legend: Show the Legend panel
@param toolbar: Show the Icon Toolbar of Controls
@param search: Show the Geonames search box
@param googleEarth: Include a Google Earth Panel
@param googleStreetview: Include the ability to click to open up StreetView in a popup at that location
@param mouse_position: Show the current coordinates in the bottom-right of the map. 3 Options: 'normal' (default), 'mgrs' (MGRS), False (off)
@param print_tool: Show a print utility (NB This requires server-side support: http://eden.sahanafoundation.org/wiki/BluePrintGISPrinting)
{
"url": string, # URL of print service (e.g. http://localhost:8080/geoserver/pdf/)
"mapTitle": string, # Title for the Printed Map (optional)
"subTitle": string # subTitle for the Printed Map (optional)
}
@param mgrs: Use the MGRS Control to select PDFs
{
"name": string, # Name for the Control
"url": string # URL of PDF server
}
@ToDo: Also add MGRS Search support: http://gxp.opengeo.org/master/examples/mgrs.html
@param window: Have viewport pop out of page into a resizable window
@param window_hide: Have the window hidden by default, ready to appear (e.g. on clicking a button)
@param closable: In Window mode, whether the window is closable or not
@param collapsed: Start the Tools panel (West region) collapsed
@param location_selector: This Map is being instantiated within the LocationSelectorWidget
@param plugins: an iterable of objects which support the following methods:
.addToMapWindow(items)
.setup(map)
"""
request = current.request
response = current.response
if not response.warning:
response.warning = ""
s3 = response.s3
session = current.session
T = current.T
db = current.db
s3db = current.s3db
auth = current.auth
cache = s3db.cache
settings = current.deployment_settings
public_url = settings.get_base_public_url()
cachetable = s3db.gis_cache
MAP_ADMIN = auth.s3_has_role(session.s3.system_roles.MAP_ADMIN)
# Defaults
# Also in static/S3/s3.gis.js
# http://dev.openlayers.org/docs/files/OpenLayers/Strategy/Cluster-js.html
self.cluster_distance = 20 # pixels
self.cluster_threshold = 2 # minimum # of features to form a cluster
# Support bookmarks (such as from the control)
# - these over-ride the arguments
vars = request.vars
# Read configuration
config = GIS.get_config()
if height:
map_height = height
else:
map_height = settings.get_gis_map_height()
if width:
map_width = width
else:
map_width = settings.get_gis_map_width()
if (bbox
and (-90 < bbox["max_lat"] < 90)
and (-90 < bbox["min_lat"] < 90)
and (-180 < bbox["max_lon"] < 180)
and (-180 < bbox["min_lon"] < 180)
):
# We have sane Bounds provided, so we should use them
pass
else:
# No bounds or we've been passed bounds which aren't sane
bbox = None
# Use Lat/Lon to center instead
if "lat" in vars and vars.lat:
lat = float(vars.lat)
if lat is None or lat == "":
lat = config.lat
if "lon" in vars and vars.lon:
lon = float(vars.lon)
if lon is None or lon == "":
lon = config.lon
if "zoom" in request.vars:
zoom = int(vars.zoom)
if not zoom:
zoom = config.zoom
if not projection:
projection = config.epsg
if projection not in (900913, 4326):
# Test for Valid Projection file in Proj4JS library
projpath = os.path.join(
request.folder, "static", "scripts", "gis", "proj4js", \
"lib", "defs", "EPSG%s.js" % projection
)
try:
f = open(projpath, "r")
f.close()
except:
if projection:
response.warning = \
T("Map not available: Projection %(projection)s not supported - please add definition to %(path)s") % \
dict(projection = "'%s'" % projection,
path= "/static/scripts/gis/proj4js/lib/defs")
else:
response.warning = \
T("Map not available: No Projection configured")
return None
units = config.units
maxResolution = config.maxResolution
maxExtent = config.maxExtent
numZoomLevels = config.zoom_levels
marker_default = Storage(image = config.marker_image,
height = config.marker_height,
width = config.marker_width,
url = URL(c="static", f="img",
args=["markers", config.marker_image]))
markers = {}
#####
# CSS
#####
# All Loaded as-standard to avoid delays in page loading
######
# HTML
######
html = DIV(_id="map_wrapper")
html_append = html.append
# Map (Embedded not Window)
html_append(DIV(_id="map_panel"))
# Status Reports
html_append(TABLE(TR(
#TD(
# # Somewhere to report details of OSM File Features via on_feature_hover()
# DIV(_id="status_osm"),
# _style="border: 0px none ;", _valign="top",
#),
TD(
# Somewhere to report whether KML feed is using cached copy or completely inaccessible
DIV(_id="status_kml"),
# Somewhere to report if Files are not found
DIV(_id="status_files"),
_style="border: 0px none ;", _valign="top",
)
)))
#########
# Scripts
#########
# JS Loader
html_append(SCRIPT(_type="text/javascript",
_src=URL(c="static", f="scripts/yepnope.1.5.4-min.js")))
scripts = []
scripts_append = scripts.append
ready = ""
def add_javascript(script, ready=""):
if type(script) == SCRIPT:
if ready:
ready = """%s
%s""" % (ready, script)
else:
ready = script
elif script.startswith("http"):
scripts_append(script)
else:
script = URL(c="static", f=script)
scripts_append(script)
debug = s3.debug
if debug:
if projection not in (900913, 4326):
add_javascript("scripts/gis/proj4js/lib/proj4js-combined.js")
add_javascript("scripts/gis/proj4js/lib/defs/EPSG%s.js" % projection)
add_javascript("scripts/gis/openlayers/lib/OpenLayers.js")
add_javascript("scripts/gis/cdauth.js")
add_javascript("scripts/gis/osm_styles.js")
add_javascript("scripts/gis/GeoExt/lib/GeoExt.js")
add_javascript("scripts/gis/GeoExt/ux/GeoNamesSearchCombo.js")
add_javascript("scripts/gis/gxp/RowExpander.js")
add_javascript("scripts/gis/gxp/widgets/NewSourceWindow.js")
add_javascript("scripts/gis/gxp/plugins/LayerSource.js")
add_javascript("scripts/gis/gxp/plugins/WMSSource.js")
add_javascript("scripts/gis/gxp/plugins/Tool.js")
add_javascript("scripts/gis/gxp/plugins/AddLayers.js")
add_javascript("scripts/gis/gxp/plugins/RemoveLayer.js")
if mouse_position == "mgrs":
add_javascript("scripts/gis/usng2.js")
add_javascript("scripts/gis/MP.js")
pass
else:
if projection not in (900913, 4326):
add_javascript("scripts/gis/proj4js/lib/proj4js-compressed.js")
add_javascript("scripts/gis/proj4js/lib/defs/EPSG%s.js" % projection)
add_javascript("scripts/gis/OpenLayers.js")
add_javascript("scripts/gis/GeoExt.js")
if mouse_position == "mgrs":
add_javascript("scripts/gis/MGRS.min.js")
#######
# Tools
#######
# Toolbar
if toolbar:
toolbar = '''S3.gis.toolbar=true\n'''
else:
toolbar = ""
# @ToDo: Could we get this automatically?
if location_selector:
loc_select = '''S3.gis.loc_select=true\n'''
else:
loc_select = ""
# MGRS PDF Browser
if mgrs:
mgrs_name = '''S3.gis.mgrs_name='%s'\n''' % mgrs["name"]
mgrs_url = '''S3.gis.mgrs_url='%s'\n''' % mgrs["url"]
else:
mgrs_name = ""
mgrs_url = ""
# Legend panel
if legend:
legend = '''i18n.gis_legend='%s'\n''' % T("Legend")
else:
legend = ""
# Draw Feature Controls
if add_feature:
if add_feature_active:
draw_feature = '''S3.gis.draw_feature='active'\n'''
else:
draw_feature = '''S3.gis.draw_feature='inactive'\n'''
else:
draw_feature = ""
if add_polygon:
if add_polygon_active:
draw_polygon = '''S3.gis.draw_polygon='active'\n'''
else:
draw_polygon = '''S3.gis.draw_polygon='inactive'\n'''
else:
draw_polygon = ""
authenticated = ""
config_id = ""
if auth.is_logged_in():
authenticated = '''S3.auth=true\n'''
if MAP_ADMIN or \
(config.pe_id == auth.user.pe_id):
# Personal config or MapAdmin, so enable Save Button for Updates
config_id = '''S3.gis.config_id=%i\n''' % config.id
# Upload Layer
if settings.get_gis_geoserver_password():
upload_layer = '''i18n.gis_uploadlayer='Upload Shapefile'\n'''
add_javascript("scripts/gis/gxp/FileUploadField.js")
add_javascript("scripts/gis/gxp/widgets/LayerUploadPanel.js")
else:
upload_layer = ""
# Layer Properties
layer_properties = '''i18n.gis_properties='Layer Properties'\n'''
# Search
if search:
search = '''i18n.gis_search='%s'\n''' % T("Search location in Geonames")
#'''i18n.gis_search_no_internet="%s"''' % T("Geonames.org search requires Internet connectivity!")
else:
search = ""
# WMS Browser
if wms_browser:
wms_browser_name = '''S3.gis.wms_browser_name='%s'\n''' % wms_browser["name"]
# urlencode the URL
wms_browser_url = '''S3.gis.wms_browser_url='%s'\n''' % urllib.quote(wms_browser["url"])
else:
wms_browser_name = ""
wms_browser_url = ""
# Mouse Position
if not mouse_position:
mouse_position = ""
elif mouse_position == "mgrs":
mouse_position = '''S3.gis.mouse_position='mgrs'\n'''
else:
mouse_position = '''S3.gis.mouse_position=true\n'''
# OSM Authoring
if config.osm_oauth_consumer_key and \
config.osm_oauth_consumer_secret:
osm_auth = '''S3.gis.osm_oauth='%s'\n''' % T("Zoom in closer to Edit OpenStreetMap layer")
else:
osm_auth = ""
# Print
# NB This isn't too-flexible a method. We're now focussing on print.css
# If we do come back to it, then it should be moved to static
if print_tool:
url = print_tool["url"]
if "title" in print_tool:
mapTitle = unicode(print_tool["mapTitle"])
else:
mapTitle = unicode(T("Map from Sahana Eden"))
if "subtitle" in print_tool:
subTitle = unicode(print_tool["subTitle"])
else:
subTitle = unicode(T("Printed from Sahana Eden"))
if auth.is_logged_in():
creator = unicode(auth.user.email)
else:
creator = ""
script = u"".join(("""
if (typeof(printCapabilities) != 'undefined') {
// info.json from script headers OK
printProvider = new GeoExt.data.PrintProvider({
//method: 'POST',
//url: '""", url, """',
method: 'GET', // 'POST' recommended for production use
capabilities: printCapabilities, // from the info.json returned from the script headers
customParams: {
mapTitle: '""", mapTitle, """',
subTitle: '""", subTitle, """',
creator: '""", creator, """'
}
});
// Our print page. Stores scale, center and rotation and gives us a page
// extent feature that we can add to a layer.
printPage = new GeoExt.data.PrintPage({
printProvider: printProvider
});
//var printExtent = new GeoExt.plugins.PrintExtent({
// printProvider: printProvider
//});
// A layer to display the print page extent
//var pageLayer = new OpenLayers.Layer.Vector('""", unicode(T("Print Extent")), """');
//pageLayer.addFeatures(printPage.feature);
//pageLayer.setVisibility(false);
//map.addLayer(pageLayer);
//var pageControl = new OpenLayers.Control.TransformFeature();
//map.addControl(pageControl);
//map.setOptions({
// eventListeners: {
// recenter/resize page extent after pan/zoom
// 'moveend': function() {
// printPage.fit(mapPanel, true);
// }
// }
//});
// The form with fields controlling the print output
S3.gis.printFormPanel = new Ext.form.FormPanel({
title: '""", unicode(T("Print Map")), """',
rootVisible: false,
split: true,
autoScroll: true,
collapsible: true,
collapsed: true,
collapseMode: 'mini',
lines: false,
bodyStyle: 'padding:5px',
labelAlign: 'top',
defaults: {anchor: '100%%'},
listeners: {
'expand': function() {
//if (null == mapPanel.map.getLayersByName('""", unicode(T("Print Extent")), """')[0]) {
// mapPanel.map.addLayer(pageLayer);
//}
if (null == mapPanel.plugins[0]) {
//map.addLayer(pageLayer);
//pageControl.activate();
//mapPanel.plugins = [ new GeoExt.plugins.PrintExtent({
// printProvider: printProvider,
// map: map,
// layer: pageLayer,
// control: pageControl
//}) ];
//mapPanel.plugins[0].addPage();
}
},
'collapse': function() {
//mapPanel.map.removeLayer(pageLayer);
//if (null != mapPanel.plugins[0]) {
// map.removeLayer(pageLayer);
// mapPanel.plugins[0].removePage(mapPanel.plugins[0].pages[0]);
// mapPanel.plugins = [];
//}
}
},
items: [{
xtype: 'textarea',
name: 'comment',
value: '',
fieldLabel: '""", unicode(T("Comment")), """',
plugins: new GeoExt.plugins.PrintPageField({
printPage: printPage
})
}, {
xtype: 'combo',
store: printProvider.layouts,
displayField: 'name',
fieldLabel: '""", T("Layout").decode("utf-8"), """',
typeAhead: true,
mode: 'local',
triggerAction: 'all',
plugins: new GeoExt.plugins.PrintProviderField({
printProvider: printProvider
})
}, {
xtype: 'combo',
store: printProvider.dpis,
displayField: 'name',
fieldLabel: '""", unicode(T("Resolution")), """',
tpl: '<tpl for="."><div class="x-combo-list-item">{name} dpi</div></tpl>',
typeAhead: true,
mode: 'local',
triggerAction: 'all',
plugins: new GeoExt.plugins.PrintProviderField({
printProvider: printProvider
}),
// the plugin will work even if we modify a combo value
setValue: function(v) {
v = parseInt(v) + ' dpi';
Ext.form.ComboBox.prototype.setValue.apply(this, arguments);
}
//}, {
// xtype: 'combo',
// store: printProvider.scales,
// displayField: 'name',
// fieldLabel: '""", unicode(T("Scale")), """',
// typeAhead: true,
// mode: 'local',
// triggerAction: 'all',
// plugins: new GeoExt.plugins.PrintPageField({
// printPage: printPage
// })
//}, {
// xtype: 'textfield',
// name: 'rotation',
// fieldLabel: '""", unicode(T("Rotation")), """',
// plugins: new GeoExt.plugins.PrintPageField({
// printPage: printPage
// })
}],
buttons: [{
text: '""", unicode(T("Create PDF")), """',
handler: function() {
// the PrintExtent plugin is the mapPanel's 1st plugin
//mapPanel.plugins[0].print();
// convenient way to fit the print page to the visible map area
printPage.fit(mapPanel, true);
// print the page, including the legend, where available
if (null == legendPanel) {
printProvider.print(mapPanel, printPage);
} else {
printProvider.print(mapPanel, printPage, {legend: legendPanel});
}
}
}]
});
} else {
// Display error diagnostic
S3.gis.printFormPanel = new Ext.Panel ({
title: '""", unicode(T("Print Map")), """',
rootVisible: false,
split: true,
autoScroll: true,
collapsible: true,
collapsed: true,
collapseMode: 'mini',
lines: false,
bodyStyle: 'padding:5px',
labelAlign: 'top',
defaults: {anchor: '100%'},
html: '""", unicode(T("Printing disabled since server not accessible")), """: <BR />""", unicode(url), """'
});
}
"""))
ready = """%s
%s""" % (ready, script)
script = "%sinfo.json?var=printCapabilities" % url
scripts_append(script)
##########
# Settings
##########
# Layout
s3_gis_window = ""
s3_gis_windowHide = ""
if not closable:
s3_gis_windowNotClosable = '''S3.gis.windowNotClosable=true\n'''
else:
s3_gis_windowNotClosable = ""
if window:
s3_gis_window = '''S3.gis.window=true\n'''
if window_hide:
s3_gis_windowHide = '''S3.gis.windowHide=true\n'''
if maximizable:
maximizable = '''S3.gis.maximizable=true\n'''
else:
maximizable = '''S3.gis.maximizable=false\n'''
# Collapsed
if collapsed:
collapsed = '''S3.gis.west_collapsed=true\n'''
else:
collapsed = ""
# Bounding Box
if bbox:
# Calculate from Bounds
center = '''S3.gis.lat,S3.gis.lon
S3.gis.bottom_left=[%f,%f]
S3.gis.top_right=[%f,%f]
''' % (bbox["min_lon"], bbox["min_lat"], bbox["max_lon"], bbox["max_lat"])
else:
center = '''S3.gis.lat=%s
S3.gis.lon=%s
''' % (lat, lon)
########
# Layers
########
# =====================================================================
# Overlays
#
# Duplicate Features to go across the dateline?
# @ToDo: Action this again (e.g. for DRRPP)
if settings.get_gis_duplicate_features():
duplicate_features = '''S3.gis.duplicate_features=true'''
else:
duplicate_features = ""
# ---------------------------------------------------------------------
# Features
#
# Simple Features added to the Draft layer
# - used by the Location Selector
#
_features = ""
if features:
_features = '''S3.gis.features=new Array()\n'''
counter = -1
for feature in features:
counter = counter + 1
if feature["lat"] and feature["lon"]:
# Generate JS snippet to pass to static
_features += '''S3.gis.features[%i]={
lat:%f,
lon:%f
}\n''' % (counter,
feature["lat"],
feature["lon"])
# ---------------------------------------------------------------------
# Feature Queries
#
# These can be Rows or Storage()
# NB These considerations need to be taken care of before arriving here:
# Security of data
# Localisation of name/popup_label
#
if feature_queries:
layers_feature_queries = '''
S3.gis.layers_feature_queries=new Array()'''
counter = -1
mtable = s3db.gis_marker
else:
layers_feature_queries = ""
for layer in feature_queries:
counter = counter + 1
name = str(layer["name"])
name_safe = re.sub("\W", "_", name)
# Lat/Lon via Join or direct?
try:
layer["query"][0].gis_location.lat
join = True
except:
join = False
# Push the Features into a temporary table in order to have them accessible via GeoJSON
# @ToDo: Maintenance Script to clean out old entries (> 24 hours?)
fqtable = s3db.gis_feature_query
cname = "%s_%s_%s" % (name_safe,
request.controller,
request.function)
# Clear old records
query = (fqtable.name == cname)
if auth.user:
created_by = auth.user.id
else:
# Anonymous
# @ToDo: A deployment with many Anonymous Feature Queries being
# accessed will need to change this design - e.g. use session ID instead
created_by = None
query = query & (fqtable.created_by == created_by)
db(query).delete()
for row in layer["query"]:
rowdict = {"name" : cname}
if join:
rowdict["lat"] = row.gis_location.lat
rowdict["lon"] = row.gis_location.lon
else:
rowdict["lat"] = row["lat"]
rowdict["lon"] = row["lon"]
if "popup_url" in row:
rowdict["popup_url"] = row["popup_url"]
if "popup_label" in row:
rowdict["popup_label"] = row["popup_label"]
if "marker" in row:
rowdict["marker_url"] = URL(c="static", f="img",
args=["markers",
row["marker"].image])
rowdict["marker_height"] = row["marker"].height
rowdict["marker_width"] = row["marker"].width
else:
if "marker_url" in row:
rowdict["marker_url"] = row["marker_url"]
if "marker_height" in row:
rowdict["marker_height"] = row["marker_height"]
if "marker_width" in row:
rowdict["marker_width"] = row["marker_width"]
if "shape" in row:
rowdict["shape"] = row["shape"]
if "size" in row:
rowdict["size"] = row["size"]
if "colour" in row:
rowdict["colour"] = row["colour"]
if "opacity" in row:
rowdict["opacity"] = row["opacity"]
record_id = fqtable.insert(**rowdict)
if not created_by:
auth.s3_make_session_owner(fqtable, record_id)
# URL to retrieve the data
url = "%s.geojson?feature_query.name=%s&feature_query.created_by=%s" % \
(URL(c="gis", f="feature_query"),
cname,
created_by)
if "active" in layer and not layer["active"]:
visibility = ''',
"visibility":false'''
else:
visibility = ""
markerLayer = ""
if "marker" in layer:
# per-Layer Marker
marker = layer["marker"]
if isinstance(marker, int):
# integer (marker_id) not row
query = (mtable.id == marker)
marker = db(query).select(mtable.image,
mtable.height,
mtable.width,
limitby=(0, 1),
cache=cache).first()
if marker:
markerLayer = ''',
"marker_url":"%s",
"marker_height":%i,
"marker_width":%i''' % (marker["image"], marker["height"], marker["width"])
else:
markerLayer = ""
if "opacity" in layer and layer["opacity"] != 1:
opacity = ''',
"opacity":%.1f''' % layer["opacity"]
else:
opacity = ""
if "cluster_distance" in layer and layer["cluster_distance"] != self.cluster_distance:
cluster_distance = ''',
"cluster_distance":%i''' % layer["cluster_distance"]
else:
cluster_distance = ""
if "cluster_threshold" in layer and layer["cluster_threshold"] != self.cluster_threshold:
cluster_threshold = ''',
"cluster_threshold":%i''' % layer["cluster_threshold"]
else:
cluster_threshold = ""
# Generate JS snippet to pass to static
layers_feature_queries += '''
S3.gis.layers_feature_queries[%i]={
"name":"%s",
"url":"%s"%s%s%s%s%s
}
''' % (counter,
name,
url,
visibility,
markerLayer,
opacity,
cluster_distance,
cluster_threshold)
# ---------------------------------------------------------------------
# Feature Resources
#
# REST URLs to back-end resources
#
if feature_resources:
layers_feature_resources = '''
S3.gis.layers_feature_resources=new Array()'''
counter = -1
else:
layers_feature_resources = ""
for layer in feature_resources:
counter = counter + 1
name = str(layer["name"])
id = str(layer["id"])
id = re.sub("\W", "_", id)
# URL to retrieve the data
url = layer["url"]
# Optimise the query & & tell back-end not to add the type to the tooltips
options = "components=None&maxdepth=0&references=location_id&fields=name&label_off=1"
if "?" in url:
url = "%s&%s" % (url, options)
else:
url = "%s?%s" % (url, options)
if "active" in layer and not layer["active"]:
visibility = ''',
"visibility":false'''
else:
visibility = ""
if "opacity" in layer and layer["opacity"] != 1:
opacity = ''',
"opacity":%.1f''' % layer["opacity"]
else:
opacity = ""
if "cluster_distance" in layer and layer["cluster_distance"] != self.cluster_distance:
cluster_distance = ''',
"cluster_distance":%i''' % layer["cluster_distance"]
else:
cluster_distance = ""
if "cluster_threshold" in layer and layer["cluster_threshold"] != self.cluster_threshold:
cluster_threshold = ''',
"cluster_threshold":%i''' % layer["cluster_threshold"]
else:
cluster_threshold = ""
if "marker" in layer:
marker = layer["marker"]
markerLayer = ''',
"marker_image":"%s",
"marker_height":%i,
"marker_width":%i''' % (marker["image"], marker["height"], marker["width"])
else:
markerLayer = ""
# Generate JS snippet to pass to static
layers_feature_resources += '''
S3.gis.layers_feature_resources[%i]={
"name":"%s",
"id":"%s",
"url":"%s"%s%s%s%s%s
}
''' % (counter,
name,
id,
url,
visibility,
markerLayer,
opacity,
cluster_distance,
cluster_threshold)
if catalogue_layers:
# Add all Layers from the Catalogue
layer_types = [
ArcRESTLayer,
BingLayer,
EmptyLayer,
GoogleLayer,
OSMLayer,
TMSLayer,
WMSLayer,
XYZLayer,
JSLayer,
ThemeLayer,
GeoJSONLayer,
GPXLayer,
CoordinateLayer,
GeoRSSLayer,
KMLLayer,
OpenWeatherMapLayer,
WFSLayer,
FeatureLayer,
]
else:
# Add just the default Base Layer
s3.gis.base = True
layer_types = []
ltable = s3db.gis_layer_config
etable = s3db.gis_layer_entity
query = (etable.id == ltable.layer_id) & \
(ltable.config_id == config["id"]) & \
(ltable.base == True) & \
(ltable.enabled == True)
layer = db(query).select(etable.instance_type,
limitby=(0, 1)).first()
if layer:
layer_type = layer.instance_type
if layer_type == "gis_layer_openstreetmap":
layer_types = [OSMLayer]
elif layer_type == "gis_layer_google":
# NB v3 doesn't work when initially hidden
layer_types = [GoogleLayer]
elif layer_type == "gis_layer_arcrest":
layer_types = [ArcRESTLayer]
elif layer_type == "gis_layer_bing":
layer_types = [BingLayer]
elif layer_type == "gis_layer_tms":
layer_types = [TMSLayer]
elif layer_type == "gis_layer_wms":
layer_types = [WMSLayer]
elif layer_type == "gis_layer_xyz":
layer_types = [XYZLayer]
elif layer_type == "gis_layer_empty":
layer_types = [EmptyLayer]
if not layer_types:
layer_types = [EmptyLayer]
layers_config = ""
for LayerType in layer_types:
try:
# Instantiate the Class
layer = LayerType()
layer_type_js = layer.as_javascript()
if layer_type_js:
# Add to the output JS
layers_config = "".join((layers_config,
layer_type_js))
for script in layer.scripts:
if "google.com" in script:
# Uses document.write, so can't load async
script = SCRIPT(_type="text/javascript",
_src=script)
html_append(script)
else:
add_javascript(script, ready=ready)
except Exception, exception:
error = "%s not shown: %s" % (LayerType.__name__, exception)
if debug:
raise HTTP(500, error)
else:
response.warning += error
# WMS getFeatureInfo
# (loads conditionally based on whether queryable WMS Layers have been added)
if s3.gis.get_feature_info:
getfeatureinfo = '''i18n.gis_get_feature_info="%s"
i18n.gis_feature_info="%s"
''' % (T("Get Feature Info"),
T("Feature Info"))
else:
getfeatureinfo = ""
#############
# Main script
#############
# Configure settings to pass through to Static script
# @ToDo: Consider passing this as JSON Objects to allow it to be done dynamically
config_script = "".join((
authenticated,
'''S3.public_url='%s'\n''' % public_url, # Needed just for GoogleEarthPanel
config_id,
s3_gis_window,
s3_gis_windowHide,
s3_gis_windowNotClosable,
maximizable,
collapsed,
toolbar,
loc_select,
'''S3.gis.map_height=%i\n''' % map_height,
'''S3.gis.map_width=%i\n''' % map_width,
'''S3.gis.zoom=%i\n''' % (zoom or 1),
center,
'''S3.gis.projection='%i'\n''' % projection,
'''S3.gis.units='%s'\n''' % units,
'''S3.gis.maxResolution=%f\n'''% maxResolution,
'''S3.gis.maxExtent=[%s]\n''' % maxExtent,
'''S3.gis.numZoomLevels=%i\n''' % numZoomLevels,
'''S3.gis.max_w=%i\n''' % settings.get_gis_marker_max_width(),
'''S3.gis.max_h=%i\n''' % settings.get_gis_marker_max_height(),
mouse_position,
duplicate_features,
wms_browser_name,
wms_browser_url,
mgrs_name,
mgrs_url,
draw_feature,
draw_polygon,
'''S3.gis.marker_default='%s'\n''' % marker_default.image,
'''S3.gis.marker_default_height=%i\n''' % marker_default.height,
'''S3.gis.marker_default_width=%i\n''' % marker_default.width,
osm_auth,
layers_feature_queries,
layers_feature_resources,
_features,
layers_config,
# i18n Labels
legend, # Presence of label turns feature on
search, # Presence of label turns feature on
getfeatureinfo, # Presence of labels turns feature on
upload_layer, # Presence of label turns feature on
layer_properties, # Presence of label turns feature on
'''i18n.gis_requires_login='%s'\n''' % T("Requires Login"),
'''i18n.gis_base_layers='%s'\n''' % T("Base Layers"),
'''i18n.gis_overlays='%s'\n''' % T("Overlays"),
'''i18n.gis_layers='%s'\n''' % T("Layers"),
'''i18n.gis_draft_layer='%s'\n''' % T("Draft Features"),
'''i18n.gis_cluster_multiple='%s'\n''' % T("There are multiple records at this location"),
'''i18n.gis_loading='%s'\n''' % T("Loading"),
'''i18n.gis_length_message='%s'\n''' % T("The length is"),
'''i18n.gis_area_message='%s'\n''' % T("The area is"),
'''i18n.gis_length_tooltip='%s'\n''' % T("Measure Length: Click the points along the path & end with a double-click"),
'''i18n.gis_area_tooltip='%s'\n''' % T("Measure Area: Click the points around the polygon & end with a double-click"),
'''i18n.gis_zoomfull='%s'\n''' % T("Zoom to maximum map extent"),
'''i18n.gis_zoomout='%s'\n''' % T("Zoom Out: click in the map or use the left mouse button and drag to create a rectangle"),
'''i18n.gis_zoomin='%s'\n''' % T("Zoom In: click in the map or use the left mouse button and drag to create a rectangle"),
'''i18n.gis_pan='%s'\n''' % T("Pan Map: keep the left mouse button pressed and drag the map"),
'''i18n.gis_navPrevious='%s'\n''' % T("Previous View"),
'''i18n.gis_navNext='%s'\n''' % T("Next View"),
'''i18n.gis_geoLocate='%s'\n''' % T("Zoom to Current Location"),
'''i18n.gis_draw_feature='%s'\n''' % T("Add Point"),
'''i18n.gis_draw_polygon='%s'\n''' % T("Add Polygon"),
'''i18n.gis_save='%s'\n''' % T("Save: Default Lat, Lon & Zoom for the Viewport"),
'''i18n.gis_potlatch='%s'\n''' % T("Edit the OpenStreetMap data for this area"),
# For S3LocationSelectorWidget
'''i18n.gis_current_location='%s'\n''' % T("Current Location"),
))
html_append(SCRIPT(config_script))
# Static Script
if debug:
add_javascript("scripts/S3/s3.gis.layers.js")
add_javascript("scripts/S3/s3.gis.controls.js")
add_javascript("scripts/S3/s3.gis.js")
else:
add_javascript("scripts/S3/s3.gis.min.js")
# Set up map plugins
# This, and any code it generates is done last
# However, map plugin should not assume this.
if plugins is not None:
for plugin in plugins:
plugin.extend_gis_map(
add_javascript,
html_append # for adding in dynamic configuration, etc.
)
script = "','".join(scripts)
if ready:
ready = '''%s
S3.gis.show_map()''' % ready
else:
ready = "S3.gis.show_map();"
# Tell YepNope to load all our scripts asynchronously & then run the callback
script = '''yepnope({
load:['%s'],
complete:function(){
%s
}
})''' % (script, ready)
html_append(SCRIPT(script))
return html
# =============================================================================
class Marker(object):
    """
        Represents a Map Marker
    """

    def __init__(self, id=None, layer_id=None):
        """
            Resolve the marker image/dimensions.

            @param id: primary key of a gis_marker record to load directly
            @param layer_id: layer whose symbology-specific marker should be
                             looked up instead (only used when id is absent)

            Falls back to the default marker of the active map configuration
            when neither lookup yields a record.
        """
        s3db = current.s3db
        mtable = s3db.gis_marker
        row = None
        config = None
        if id:
            # Direct lookup of the marker record by its ID
            row = current.db(mtable.id == id).select(mtable.image,
                                                     mtable.height,
                                                     mtable.width,
                                                     limitby=(0, 1),
                                                     cache=s3db.cache).first()
        elif layer_id:
            # Look for a marker linked to this layer within the
            # symbology of the active map configuration
            config = current.gis.get_config()
            ltable = s3db.gis_layer_symbology
            query = (ltable.layer_id == layer_id) & \
                    (ltable.symbology_id == config.symbology_id) & \
                    (ltable.marker_id == mtable.id)
            row = current.db(query).select(mtable.image,
                                           mtable.height,
                                           mtable.width,
                                           limitby=(0, 1)).first()
        if row:
            self.image = row.image
            self.height = row.height
            self.width = row.width
        else:
            # No marker found: use the configuration's default marker
            if not config:
                config = current.gis.get_config()
            self.image = config.marker_image
            self.height = config.marker_height
            self.width = config.marker_width
        # The marker URL is always built client-side from the image name,
        # so it is deliberately not stored here

    def add_attributes_to_output(self, output):
        """
            Called by Layer.as_dict()
            - merges the marker attributes into a layer's output dict
        """
        output["marker_image"] = self.image
        output["marker_height"] = self.height
        output["marker_width"] = self.width

    def as_dict(self):
        """
            Called by gis.get_marker()
        """
        return Storage(image = self.image,
                       height = self.height,
                       width = self.width)
# =============================================================================
class Projection(object):
    """
        Represents a Map Projection
    """

    def __init__(self, id=None):
        """
            @param id: primary key of a gis_projection record; when omitted,
                       the EPSG code of the active map configuration is used
        """
        if id:
            # Read the EPSG code from the given projection record
            s3db = current.s3db
            table = s3db.gis_projection
            record = current.db(table.id == id).select(table.epsg,
                                                       limitby=(0, 1),
                                                       cache=s3db.cache).first()
        else:
            # Default projection from the active config
            record = Storage(epsg = current.gis.get_config().epsg)
        # NOTE(review): a lookup miss (record is None) raises AttributeError
        # here, matching the original behaviour
        self.epsg = record.epsg
# =============================================================================
class Layer(object):
    """
        Abstract base class for Layers from Catalogue

        Subclasses define (at minimum) tablename, js_array and usually a
        nested SubLayer class; on instantiation this reads all enabled
        layers of that type from the active GIS configs into self.sublayers.
    """
    def __init__(self):
        sublayers = []
        append = sublayers.append
        # Scripts which the client needs for this layer type
        # (filled in by subclasses where required)
        self.scripts = []
        gis = current.response.s3.gis
        s3db = current.s3db
        s3_has_role = current.auth.s3_has_role
        # Read the Layers enabled in the Active Configs
        tablename = self.tablename
        table = s3db[tablename]
        ctable = s3db.gis_config
        ltable = s3db.gis_layer_config
        # Select all non-meta fields of the layer table plus the
        # per-config flags and the config's pe_type (for precedence ordering)
        fields = table.fields
        metafields = s3_all_meta_field_names()
        fields = [table[f] for f in fields if f not in metafields]
        fappend = fields.append
        fappend(ltable.enabled)
        fappend(ltable.visible)
        fappend(ltable.base)
        fappend(ltable.style)
        fappend(ctable.pe_type)
        query = (table.layer_id == ltable.layer_id) & \
                (ltable.config_id == ctable.id) & \
                (ltable.config_id.belongs(gis.config.ids))
        if gis.base == True:
            # Only show the default base layer
            if self.tablename == "gis_layer_empty":
                # Show even if disabled (as fallback)
                query = (table.id > 0)
            else:
                query = query & (ltable.base == True)
        # Order by pe_type so that more-specific configs come first and
        # win the de-duplication below
        rows = current.db(query).select(orderby=ctable.pe_type,
                                        *fields)
        layer_ids = []
        lappend = layer_ids.append
        SubLayer = self.SubLayer
        # Flag to show whether we've set the default baselayer
        # (otherwise a config higher in the hierarchy can overrule one lower down)
        base = True
        for _record in rows:
            record = _record[tablename]
            # Check if we've already seen this layer
            layer_id = record.layer_id
            if layer_id in layer_ids:
                continue
            # Add layer to list of checked
            lappend(layer_id)
            # Check if layer is enabled
            _config = _record["gis_layer_config"]
            if not _config.enabled:
                continue
            # Check user is allowed to access the layer
            role_required = record.role_required
            if role_required and not s3_has_role(role_required):
                continue
            # All OK - add SubLayer
            record["visible"] = _config.visible
            if base and _config.base:
                # name can't conflict with OSM/WMS/ArcREST layers
                record["_base"] = True
                base = False
            else:
                record["_base"] = False
            record["style"] = _config.style
            if tablename in ["gis_layer_bing", "gis_layer_google"]:
                # SubLayers handled differently
                append(record)
            else:
                append(SubLayer(record))
        # Alphasort layers
        # - client will only sort within their type: s3.gis.layers.js
        self.sublayers = sorted(sublayers, key=lambda row: row.name)

    # -------------------------------------------------------------------------
    def as_javascript(self):
        """
            Output the Layers as Javascript
            - suitable for inclusion in the HTML page

            @return: a '<js_array>=<json>' assignment string, or None if
                     no sublayer produced any output
        """
        sublayer_dicts = []
        append = sublayer_dicts.append
        sublayers = self.sublayers
        for sublayer in sublayers:
            # Read the output dict for this sublayer
            sublayer_dict = sublayer.as_dict()
            if sublayer_dict:
                # Add this layer to the list of layers for this layer type
                append(sublayer_dict)
        if sublayer_dicts:
            # Output the Layer Type as JSON
            layer_type_json = json.dumps(sublayer_dicts,
                                         sort_keys=True,
                                         indent=4)
            return '''%s=%s\n''' % (self.js_array, layer_type_json)
        else:
            return None

    # -------------------------------------------------------------------------
    def as_json(self):
        """
            Output the Layers as JSON

            @ToDo: Support layers with SubLayer.as_dict() to pass config
                   dynamically between server & client

            NOTE(review): self.record / self.as_dict() are not set up by
            __init__ here - presumably provided by subclasses; verify
        """
        if self.record:
            return json.dumps(self.as_dict(), indent=4, sort_keys=True)
        else:
            return

    # -------------------------------------------------------------------------
    class SubLayer(object):
        """
            Represents a single layer record; subclasses implement as_dict()
        """
        def __init__(self, record):
            # Ensure all attributes available (even if Null)
            self.__dict__.update(record)
            del record
            # Strip quote characters so the name is safe to embed in JS strings
            self.safe_name = re.sub('[\\"]', "", self.name)

            self.marker = Marker(layer_id=self.layer_id)
            if hasattr(self, "projection_id"):
                self.projection = Projection(self.projection_id)

        def setup_clustering(self, output):
            # Only emit cluster settings which differ from the global defaults
            gis = current.gis
            cluster_distance = gis.cluster_distance
            cluster_threshold = gis.cluster_threshold
            if self.cluster_distance != cluster_distance:
                output["cluster_distance"] = self.cluster_distance
            if self.cluster_threshold != cluster_threshold:
                output["cluster_threshold"] = self.cluster_threshold

        def setup_folder(self, output):
            # Folder (LayerTree directory) is optional
            if self.dir:
                output["dir"] = self.dir

        def setup_folder_and_visibility(self, output):
            # Visibility defaults to True client-side, so only emit False
            if not self.visible:
                output["visibility"] = False
            if self.dir:
                output["dir"] = self.dir

        def setup_folder_visibility_and_opacity(self, output):
            if not self.visible:
                output["visibility"] = False
            # Opacity defaults to 1 client-side; formatted to 1 decimal place
            if self.opacity != 1:
                output["opacity"] = "%.1f" % self.opacity
            if self.dir:
                output["dir"] = self.dir

        @staticmethod
        def add_attributes_if_not_default(output, **values_and_defaults):
            """
                Copy each value into output unless it equals one of its
                listed default values (each kwarg maps to (value, defaults))
            """
            # could also write values in debug mode, to check if defaults ignored.
            # could also check values are not being overwritten.
            # NB Python 2 idiom (iteritems)
            for key, (value, defaults) in values_and_defaults.iteritems():
                if value not in defaults:
                    output[key] = value
# -----------------------------------------------------------------------------
class ArcRESTLayer(Layer):
    """
    ArcGIS REST Layers from Catalogue
    """
    tablename = "gis_layer_arcrest"
    js_array = "S3.gis.layers_arcrest"

    # -------------------------------------------------------------------------
    class SubLayer(Layer.SubLayer):
        def as_dict(self):
            """ Build the client-side config dict for this sublayer """
            # Mandatory attributes
            out = {"id": self.layer_id,
                   "type": "arcrest",
                   "name": self.safe_name,
                   "url": self.url,
                   }
            # Attributes which are defaulted client-side if not set
            self.setup_folder_and_visibility(out)
            self.add_attributes_if_not_default(
                out,
                layers=(self.layers, (0,)),
                transparent=(self.transparent, (True,)),
                base=(self.base, (False,)),
                _base=(self._base, (False,)),
            )
            return out
# -----------------------------------------------------------------------------
class BingLayer(Layer):
    """
    Bing Layers from Catalogue
    """
    tablename = "gis_layer_bing"
    js_array = "S3.gis.Bing"

    # -------------------------------------------------------------------------
    def as_dict(self):
        """
        Build the combined config dict for all Bing sublayers,
        or None when there are none.
        Raises when not in Spherical Mercator or no API key is set.
        """
        sublayers = self.sublayers
        if not sublayers:
            return None
        if Projection().epsg != 900913:
            raise Exception("Cannot display Bing layers unless we're using the Spherical Mercator Projection\n")
        apikey = current.deployment_settings.get_gis_api_bing()
        if not apikey:
            raise Exception("Cannot display Bing layers unless we have an API key\n")
        # Mandatory attributes
        output = {"ApiKey": apikey}
        # Output key & fallback display name per sublayer type
        type_map = {"aerial": ("Aerial", "Bing Satellite"),
                    "road": ("Road", "Bing Roads"),
                    "hybrid": ("Hybrid", "Bing Hybrid"),
                    }
        for sublayer in sublayers:
            # Attributes which are defaulted client-side if not set
            if sublayer._base:
                # Set default Base layer
                output["Base"] = sublayer.type
            entry = type_map.get(sublayer.type)
            if entry:
                key, fallback = entry
                output[key] = {"name": sublayer.name or fallback,
                               "id": sublayer.layer_id}
        return output

    # -------------------------------------------------------------------------
    def as_javascript(self):
        """
        Output the Layer as Javascript
        - suitable for inclusion in the HTML page
        """
        layer_dict = self.as_dict()
        if layer_dict:
            return "%s=%s\n" % (self.js_array,
                                json.dumps(layer_dict, indent=4, sort_keys=True))
        return None
# -----------------------------------------------------------------------------
class CoordinateLayer(Layer):
    """
    Coordinate Layer from Catalogue
    - there should only be one of these
    """
    tablename = "gis_layer_coordinate"

    # -------------------------------------------------------------------------
    def as_javascript(self):
        """
        Output the Layer as Javascript
        - suitable for inclusion in the HTML page
        (None when there is no sublayer)
        """
        if not self.sublayers:
            return None
        sublayer = self.sublayers[0]
        # Strip single quotes so the name embeds safely in the JS literal
        name_safe = re.sub("'", "", sublayer.name)
        visibility = "true" if sublayer.visible else "false"
        return "S3.gis.CoordinateGrid={name:'%s',visibility:%s,id:%s}\n" % \
               (name_safe, visibility, sublayer.layer_id)
# -----------------------------------------------------------------------------
class EmptyLayer(Layer):
    """
    Empty Layer from Catalogue
    - there should only be one of these
    """
    tablename = "gis_layer_empty"

    # -------------------------------------------------------------------------
    def as_javascript(self):
        """
        Output the Layer as Javascript
        - suitable for inclusion in the HTML page
        (None when there is no sublayer)
        """
        if not self.sublayers:
            return None
        sublayer = self.sublayers[0]
        # Name is localisable; strip single quotes for safe JS embedding
        name_safe = re.sub("'", "", str(current.T(sublayer.name)))
        base = ",base:true" if sublayer._base else ""
        return "S3.gis.EmptyLayer={name:'%s',id:%s%s}\n" % \
               (name_safe, sublayer.layer_id, base)
# -----------------------------------------------------------------------------
class FeatureLayer(Layer):
    """
    Feature Layers from Catalogue
    """
    tablename = "gis_layer_feature"
    js_array = "S3.gis.layers_features"

    # -------------------------------------------------------------------------
    class SubLayer(Layer.SubLayer):
        def __init__(self, record):
            # Backwards-compatibility: older records use module/resource
            record_module = record.controller or record.module
            if record_module is None:
                raise Exception("FeatureLayer Record '%s' has no controller" % record.name)
            self.skip = False
            if record_module not in current.deployment_settings.modules:
                # Module is disabled
                self.skip = True
            if not current.auth.permission.has_permission(
                    "read",
                    c=record_module,
                    f=record.function or record.resource):
                # User has no permission to this resource (in ACL)
                self.skip = True
            super(FeatureLayer.SubLayer, self).__init__(record)

        def as_dict(self):
            """ Build the client-side config dict (None when skipped) """
            if self.skip:
                # Skip layer
                return None
            # Backwards-compatibility fallbacks
            controller = self.controller or self.module
            function = self.function or self.resource
            url = "%s.geojson?layer=%i&components=None&maxdepth=0&references=location_id&fields=name" % \
                  (URL(controller, function), self.id)
            if self.filter:
                url = "%s&%s" % (url, self.filter)
            if self.trackable:
                url = "%s&track=1" % url
            # Mandatory attributes
            out = {"id": self.layer_id,
                   # Defaults client-side if not-provided
                   #"type": "feature",
                   "name": self.safe_name,
                   "url": url,
                   }
            self.marker.add_attributes_to_output(out)
            self.setup_folder_visibility_and_opacity(out)
            self.setup_clustering(out)
            return out
# -----------------------------------------------------------------------------
class GeoJSONLayer(Layer):
    """
    GeoJSON Layers from Catalogue
    """
    tablename = "gis_layer_geojson"
    js_array = "S3.gis.layers_geojson"

    # -------------------------------------------------------------------------
    class SubLayer(Layer.SubLayer):
        def as_dict(self):
            """ Build the client-side config dict for this sublayer """
            # Mandatory attributes
            out = {"id": self.layer_id,
                   "type": "geojson",
                   "name": self.safe_name,
                   "url": self.url,
                   }
            self.marker.add_attributes_to_output(out)
            # Attributes which are defaulted client-side if not set
            epsg = self.projection.epsg
            if epsg != 4326:
                out["projection"] = epsg
            self.setup_folder_visibility_and_opacity(out)
            self.setup_clustering(out)
            return out
# -----------------------------------------------------------------------------
class GeoRSSLayer(Layer):
    """
    GeoRSS Layers from Catalogue

    Feeds are downloaded into the gis_cache table & then served to the
    client as GeoJSON via the gis/cache_feed controller.
    """
    tablename = "gis_layer_georss"
    js_array = "S3.gis.layers_georss"

    def __init__(self):
        super(GeoRSSLayer, self).__init__()
        # Make the cache table available to all SubLayer instances
        GeoRSSLayer.SubLayer.cachetable = current.s3db.gis_cache

    # -------------------------------------------------------------------------
    class SubLayer(Layer.SubLayer):
        def as_dict(self):
            """
            Build the client-side config dict for this GeoRSS feed,
            refreshing the cached copy first when it has gone stale.
            """
            db = current.db
            request = current.request
            response = current.response
            cachetable = self.cachetable

            url = self.url
            # Check to see if we should Download layer to the cache
            download = True
            query = (cachetable.source == url)
            existing_cached_copy = db(query).select(cachetable.modified_on,
                                                    limitby=(0, 1)).first()
            refresh = self.refresh or 900 # 15 minutes set if we have no data (legacy DB)
            if existing_cached_copy:
                modified_on = existing_cached_copy.modified_on
                cutoff = modified_on + timedelta(seconds=refresh)
                if request.utcnow < cutoff:
                    # Cached copy is still fresh enough
                    download = False
            if download:
                # Download layer to the Cache
                from gluon.tools import fetch
                # @ToDo: Call directly without going via HTTP
                # @ToDo: Make this async by using S3Task (also use this for the refresh time)
                fields = ""
                if self.data:
                    fields = "&data_field=%s" % self.data
                if self.image:
                    fields = "%s&image_field=%s" % (fields, self.image)
                _url = "%s%s/update.georss?fetchurl=%s%s" % (current.deployment_settings.get_base_public_url(),
                                                             URL(c="gis", f="cache_feed"),
                                                             url,
                                                             fields)
                # Keep Session for local URLs
                import Cookie
                cookie = Cookie.SimpleCookie()
                cookie[response.session_id_name] = response.session_id
                # Unlock the session so the fetched request can use it
                current.session._unlock(response)
                try:
                    # @ToDo: Need to commit to not have DB locked with SQLite?
                    fetch(_url, cookie=cookie)
                    if existing_cached_copy:
                        # Clear old selfs which are no longer active
                        query = (cachetable.source == url) & \
                                (cachetable.modified_on < cutoff)
                        db(query).delete()
                except Exception, exception:
                    # Download failed: log & fall back to the cache if we can
                    s3_debug("GeoRSS %s download error" % url, exception)
                    # Feed down
                    if existing_cached_copy:
                        # Use cached copy
                        # Should we Update timestamp to prevent every
                        # subsequent request attempting the download?
                        #query = (cachetable.source == url)
                        #db(query).update(modified_on=request.utcnow)
                        pass
                    else:
                        response.warning += "%s down & no cached copy available" % url

            name_safe = self.safe_name

            # Pass the GeoJSON URL to the client
            # Filter to the source of this feed
            url = "%s.geojson?cache.source=%s" % (URL(c="gis", f="cache_feed"),
                                                  url)

            # Mandatory attributes
            output = {
                "id": self.layer_id,
                "type": "georss",
                "name": name_safe,
                "url": url,
            }
            self.marker.add_attributes_to_output(output)

            # Attributes which are defaulted client-side if not set
            if self.refresh != 900:
                output["refresh"] = self.refresh
            self.setup_folder_visibility_and_opacity(output)
            self.setup_clustering(output)

            return output
# -----------------------------------------------------------------------------
class GoogleLayer(Layer):
    """
    Google Layers/Tools from Catalogue
    """
    tablename = "gis_layer_google"
    js_array = "S3.gis.Google"

    # -------------------------------------------------------------------------
    def as_dict(self):
        """
        Build the combined config dict for all Google sublayers &
        queue the external scripts which the selected services need.
        Returns None when there are no sublayers.
        """
        sublayers = self.sublayers
        if sublayers:
            T = current.T
            epsg = (Projection().epsg == 900913)
            apikey = current.deployment_settings.get_gis_api_google()
            debug = current.response.s3.debug
            add_script = self.scripts.append

            output = {}

            for sublayer in sublayers:
                # Attributes which are defaulted client-side if not set
                if sublayer.type == "earth":
                    output["Earth"] = str(T("Switch to 3D"))
                    add_script("http://www.google.com/jsapi?key=%s" % apikey)
                    add_script(SCRIPT('''try{google && google.load('earth','1')}catch(e){}''', _type="text/javascript"))
                    if debug:
                        # Non-debug has this included within GeoExt.js
                        add_script("scripts/gis/gxp/widgets/GoogleEarthPanel.js")
                elif epsg:
                    # Earth is the only layer which can run in non-Spherical Mercator
                    # @ToDo: Warning?
                    if sublayer._base:
                        # Set default Base layer
                        output["Base"] = sublayer.type
                    if sublayer.type == "satellite":
                        output["Satellite"] = {"name": sublayer.name or "Google Satellite",
                                               "id": sublayer.layer_id}
                    elif sublayer.type == "maps":
                        output["Maps"] = {"name": sublayer.name or "Google Maps",
                                          "id": sublayer.layer_id}
                    elif sublayer.type == "hybrid":
                        output["Hybrid"] = {"name": sublayer.name or "Google Hybrid",
                                            "id": sublayer.layer_id}
                    elif sublayer.type == "streetview":
                        output["StreetviewButton"] = "Click where you want to open Streetview"
                    elif sublayer.type == "terrain":
                        output["Terrain"] = {"name": sublayer.name or "Google Terrain",
                                             "id": sublayer.layer_id}
                    elif sublayer.type == "mapmaker":
                        output["MapMaker"] = {"name": sublayer.name or "Google MapMaker",
                                              "id": sublayer.layer_id}
                    elif sublayer.type == "mapmakerhybrid":
                        output["MapMakerHybrid"] = {"name": sublayer.name or "Google MapMaker Hybrid",
                                                    "id": sublayer.layer_id}

            if "MapMaker" in output or "MapMakerHybrid" in output:
                # Need to use v2 API
                # This should be able to be fixed in OpenLayers now since Google have fixed in v3 API:
                # http://code.google.com/p/gmaps-api-issues/issues/detail?id=2349#c47
                add_script("http://maps.google.com/maps?file=api&v=2&key=%s" % apikey)
            else:
                # v3 API (3.7 is frozen, 3.8 release & 3.9 is nightly)
                add_script("http://maps.google.com/maps/api/js?v=3.7&sensor=false")
                if "StreetviewButton" in output:
                    # Streetview doesn't work with v2 API
                    output["StreetviewButton"] = str(T("Click where you want to open Streetview"))
                    output["StreetviewTitle"] = str(T("Street View"))
                    if debug:
                        # Non-debug has this included within GeoExt.js
                        add_script("scripts/gis/gxp/widgets/GoogleStreetViewPanel.js")

            return output
        else:
            return None

    # -------------------------------------------------------------------------
    def as_javascript(self):
        """
        Output the Layer as Javascript
        - suitable for inclusion in the HTML page
        """
        output = self.as_dict()
        if output:
            result = json.dumps(output, indent=4, sort_keys=True)
            if result:
                return '''%s=%s\n''' % (self.js_array, result)

        return None
# -----------------------------------------------------------------------------
class GPXLayer(Layer):
    """
    GPX Layers from Catalogue
    """
    tablename = "gis_layer_gpx"
    js_array = "S3.gis.layers_gpx"

    # -------------------------------------------------------------------------
    class SubLayer(Layer.SubLayer):
        def as_dict(self):
            """ Build the client-side config dict for this sublayer """
            # The track file is served via the default download controller
            url = URL(c="default", f="download",
                      args=self.track)
            # Mandatory attributes
            out = {"id": self.layer_id,
                   "name": self.safe_name,
                   "url": url,
                   }
            self.marker.add_attributes_to_output(out)
            # Attributes which are defaulted client-side if not set
            self.add_attributes_if_not_default(
                out,
                waypoints=(self.waypoints, (True,)),
                tracks=(self.tracks, (True,)),
                routes=(self.routes, (True,)),
            )
            self.setup_folder_visibility_and_opacity(out)
            self.setup_clustering(out)
            return out
# -----------------------------------------------------------------------------
class JSLayer(Layer):
    """
    JS Layers from Catalogue
    - these are raw Javascript layers for use by expert OpenLayers people
      to quickly add/configure new data sources without needing support
      from back-end Sahana programmers
    """
    tablename = "gis_layer_js"

    # -------------------------------------------------------------------------
    def as_javascript(self):
        """
        Output the Layer as Javascript
        - wraps each sublayer's raw code in a single addJSLayers()
          function (None when there are no sublayers)
        """
        if not self.sublayers:
            return None
        pieces = ["function addJSLayers() {"]
        pieces.extend(sublayer.code for sublayer in self.sublayers)
        return "%s\n}" % "\n".join(pieces)
# -----------------------------------------------------------------------------
class KMLLayer(Layer):
    """
    KML Layers from Catalogue

    Feeds are cached on the filesystem (uploads/gis_cache) where
    possible, with bookkeeping in the gis_cache2 table.
    """
    tablename = "gis_layer_kml"
    js_array = "S3.gis.layers_kml"

    # -------------------------------------------------------------------------
    def __init__(self):
        "Set up the KML cache, should be done once per request"
        super(KMLLayer, self).__init__()

        # Needed for gis.download_kml()
        self.table = current.s3db[self.tablename]

        # Can we cache downloaded KML feeds?
        # Needed for unzipping & filtering as well
        # @ToDo: Should we move this folder to static to speed up access to cached content?
        #           Do we need to secure it?
        cachepath = os.path.join(current.request.folder,
                                 "uploads",
                                 "gis_cache")

        if os.path.exists(cachepath):
            # Folder exists: cacheable iff it is writable
            cacheable = os.access(cachepath, os.W_OK)
        else:
            # Try to create the cache folder
            try:
                os.mkdir(cachepath)
            except OSError, os_error:
                s3_debug(
                    "GIS: KML layers cannot be cached: %s %s" % (
                        cachepath,
                        os_error
                    )
                )
                cacheable = False
            else:
                cacheable = True
        # Store on the class so SubLayer instances can share them
        # @ToDo: Migrate to gis_cache
        KMLLayer.cachetable = current.s3db.gis_cache2
        KMLLayer.cacheable = cacheable
        KMLLayer.cachepath = cachepath

    # -------------------------------------------------------------------------
    class SubLayer(Layer.SubLayer):
        def as_dict(self):
            """
            Build the client-side config dict for this KML source,
            scheduling a (re)download into the cache when it is stale.
            """
            db = current.db
            request = current.request

            cachetable = KMLLayer.cachetable
            cacheable = KMLLayer.cacheable
            cachepath = KMLLayer.cachepath

            name = self.name
            if cacheable:
                # Build a filesystem-safe cache filename from the layer name
                _name = urllib2.quote(name)
                _name = _name.replace("%", "_")
                filename = "%s.file.%s.kml" % (cachetable._tablename,
                                               _name)

                # Should we download a fresh copy of the source file?
                download = True
                query = (cachetable.name == name)
                cached = db(query).select(cachetable.modified_on,
                                          limitby=(0, 1)).first()
                refresh = self.refresh or 900 # 15 minutes set if we have no data (legacy DB)
                if cached:
                    modified_on = cached.modified_on
                    cutoff = modified_on + timedelta(seconds=refresh)
                    if request.utcnow < cutoff:
                        # Cached copy is still fresh enough
                        download = False

                if download:
                    # Download file (async, if workers alive)
                    current.s3task.async("gis_download_kml",
                                         args=[self.id, filename])
                    if cached:
                        db(query).update(modified_on=request.utcnow)
                    else:
                        cachetable.insert(name=name, file=filename)

                url = URL(c="default", f="download",
                          args=[filename])
            else:
                # No caching possible (e.g. GAE), display file direct from remote (using Proxy)
                # (Requires OpenLayers.Layer.KML to be available)
                url = self.url

            output = dict(
                id = self.layer_id,
                name = self.safe_name,
                url = url,
            )
            self.add_attributes_if_not_default(
                output,
                title = (self.title, ("name", None, "")),
                body = (self.body, ("description", None)),
                refresh = (self.refresh, (900,)),
            )
            self.setup_folder_visibility_and_opacity(output)
            self.setup_clustering(output)
            self.marker.add_attributes_to_output(output)
            return output
# -----------------------------------------------------------------------------
class OSMLayer(Layer):
    """
    OpenStreetMap Layers from Catalogue

    @ToDo: Provide a catalogue of standard layers which are fully-defined
           in static & can just have name over-ridden, as well as
           fully-custom layers.
    """
    tablename = "gis_layer_openstreetmap"
    js_array = "S3.gis.layers_osm"

    # -------------------------------------------------------------------------
    class SubLayer(Layer.SubLayer):
        def as_dict(self):
            """ Build the client-side config dict for this sublayer """
            if Projection().epsg != 900913:
                # Cannot display OpenStreetMap layers unless we're using the Spherical Mercator Projection
                return {}
            # Mandatory attributes
            out = {"id": self.layer_id,
                   "name": self.safe_name,
                   "url1": self.url1,
                   }
            # Attributes which are defaulted client-side if not set
            self.add_attributes_if_not_default(
                out,
                base=(self.base, (True,)),
                _base=(self._base, (False,)),
                url2=(self.url2, ("",)),
                url3=(self.url3, ("",)),
                zoomLevels=(self.zoom_levels, (9,)),
                attribution=(self.attribution, (None,)),
            )
            self.setup_folder_and_visibility(out)
            return out
# -----------------------------------------------------------------------------
class OpenWeatherMapLayer(Layer):
    """
    OpenWeatherMap Layers from Catalogue
    """
    tablename = "gis_layer_openweathermap"
    js_array = "S3.gis.OWM"

    # -------------------------------------------------------------------------
    def as_dict(self):
        """
        Build the combined config dict for the OWM sublayers,
        or None when there are none.
        """
        sublayers = self.sublayers
        if not sublayers:
            return None
        if current.response.s3.debug:
            # Non-debug has this included within OpenLayers.js
            self.scripts.append("scripts/gis/OWM.OpenLayers.1.3.0.2.js")
        # Fallback display name per sublayer type
        fallbacks = {"station": "Weather Stations",
                     "city": "Current Weather",
                     }
        output = {}
        for sublayer in sublayers:
            fallback = fallbacks.get(sublayer.type)
            if fallback:
                output[sublayer.type] = {"name": sublayer.name or fallback,
                                         "id": sublayer.layer_id,
                                         "dir": sublayer.dir,
                                         "visibility": sublayer.visible
                                         }
        return output

    # -------------------------------------------------------------------------
    def as_javascript(self):
        """
        Output the Layer as Javascript
        - suitable for inclusion in the HTML page
        """
        layer_dict = self.as_dict()
        if layer_dict:
            return "%s=%s\n" % (self.js_array,
                                json.dumps(layer_dict, indent=4, sort_keys=True))
        return None
# -----------------------------------------------------------------------------
class ThemeLayer(Layer):
    """
    Theme Layers from Catalogue
    """
    tablename = "gis_layer_theme"
    js_array = "S3.gis.layers_theme"

    # -------------------------------------------------------------------------
    class SubLayer(Layer.SubLayer):
        def as_dict(self):
            """ Build the client-side config dict for this sublayer """
            url = "%s.geojson?theme_data.layer_theme_id=%i&polygons=1&maxdepth=0&references=location_id&fields=value" % \
                  (URL(c="gis", f="theme_data"),
                   self.id)
            # Mandatory attributes
            out = {"id": self.layer_id,
                   "type": "theme",
                   "name": self.safe_name,
                   "url": url,
                   }
            self.setup_folder_and_visibility(out)
            self.setup_clustering(out)
            # Style is stored as JSON text in the record
            style = json.loads(self.style)
            self.add_attributes_if_not_default(
                out,
                style=(style, (None,)),
            )
            return out
# -----------------------------------------------------------------------------
class TMSLayer(Layer):
    """
    TMS Layers from Catalogue
    """
    tablename = "gis_layer_tms"
    js_array = "S3.gis.layers_tms"

    # -------------------------------------------------------------------------
    class SubLayer(Layer.SubLayer):
        def as_dict(self):
            """ Build the client-side config dict for this sublayer """
            # Mandatory attributes
            out = {"id": self.layer_id,
                   "type": "tms",
                   "name": self.safe_name,
                   "url": self.url,
                   "layername": self.layername,
                   }
            # Attributes which are defaulted client-side if not set
            self.add_attributes_if_not_default(
                out,
                _base=(self._base, (False,)),
                url2=(self.url2, (None,)),
                url3=(self.url3, (None,)),
                format=(self.img_format, ("png", None)),
                zoomLevels=(self.zoom_levels, (19,)),
                attribution=(self.attribution, (None,)),
            )
            self.setup_folder(out)
            return out
# -----------------------------------------------------------------------------
class WFSLayer(Layer):
    """
    WFS Layers from Catalogue
    """
    tablename = "gis_layer_wfs"
    js_array = "S3.gis.layers_wfs"

    # -------------------------------------------------------------------------
    class SubLayer(Layer.SubLayer):
        def as_dict(self):
            """ Build the client-side config dict for this sublayer """
            # Mandatory attributes
            out = {"id": self.layer_id,
                   "name": self.safe_name,
                   "url": self.url,
                   "title": self.title,
                   "featureType": self.featureType,
                   "featureNS": self.featureNS,
                   "schema": self.wfs_schema,
                   }
            # Attributes which are defaulted client-side if not set
            self.add_attributes_if_not_default(
                out,
                version=(self.version, ("1.1.0",)),
                geometryName=(self.geometryName, ("the_geom",)),
                username=(self.username, (None,)),
                password=(self.password, (None,)),
                styleField=(self.style_field, (None,)),
                styleValues=(self.style_values, ("{}", None)),
                projection=(self.projection.epsg, (4326,)),
                #editable
            )
            self.setup_folder_visibility_and_opacity(out)
            self.setup_clustering(out)
            return out
# -----------------------------------------------------------------------------
class WMSLayer(Layer):
    """
    WMS Layers from Catalogue
    """
    js_array = "S3.gis.layers_wms"
    tablename = "gis_layer_wms"

    # -------------------------------------------------------------------------
    def __init__(self):
        super(WMSLayer, self).__init__()
        if self.sublayers and current.response.s3.debug:
            # Non-debug has this included within GeoExt.js
            self.scripts.append("scripts/gis/gxp/plugins/WMSGetFeatureInfo.js")

    # -------------------------------------------------------------------------
    class SubLayer(Layer.SubLayer):
        def as_dict(self):
            """ Build the client-side config dict for this sublayer """
            if self.queryable:
                # Tell the page to wire up GetFeatureInfo support
                current.response.s3.gis.get_feature_info = True
            # Mandatory attributes
            out = {"id": self.layer_id,
                   "name": self.safe_name,
                   "url": self.url,
                   "layers": self.layers,
                   }
            legend_url = self.legend_url
            if legend_url and not legend_url.startswith("http"):
                # Relative URL: make it absolute for the client
                legend_url = "%s/%s%s" % \
                    (current.deployment_settings.get_base_public_url(),
                     current.request.application,
                     legend_url)
            # Attributes which are defaulted client-side if not set
            self.add_attributes_if_not_default(
                out,
                transparent=(self.transparent, (True,)),
                version=(self.version, ("1.1.1",)),
                format=(self.img_format, ("image/png",)),
                map=(self.map, (None,)),
                username=(self.username, (None,)),
                password=(self.password, (None,)),
                buffer=(self.buffer, (0,)),
                base=(self.base, (False,)),
                _base=(self._base, (False,)),
                style=(self.style, (None,)),
                bgcolor=(self.bgcolor, (None,)),
                tiled=(self.tiled, (False,)),
                legendURL=(legend_url, (None,)),
                queryable=(self.queryable, (False,)),
            )
            self.setup_folder_visibility_and_opacity(out)
            return out
# -----------------------------------------------------------------------------
class XYZLayer(Layer):
    """
    XYZ Layers from Catalogue
    """
    tablename = "gis_layer_xyz"
    js_array = "S3.gis.layers_xyz"

    # -------------------------------------------------------------------------
    class SubLayer(Layer.SubLayer):
        def as_dict(self):
            """ Build the client-side config dict for this sublayer """
            # Mandatory attributes
            out = {"id": self.layer_id,
                   "name": self.safe_name,
                   "url": self.url,
                   }
            # Attributes which are defaulted client-side if not set
            self.add_attributes_if_not_default(
                out,
                _base=(self._base, (False,)),
                url2=(self.url2, (None,)),
                url3=(self.url3, (None,)),
                format=(self.img_format, ("png", None)),
                zoomLevels=(self.zoom_levels, (19,)),
                attribution=(self.attribution, (None,)),
            )
            self.setup_folder(out)
            return out
# =============================================================================
class S3Map(S3Search):
    """
    Class to generate a Map with a Search form above it

    @ToDo: Allow .configure() to override normal search_method with one
           for map (like report)
    """

    # -------------------------------------------------------------------------
    def apply_method(self, r, **attr):
        """
        Entry point to apply search method to S3Requests

        @param r: the S3Request
        @param attr: request attributes
        """
        output = dict()

        search = self.resource.search
        if r.component and self != search:
            output = search(r, **attr)

        # Save search
        elif "save" in r.vars:
            r.interactive = False
            output = self.save_search(r, **attr)

        # Interactive or saved search
        elif "load" in r.vars or r.interactive and \
             search._S3Search__interactive:
            # Put shortcuts where other methods expect them
            self.advanced = search.advanced
            # We want advanced open by default
            #self.simple = search.simple
            output = self.search_interactive(r, **attr)

        if not output:
            # Not supported
            r.error(501, current.manager.ERROR.BAD_FORMAT)

        return output

    # -------------------------------------------------------------------------
    def search_interactive(self, r, **attr):
        """
        Interactive search: builds the search form(s) & a map showing
        the search results as a GeoJSON feature layer.

        @param r: the S3Request instance
        @param attr: request parameters

        @ToDo: Reload Map Layer by AJAX rather than doing a full-page refresh
        @ToDo: Static JS to resize page to bounds when layer is loaded
        @ToDo: Refactor components common to parent class
        """
        T = current.T
        session = current.session

        table = self.table

        # Only mappable resources can be shown on the map
        if "location_id" in table or \
           "site_id" in table:
            # ok
            pass
        else:
            session.error = T("This resource cannot be displayed on the map!")
            redirect(r.url(method="search"))

        # Get environment
        request = self.request
        response = current.response
        resource = self.resource
        db = current.db
        s3db = current.s3db
        gis = current.gis
        tablename = self.tablename

        # Initialize the form
        form = DIV(_class="search_form form-container")

        # Figure out which set of form values to use
        # POST > GET > session > unfiltered
        if r.http == "POST":
            # POST
            form_values = r.post_vars
        else:
            url_options = Storage([(k, v) for k, v in r.get_vars.iteritems() if v])
            if url_options:
                # GET
                form_values = url_options
            else:
                session_options = session.s3.search_options
                if session_options and tablename in session_options:
                    # session
                    session_options = session_options[tablename]
                else:
                    # unfiltered
                    session_options = Storage()
                form_values = session_options

        # Build the search forms
        simple_form, advanced_form = self.build_forms(r, form_values)

        # Check for Load Search
        if "load" in r.get_vars:
            search_id = r.get_vars.get("load", None)
            if not search_id:
                r.error(400, current.manager.ERROR.BAD_RECORD)
            r.post_vars = r.vars
            search_table = s3db.pr_save_search
            _query = (search_table.id == search_id)
            # Bugfix: select the search_vars column from the table
            # (previously this referenced "record" before it was assigned,
            # which raised a NameError); also use the already-bound db
            record = db(_query).select(search_table.search_vars,
                                       limitby=(0, 1)).first()
            if not record:
                r.error(400, current.manager.ERROR.BAD_RECORD)
            # Restore the saved criteria & replay them as a POST
            s_vars = cPickle.loads(record.search_vars)
            r.post_vars = Storage(s_vars["criteria"])
            r.http = "POST"

        # Process the search forms
        query, errors = self.process_forms(r,
                                           simple_form,
                                           advanced_form,
                                           form_values)
        if not errors:
            resource.add_filter(query)
            search_vars = dict(simple=False,
                               advanced=True,
                               criteria=form_values)
        else:
            search_vars = dict()

        if response.s3.simple_search:
            form.append(DIV(_id="search-mode", _mode="simple"))
        else:
            form.append(DIV(_id="search-mode", _mode="advanced"))

        # Save Search Widget
        if session.auth and \
           current.deployment_settings.get_save_search_widget():
            save_search = self.save_search_widget(r, search_vars, **attr)
        else:
            save_search = DIV()

        # Complete the output form
        if simple_form is not None:
            simple_form.append(save_search)
            form.append(simple_form)
        if advanced_form is not None:
            advanced_form.append(save_search)
            form.append(advanced_form)

        # Add a map for search results
        # (this same map is also used by the Map Search Widget, if-present)
        # Build URL to load the features onto the map
        if query:
            url_vars = query.serialize_url(resource=resource)
        else:
            url_vars = None
        url = URL(extension="geojson",
                  args=None,
                  vars=url_vars)
        feature_resources = [{
            "name": T("Search Results"),
            "id": "search_results",
            "url": url,
            "active": True,
            "marker": gis.get_marker(request.controller, request.function)
        }]
        map = gis.show_map(feature_resources=feature_resources,
                           catalogue_layers=True,
                           legend=True,
                           toolbar=True,
                           collapsed=True,
                           search=True,
                           )

        # Title
        title = self.crud_string(tablename, "title_map")

        # View
        response.view = self._view(r, "map.html")

        # RHeader gets added later in S3Method()
        output = dict(title=title,
                      form=form,
                      map=map,
                      )
        return output
# =============================================================================
class Geocoder(object):
    """
    Base class for all Geocoders
    """

    def __init__(self):
        """ Initializes the page content object """
        pass

    # -------------------------------------------------------------------------
    @staticmethod
    def get_api_key(type):
        """ Acquire API key from the database """
        pass
# -----------------------------------------------------------------------------
class GoogleGeocoder(Geocoder):
    """
    Google Geocoder module
    http://code.google.com/apis/maps/documentation/javascript/v2/reference.html#GGeoStatusCode
    Should convert this to be a thin wrapper for modules.geopy.geocoders.google
    """

    def __init__(self, location):
        """ Initialise parent class & make any necessary modifications """
        Geocoder.__init__(self)
        # Build the service URL with the configured API key
        params = {"q": location,
                  "key": current.deployment_settings.get_gis_api_google()}
        self.url = "http://maps.google.com/maps/geo?%s" % urllib.urlencode(params)

    # -------------------------------------------------------------------------
    def get_json(self):
        """ Returns the output in JSON format """
        from gluon.tools import fetch
        return fetch(self.url)
# -----------------------------------------------------------------------------
class YahooGeocoder(Geocoder):
    """
    Yahoo Geocoder module
    Should convert this to be a thin wrapper for modules.geopy.geocoders.`
    """

    def __init__(self, location):
        """ Initialise parent class & make any necessary modifications """
        Geocoder.__init__(self)
        # Build the service URL with the configured App ID
        params = {"location": location,
                  "appid": current.deployment_settings.get_gis_api_yahoo()}
        self.url = "http://local.yahooapis.com/MapsService/V1/geocode?%s" % urllib.urlencode(params)

    # -------------------------------------------------------------------------
    def get_xml(self):
        """ Return the output in XML format """
        from gluon.tools import fetch
        return fetch(self.url)
# =============================================================================
class S3ExportPOI(S3Method):
""" Export point-of-interest resources for a location """
# -------------------------------------------------------------------------
def apply_method(self, r, **attr):
    """
    Apply method: only GET is supported; anything else raises 405.

    @param r: the S3Request
    @param attr: controller options for this request
    """
    manager = current.manager
    output = {}
    if r.http == "GET":
        output = self.export(r, **attr)
    else:
        # Method not allowed
        r.error(405, manager.ERROR.BAD_METHOD)
    return output
# -------------------------------------------------------------------------
def export(self, r, **attr):
    """
    Export POI resources.

    URL options:
        - "resources"   list of tablenames to export records from
        - "msince"      datetime in ISO format, "auto" to use the
                        feed's last update
        - "update_feed" 0 to skip the update of the feed's last
                        update datetime, useful for trial exports

    Supported formats:
        .xml    S3XML
        .osm    OSM XML Format
        .kml    Google KML
    (other formats can be requested, but may give unexpected results)

    @param r: the S3Request
    @param attr: controller options for this request
    """
    import datetime, time
    tfmt = current.xml.ISOFORMAT

    # Determine request Lx
    current_lx = r.record
    if not current_lx: # or not current_lx.level:
        # Must have a location
        # Fix: was current.manager.error (lowercase) - inconsistent with
        # manager.ERROR.* used everywhere else in this class
        r.error(400, current.manager.ERROR.BAD_REQUEST)
    else:
        self.lx = current_lx.id

    # Parse the ?resources= parameter
    if "resources" in r.get_vars:
        resources = r.get_vars["resources"]
    else:
        # Fallback to deployment_setting
        resources = current.deployment_settings.get_gis_poi_resources()
    if not isinstance(resources, list):
        resources = [resources]
    # Each entry may itself be a comma-separated list of tablenames
    # (plain loop: a list comprehension used only for its side effect
    # would allocate a throwaway list)
    tables = []
    for t in resources:
        tables.extend(t.split(","))

    # Parse the ?update_feed= parameter
    update_feed = True
    if "update_feed" in r.get_vars:
        if r.get_vars["update_feed"] == "0":
            update_feed = False

    # Parse the ?msince= parameter
    msince = None
    if "msince" in r.get_vars:
        msince = r.get_vars["msince"]
        if msince.lower() == "auto":
            msince = "auto"
        else:
            try:
                (y, m, d, hh, mm, ss, t0, t1, t2) = \
                    time.strptime(msince, tfmt)
                msince = datetime.datetime(y, m, d, hh, mm, ss)
            except ValueError:
                # Unparseable timestamp => ignore
                msince = None

    # Export a combined tree
    tree = self.export_combined_tree(tables,
                                     msince=msince,
                                     update_feed=update_feed)

    xml = current.xml
    manager = current.manager

    # Set response headers
    headers = current.response.headers
    representation = r.representation
    if representation in manager.json_formats:
        as_json = True
        default = "application/json"
    else:
        as_json = False
        default = "text/xml"
    headers["Content-Type"] = manager.content_type.get(representation,
                                                       default)

    # Find XSLT stylesheet and transform
    stylesheet = r.stylesheet()
    if tree and stylesheet is not None:
        args = Storage(domain=manager.domain,
                       base_url=manager.s3.base_url,
                       utcnow=datetime.datetime.utcnow().strftime(tfmt))
        tree = xml.transform(tree, stylesheet, **args)

    # Fix: initialise output so that a falsy tree does not raise
    # UnboundLocalError at the return below
    output = None
    if tree:
        if as_json:
            output = xml.tree2json(tree, pretty_print=True)
        else:
            output = xml.tostring(tree, pretty_print=True)

    return output
# -------------------------------------------------------------------------
def export_combined_tree(self, tables, msince=None, update_feed=True):
"""
Export a combined tree of all records in tables, which
are in Lx, and have been updated since msince.
@param tables: list of table names
@param msince: minimum modified_on datetime, "auto" for
automatic from feed data, None to turn it off
@param update_feed: update the last_update datetime in the feed
"""
db = current.db
s3db = current.s3db
ftable = s3db.gis_poi_feed
lx = self.lx
elements = []
results = 0
for tablename in tables:
# Define the resource
try:
resource = s3db.resource(tablename, components=[])
except AttributeError:
# Table not defined (module deactivated?)
continue
# Check
if "location_id" not in resource.fields:
# Hardly a POI resource without location_id
continue
# Add Lx filter
self._add_lx_filter(resource, lx)
# Get the feed data
query = (ftable.tablename == tablename) & \
(ftable.location_id == lx)
feed = db(query).select(limitby=(0, 1)).first()
if msince == "auto":
if feed is None:
_msince = None
else:
_msince = feed.last_update
else:
_msince = msince
# Export the tree and append its element to the element list
tree = resource.export_tree(msince=_msince,
references=["location_id"])
# Update the feed data
if update_feed:
muntil = resource.muntil
if feed is None:
ftable.insert(location_id = lx,
tablename = tablename,
last_update = muntil)
else:
feed.update_record(last_update = muntil)
elements.extend([c for c in tree.getroot()])
# Combine all elements in one tree and return it
tree = current.xml.tree(elements, results=len(elements))
return tree
# -------------------------------------------------------------------------
@staticmethod
def _add_lx_filter(resource, lx):
"""
Add a Lx filter for the current location to this
resource.
@param resource: the resource
"""
from s3resource import S3FieldSelector as FS
query = (FS("location_id$path").contains("/%s/" % lx)) | \
(FS("location_id$path").like("%s/%%" % lx))
resource.add_filter(query)
# -----------------------------------------------------------------------------
class S3ImportPOI(S3Method):
    """ Import point-of-interest resources for a location """

    # -------------------------------------------------------------------------
    @staticmethod
    def apply_method(r, **attr):
        """
        Apply method: renders an import form (HTML only) and, on
        submit, imports PoIs either from an uploaded .osm file or
        from a local OSM mirror database via Osmosis.

        @param r: the S3Request
        @param attr: controller options for this request
        """

        if r.representation == "html":

            T = current.T
            s3db = current.s3db
            request = current.request
            response = current.response

            title = T("Import from OpenStreetMap")

            # Form: either a local .osm file upload, or connection
            # details for an OSM mirror database to extract from
            form = FORM(
                    TABLE(
                        TR(
                            TD(T("Can read PoIs either from an OpenStreetMap file (.osm) or mirror."),
                               _colspan=3),
                            ),
                        TR(
                            TD(B("%s: " % T("File"))),
                            TD(INPUT(_type="file", _name="file", _size="50")),
                            TD(SPAN("*", _class="req",
                                    _style="padding-right: 5px;"))
                            ),
                        TR(
                            TD(),
                            TD(T("or")),
                            TD(),
                            ),
                        TR(
                            TD(B("%s: " % T("Host"))),
                            TD(INPUT(_type="text", _name="host",
                                     _id="host", _value="localhost")),
                            TD(),
                            ),
                        TR(
                            TD(B("%s: " % T("Database"))),
                            TD(INPUT(_type="text", _name="database",
                                     _id="database", _value="osm")),
                            TD(),
                            ),
                        TR(
                            TD(B("%s: " % T("User"))),
                            TD(INPUT(_type="text", _name="user",
                                     _id="user", _value="osm")),
                            TD(),
                            ),
                        TR(
                            TD(B("%s: " % T("Password"))),
                            TD(INPUT(_type="text", _name="password",
                                     _id="password", _value="osm")),
                            TD(),
                            ),
                        TR(
                            TD(B("%s: " % T("Ignore Errors?"))),
                            TD(INPUT(_type="checkbox", _name="ignore_errors",
                                     _id="ignore_errors")),
                            TD(),
                            ),
                        TR(TD(),
                           TD(INPUT(_type="submit", _value=T("Import"))),
                           TD(),
                           )
                        )
                    )

            if not r.id:
                from s3validators import IS_LOCATION
                from s3widgets import S3LocationAutocompleteWidget
                # dummy field
                field = s3db.org_office.location_id
                field.requires = IS_NULL_OR(IS_LOCATION())
                widget = S3LocationAutocompleteWidget()(field, None)
                row = TR(TD(B("%s: " % T("Location"))),
                         TD(widget),
                         TD(SPAN("*", _class="req",
                                 _style="padding-right: 5px;"))
                         )
                form[0].insert(3, row)

            response.view = "create.html"
            output = dict(title=title,
                          form=form)

            if form.accepts(request.vars, current.session):

                vars = form.vars
                if vars.file != "":
                    # An .osm file was uploaded - use it directly
                    File = vars.file.file
                else:
                    # No file: extract one from an OSM mirror database.
                    # First create a .poly file for the location boundary
                    if r.record:
                        record = r.record
                    elif not vars.location_id:
                        form.errors["location_id"] = T("Location is Required!")
                        return output
                    else:
                        gtable = s3db.gis_location
                        record = current.db(gtable.id == vars.location_id).select(gtable.name,
                                                                                  gtable.wkt,
                                                                                  limitby=(0, 1)
                                                                                  ).first()
                        if record.wkt is None:
                            form.errors["location_id"] = T("Location needs to have WKT!")
                            return output
                    error = GIS.create_poly(record)
                    if error:
                        current.session.error = error
                        redirect(URL(args=r.id))
                    # Use Osmosis to extract an .osm file using this .poly
                    name = record.name
                    if os.path.exists(os.path.join(os.getcwd(), "temp")): # use web2py/temp
                        TEMP = os.path.join(os.getcwd(), "temp")
                    else:
                        import tempfile
                        TEMP = tempfile.gettempdir()
                    filename = os.path.join(TEMP, "%s.osm" % name)
                    cmd = ["/home/osm/osmosis/bin/osmosis", # @ToDo: deployment_setting
                           "--read-pgsql",
                           "host=%s" % vars.host,
                           "database=%s" % vars.database,
                           "user=%s" % vars.user,
                           "password=%s" % vars.password,
                           "--dataset-dump",
                           "--bounding-polygon",
                           "file=%s" % os.path.join(TEMP, "%s.poly" % name),
                           "--write-xml",
                           "file=%s" % filename,
                           ]
                    import subprocess
                    try:
                        # cmd is a list, so shell=False (the default):
                        # passing shell=True with a list would execute
                        # only cmd[0] and silently drop all arguments
                        subprocess.check_output(cmd, stderr=subprocess.STDOUT)
                    except subprocess.CalledProcessError as e:
                        current.session.error = T("OSM file generation failed: %s") % e.output
                        redirect(URL(args=r.id))
                    except AttributeError:
                        # Python < 2.7 has no check_output
                        error = subprocess.call(cmd)
                        if error:
                            current.session.error = T("OSM file generation failed!")
                            redirect(URL(args=r.id))
                    try:
                        File = open(filename, "r")
                    except IOError:
                        current.session.error = T("Cannot open created OSM file!")
                        redirect(URL(args=r.id))

                # Transform the OSM XML into S3XML and import it into
                # every configured PoI resource
                stylesheet = os.path.join(request.folder, "static", "formats",
                                          "osm", "import.xsl")
                ignore_errors = vars.get("ignore_errors", None)
                xml = current.xml
                tree = xml.parse(File)
                define_resource = s3db.resource
                response.error = ""
                import_count = 0
                for tablename in current.deployment_settings.get_gis_poi_resources():
                    try:
                        table = s3db[tablename]
                    except:
                        # Module disabled
                        continue
                    resource = define_resource(tablename)
                    s3xml = xml.transform(tree, stylesheet_path=stylesheet,
                                          name=resource.name)
                    try:
                        resource.import_xml(s3xml,
                                            ignore_errors=ignore_errors)
                        import_count += resource.import_count
                    except:
                        # Keep going, but surface the error to the user
                        import sys
                        response.error += str(sys.exc_info()[1])
                if import_count:
                    response.confirmation = "%s %s" % \
                        (import_count,
                         T("PoIs successfully imported."))
                else:
                    response.information = T("No PoIs available.")

            return output

        else:
            raise HTTP(501, BADMETHOD)
# END =========================================================================
|
vgupta6/Project-2
|
modules/s3/s3gis.py
|
Python
|
mit
| 313,191
|
[
"Amber"
] |
a28fa46cdc94f8246e2285cb99b6926ba075736412b4fa0e2f70ff041ffca6de
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.