code
stringlengths 1
199k
|
|---|
'output dimensionalities for each column'
import csv
import sys
import re
import math
from collections import defaultdict
def get_words( text ):
    """Tokenize *text* into a list of unique lowercase words, preserving
    first-seen order.

    Apostrophes are dropped first (so "don't" -> "dont"); every other run
    of non-word characters becomes a separator.
    """
    text = text.replace( "'", "" )
    text = re.sub( r'\W+', ' ', text )
    text = text.lower()
    # Use a set for O(1) duplicate detection instead of the original
    # O(n^2) `w in words` list scan; output order is unchanged.
    words = []
    seen = set()
    for w in text.split():
        if w in seen:
            continue
        seen.add( w )
        words.append( w )
    return words
# Scan a CSV file and report, for each column of interest, how many distinct
# values/tokens it contains -- i.e. the output dimensionality each column
# would have after one-hot encoding.
# NOTE: Python 2 script (uses reader.next() and print statements).
csv.field_size_limit( 1000000 )  # allow very long free-text fields
input_file = sys.argv[1]
target_col = 'SalaryNormalized'
cols2tokenize = [ 'Title', 'FullDescription' ]  # free text: count unique words
cols2binarize = [ 'Loc1', 'Loc2', 'Loc3', 'Loc4', 'ContractType', 'ContractTime', 'Company', 'Category', 'SourceName' ]  # categorical: count unique values
cols2drop = [ 'SalaryRaw' ]
i_f = open( input_file )
reader = csv.reader( i_f )
headers = reader.next()
target_index = headers.index( target_col )
# map column names to their positions in each row
indexes2tokenize = map( lambda x: headers.index( x ), cols2tokenize )
indexes2binarize = map( lambda x: headers.index( x ), cols2binarize )
indexes2drop = map( lambda x: headers.index( x ), cols2drop )
n = 0
unique_values = defaultdict( set )  # column index -> set of distinct values/words
for line in reader:
    for i in indexes2binarize:
        value = line[i]
        unique_values[i].add( value )
    for i in indexes2tokenize:
        words = get_words( line[i] )
        unique_values[i].update( words )
    n += 1
    if n % 10000 == 0:
        print n  # progress indicator
for i in sorted( unique_values ):
    l = len( unique_values[i] )
    print "index: %s, count: %s" % ( i, l )
    if l < 100:
        pass
        # print unique_values[i]
|
import pyqtgraph as pg
from pyqtgraph.Qt import QtGui, QtCore
import numpy as np
import csv, gzip, os
from pyqtgraph import Point
class GlassDB:
    """
    Database of dispersion coefficients for Schott glasses
    + Corning 7980

    The catalog is read from a gzipped CSV; refractive indices are computed
    on demand with the Sellmeier equation and cached per wavelength.
    """
    def __init__(self, fileName='schott_glasses.csv'):
        path = os.path.dirname(__file__)
        # BUG FIX: honor the fileName argument (the '.gz' suffix is appended
        # here); previously the catalog path was hard-coded and the
        # parameter was silently ignored.
        fh = gzip.open(os.path.join(path, fileName + '.gz'), 'rb')
        r = csv.reader(map(str, fh.readlines()))
        lines = [x for x in r]
        self.data = {}
        header = lines[0]
        # first column is the glass name; remaining columns keyed by header
        for l in lines[1:]:
            info = {}
            for i in range(1, len(l)):
                info[header[i]] = l[i]
            self.data[l[0]] = info
        self.data['Corning7980'] = { ## Thorlabs UV fused silica--not in schott catalog.
            'B1': 0.68374049400,
            'B2': 0.42032361300,
            'B3': 0.58502748000,
            'C1': 0.00460352869,
            'C2': 0.01339688560,
            'C3': 64.49327320000,
            'TAUI25/250': 0.95, ## transmission data is fabricated, but close.
            'TAUI25/1400': 0.98,
        }
        for k in self.data:
            self.data[k]['ior_cache'] = {}

    def ior(self, glass, wl):
        """
        Return the index of refraction for *glass* at wavelength *wl* (nm).
        The *glass* argument must be a key in self.data.
        """
        info = self.data[glass]
        cache = info['ior_cache']
        if wl not in cache:
            # Sellmeier dispersion formula; wavelength in micrometers
            B = list(map(float, [info['B1'], info['B2'], info['B3']]))
            C = list(map(float, [info['C1'], info['C2'], info['C3']]))
            w2 = (wl/1000.)**2
            n = np.sqrt(1.0 + (B[0]*w2 / (w2-C[0])) + (B[1]*w2 / (w2-C[1])) + (B[2]*w2 / (w2-C[2])))
            cache[wl] = n
        return cache[wl]

    def transmissionCurve(self, glass):
        """Return a (2, N) array of (wavelength key, transmission) pairs
        taken from the catalog's TAUI25/<wl> columns."""
        data = self.data[glass]
        keys = [int(x[7:]) for x in data.keys() if 'TAUI25' in x]
        keys.sort()
        curve = np.empty((2, len(keys)))
        for i in range(len(keys)):
            curve[0][i] = keys[i]
            key = 'TAUI25/%d' % keys[i]
            val = data[key]
            # empty catalog cells mean "no transmission data"
            if val == '':
                val = 0
            else:
                val = float(val)
            curve[1][i] = val
        return curve
GLASSDB = GlassDB()


def wlPen(wl):
    """Return a pen whose color approximates the wavelength *wl* (nm).

    Hue runs linearly over the visible band (400-700 nm, clipped to
    [0, 0.8]); brightness rolls off outside the visible range.
    """
    visible_lo, visible_hi = 400, 700
    span = visible_hi - visible_lo
    hue = np.clip((visible_hi - wl) * 0.8 / span, 0, 0.8)
    if wl > 700:
        # fade out into the infrared
        val = 1.0 * (((700 - wl) / 700.) + 1)
    elif wl < 400:
        # fade out into the ultraviolet
        val = wl * 1.0 / 400.
    else:
        val = 1.0
    return pg.mkPen(pg.hsvColor(hue, 1.0, val))
class ParamObj(object):
    """Mixin that stores named parameters and fires a change hook.

    Subclasses override setParams()/paramStateChanged() to react to
    parameter updates; item-style access (obj['name']) is supported.
    """

    def __init__(self):
        self.__state = {}

    def setParams(self, **params):
        """Set parameters for this optic. This is a good function to override for subclasses."""
        self.__state.update(params)
        self.paramStateChanged()

    def setParam(self, param, val):
        self.setParams(**{param: val})

    def getParam(self, param):
        return self.__state[param]

    def paramStateChanged(self):
        # Hook for subclasses; the base class does nothing on change.
        pass

    def __setitem__(self, item, val):
        self.setParam(item, val)

    def __getitem__(self, item):
        # bug in pyside 1.2.2 causes getitem to be called inside
        # QGraphicsObject.parentItem: https://bugreports.qt.io/browse/PYSIDE-441
        return self.getParam(item)
class Optic(pg.GraphicsObject, ParamObj):
    """Base class for optical elements: wraps a CircularSolid graphics item
    plus an interactive ROI used to drag/rotate the optic in the scene."""
    sigStateChanged = QtCore.Signal()

    def __init__(self, gitem, **params):
        ParamObj.__init__(self)
        pg.GraphicsObject.__init__(self) #, [0,0], [1,1])
        self.gitem = gitem
        self.surfaces = gitem.surfaces
        gitem.setParentItem(self)
        # ROI with a rotate handle so the user can move/rotate the optic
        self.roi = pg.ROI([0,0], [1,1])
        self.roi.addRotateHandle([1, 1], [0.5, 0.5])
        self.roi.setParentItem(self)
        defaults = {
            'pos': Point(0,0),
            'angle': 0,
        }
        defaults.update(params)
        self._ior_cache = {}
        self.roi.sigRegionChanged.connect(self.roiChanged)
        self.setParams(**defaults)

    def updateTransform(self):
        # NOTE(review): uses Qt4-style item translate()/rotate(); presumably
        # matches the pyqtgraph/Qt version this demo targets -- confirm.
        self.resetTransform()
        self.setPos(0, 0)
        self.translate(Point(self['pos']))
        self.rotate(self['angle'])

    def setParam(self, param, val):
        ParamObj.setParam(self, param, val)

    def paramStateChanged(self):
        """Some parameters of the optic have changed."""
        # Move graphics item
        self.gitem.setPos(Point(self['pos']))
        self.gitem.resetTransform()
        self.gitem.rotate(self['angle'])

        # Move ROI to match; disconnect its change signal first so this
        # update cannot recursively trigger roiChanged.
        try:
            self.roi.sigRegionChanged.disconnect(self.roiChanged)
            br = self.gitem.boundingRect()
            o = self.gitem.mapToParent(br.topLeft())
            self.roi.setAngle(self['angle'])
            self.roi.setPos(o)
            self.roi.setSize([br.width(), br.height()])
        finally:
            self.roi.sigRegionChanged.connect(self.roiChanged)
        self.sigStateChanged.emit()

    def roiChanged(self, *args):
        # The user moved/rotated the ROI: push its state back into params.
        pos = self.roi.pos()
        # rotate gitem temporarily so we can decide where it will need to move
        self.gitem.resetTransform()
        self.gitem.rotate(self.roi.angle())
        br = self.gitem.boundingRect()
        o1 = self.gitem.mapToParent(br.topLeft())
        self.setParams(angle=self.roi.angle(), pos=pos + (self.gitem.pos() - o1))

    def boundingRect(self):
        # Drawing is delegated to gitem; the optic itself paints nothing.
        return QtCore.QRectF()

    def paint(self, p, *args):
        pass

    def ior(self, wavelength):
        """Index of refraction of this optic's glass at *wavelength* (nm)."""
        return GLASSDB.ior(self['glass'], wavelength)
class Lens(Optic):
    """A refractive lens with two circular (or flat) surfaces."""
    def __init__(self, **params):
        defaults = {
            'dia': 25.4,  ## diameter of lens
            'r1': 50.,    ## positive means convex, use 0 for planar
            'r2': 0,      ## negative means convex
            'd': 4.0,     ## center thickness
            'glass': 'N-BK7',
            'reflect': False,
        }
        defaults.update(params)
        # convert thickness into the two surface x-positions
        d = defaults.pop('d')
        defaults['x1'] = -d/2.
        defaults['x2'] = d/2.
        gitem = CircularSolid(brush=(100, 100, 130, 100), **defaults)
        Optic.__init__(self, gitem, **defaults)

    def propagateRay(self, ray):
        """Refract, reflect, absorb, and/or scatter ray. This function may create and return new rays"""
        """
        NOTE:: We can probably use this to compute refractions faster: (from GLSL 120 docs)

        For the incident vector I and surface normal N, and the
        ratio of indices of refraction eta, return the refraction
        vector. The result is computed by
        k = 1.0 - eta * eta * (1.0 - dot(N, I) * dot(N, I))
        if (k < 0.0)
            return genType(0.0)
        else
            return eta * I - (eta * dot(N, I) + sqrt(k)) * N
        The input parameters for the incident vector I and the
        surface normal N must already be normalized to get the
        desired results. eta == ratio of IORs

        For reflection:
        For the incident vector I and surface orientation N,
        returns the reflection direction:
        I – 2 ∗ dot(N, I) ∗ N
        N must already be normalized in order to achieve the
        desired result.
        """
        # Medium on the far side of each surface: the glass, then air.
        iors = [self.ior(ray['wl']), 1.0]
        for i in [0,1]:
            surface = self.surfaces[i]
            ior = iors[i]
            p1, ai = surface.intersectRay(ray)
            if p1 is None:
                # Ray misses this surface: leave it open-ended.
                ray.setEnd(None)
                break
            p1 = surface.mapToItem(ray, p1)
            rd = ray['dir']
            a1 = np.arctan2(rd[1], rd[0])
            # Snell's law: bend the ray about the surface normal.
            ar = a1 - ai + np.arcsin((np.sin(ai) * ray['ior'] / ior))
            ray.setEnd(p1)
            dp = Point(np.cos(ar), np.sin(ar))
            # Spawn the refracted continuation segment.
            ray = Ray(parent=ray, ior=ior, dir=dp)
        return [ray]
class Mirror(Optic):
    """A reflective optic (flat or curved front surface)."""
    def __init__(self, **params):
        defaults = {
            'r1': 0,
            'r2': 0,
            'd': 0.01,  ## very thin substrate
        }
        defaults.update(params)
        # convert thickness into the two surface x-positions
        d = defaults.pop('d')
        defaults['x1'] = -d/2.
        defaults['x2'] = d/2.
        gitem = CircularSolid(brush=(100,100,100,255), **defaults)
        Optic.__init__(self, gitem, **defaults)

    def propagateRay(self, ray):
        """Refract, reflect, absorb, and/or scatter ray. This function may create and return new rays"""
        # Only the front surface reflects; rays never enter the substrate.
        surface = self.surfaces[0]
        p1, ai = surface.intersectRay(ray)
        if p1 is not None:
            p1 = surface.mapToItem(ray, p1)
            rd = ray['dir']
            a1 = np.arctan2(rd[1], rd[0])
            # Mirror reflection: flip the direction about the surface normal.
            ar = a1 + np.pi - 2*ai
            ray.setEnd(p1)
            dp = Point(np.cos(ar), np.sin(ar))
            ray = Ray(parent=ray, dir=dp)
        else:
            # Ray misses the mirror: leave it open-ended.
            ray.setEnd(None)
        return [ray]
class CircularSolid(pg.GraphicsObject, ParamObj):
    """GraphicsObject with two circular or flat surfaces."""
    def __init__(self, pen=None, brush=None, **opts):
        """
        Arguments for each surface are:
        x1,x2 - position of center of _physical surface_
        r1,r2 - radius of curvature
        d1,d2 - diameter of optic
        """
        defaults = dict(x1=-2, r1=100, d1=25.4, x2=2, r2=100, d2=25.4)
        defaults.update(opts)
        ParamObj.__init__(self)
        # Second surface gets a negated radius so both arcs bow outward.
        self.surfaces = [CircleSurface(defaults['r1'], defaults['d1']), CircleSurface(-defaults['r2'], defaults['d2'])]
        pg.GraphicsObject.__init__(self)
        for s in self.surfaces:
            s.setParentItem(self)
        if pen is None:
            self.pen = pg.mkPen((220,220,255,200), width=1, cosmetic=True)
        else:
            self.pen = pg.mkPen(pen)
        if brush is None:
            self.brush = pg.mkBrush((230, 230, 255, 30))
        else:
            self.brush = pg.mkBrush(brush)
        self.setParams(**defaults)

    def paramStateChanged(self):
        self.updateSurfaces()

    def updateSurfaces(self):
        """Reposition both surfaces and rebuild the closed outline path."""
        self.surfaces[0].setParams(self['r1'], self['d1'])
        self.surfaces[1].setParams(-self['r2'], self['d2'])
        self.surfaces[0].setPos(self['x1'], 0)
        self.surfaces[1].setPos(self['x2'], 0)

        # Outline = first surface path + reversed second surface path, closed.
        self.path = QtGui.QPainterPath()
        self.path.connectPath(self.surfaces[0].path.translated(self.surfaces[0].pos()))
        self.path.connectPath(self.surfaces[1].path.translated(self.surfaces[1].pos()).toReversed())
        self.path.closeSubpath()

    def boundingRect(self):
        return self.path.boundingRect()

    def shape(self):
        return self.path

    def paint(self, p, *args):
        p.setRenderHints(p.renderHints() | p.Antialiasing)
        p.setPen(self.pen)
        p.fillPath(self.path, self.brush)
        p.drawPath(self.path)
class CircleSurface(pg.GraphicsObject):
    """One surface (flat line or spherical arc) of an optic.

    The center of the physical surface is at (0, 0); *radius* is the radius
    of curvature (0 => flat) and *diameter* is the optic's edge diameter.
    """
    def __init__(self, radius=None, diameter=None):
        pg.GraphicsObject.__init__(self)
        self.r = radius
        self.d = diameter
        self.mkPath()

    def setParams(self, r, d):
        """Update curvature radius and diameter, rebuilding the path."""
        self.r = r
        self.d = d
        self.mkPath()

    def mkPath(self):
        """Rebuild self.path: a vertical segment for flat surfaces, else an
        arc spanning the optic's clear aperture. Also caches half-height h2."""
        self.prepareGeometryChange()
        r = self.r
        d = self.d
        h2 = d/2.
        self.path = QtGui.QPainterPath()
        if r == 0:  ## flat surface
            self.path.moveTo(0, h2)
            self.path.lineTo(0, -h2)
        else:
            ## half-height of surface can't be larger than radius
            h2 = min(h2, abs(r))
            arc = QtCore.QRectF(0, -r, r*2, r*2)
            a1 = np.arcsin(h2/r) * 180. / np.pi
            a2 = -2*a1
            a1 += 180.
            self.path.arcMoveTo(arc, a1)
            self.path.arcTo(arc, a1, a2)
        self.h2 = h2

    def boundingRect(self):
        return self.path.boundingRect()

    def paint(self, p, *args):
        return  ## usually we let the optic draw.

    def intersectRay(self, ray):
        """Return (intersection point, angle of incidence) where *ray* hits
        this surface, or (None, None) if it misses."""
        h = self.h2
        r = self.r
        # position and angle of ray in local coords.
        p, dir = ray.currentState(relativeTo=self)
        p = p - Point(r, 0)  ## move position so center of circle is at 0,0

        if r == 0:
            ## flat surface: project the ray onto the x == 0 line
            if dir[0] == 0:
                y = 0
            else:
                y = p[1] - p[0] * dir[1]/dir[0]
            if abs(y) > h:
                return None, None
            else:
                return (Point(0, y), np.arctan2(dir[1], dir[0]))
        else:
            ## find intersection of circle and line (quadratic formula)
            dx = dir[0]
            dy = dir[1]
            dr = (dx**2 + dy**2) ** 0.5
            D = p[0] * (p[1]+dy) - (p[0]+dx) * p[1]
            idr2 = 1.0 / dr**2
            disc = r**2 * dr**2 - D**2
            if disc < 0:
                return None, None
            disc2 = disc**0.5
            if dy < 0:
                sgn = -1
            else:
                sgn = 1

            br = self.path.boundingRect()
            x1 = (D*dy + sgn*dx*disc2) * idr2
            y1 = (-D*dx + abs(dy)*disc2) * idr2
            if br.contains(x1+r, y1):
                pt = Point(x1, y1)
            else:
                x2 = (D*dy - sgn*dx*disc2) * idr2
                y2 = (-D*dx - abs(dy)*disc2) * idr2
                pt = Point(x2, y2)
                if not br.contains(x2+r, y2):
                    # Neither quadratic root lies on the drawn arc -> miss.
                    # (An unreachable `raise` that followed this return in
                    # the original was removed as dead code.)
                    return None, None
            norm = np.arctan2(pt[1], pt[0])
            if r < 0:
                norm += np.pi
            dp = p - pt
            ang = np.arctan2(dp[1], dp[0])
            # translate back into surface coordinates; incidence = ang - norm
            return pt + Point(r, 0), ang-norm
class Ray(pg.GraphicsObject, ParamObj):
    """Represents a single straight segment of a ray"""
    sigStateChanged = QtCore.Signal()

    def __init__(self, **params):
        ParamObj.__init__(self)
        defaults = {
            'ior': 1.0,        # index of refraction of the current medium
            'wl': 500,         # wavelength in nm (drives pen color)
            'end': None,       # endpoint; None -> drawn open-ended
            'dir': Point(1,0), # propagation direction
        }
        self.params = {}
        pg.GraphicsObject.__init__(self)
        self.children = []
        parent = params.get('parent', None)
        if parent is not None:
            # Chain from the parent segment: start where it ended and
            # inherit wavelength/medium/direction.
            defaults['start'] = parent['end']
            defaults['wl'] = parent['wl']
            self['ior'] = parent['ior']
            self['dir'] = parent['dir']
            parent.addChild(self)
        defaults.update(params)
        defaults['dir'] = Point(defaults['dir'])
        self.setParams(**defaults)
        self.mkPath()

    def clearChildren(self):
        # Recursively remove all downstream segments from the scene.
        for c in self.children:
            c.clearChildren()
            c.setParentItem(None)
            self.scene().removeItem(c)
        self.children = []

    def paramStateChanged(self):
        pass

    def addChild(self, ch):
        self.children.append(ch)
        ch.setParentItem(self)

    def currentState(self, relativeTo=None):
        """Return (position, direction), optionally mapped into the
        coordinate system of *relativeTo*."""
        pos = self['start']
        dir = self['dir']
        if relativeTo is None:
            return pos, dir
        else:
            trans = self.itemTransform(relativeTo)[0]
            p1 = trans.map(pos)
            p2 = trans.map(pos + dir)
            return Point(p1), Point(p2-p1)

    def setEnd(self, end):
        self['end'] = end
        self.mkPath()

    def boundingRect(self):
        return self.path.boundingRect()

    def paint(self, p, *args):
        p.setRenderHints(p.renderHints() | p.Antialiasing)
        # Additive blending so overlapping rays brighten each other.
        p.setCompositionMode(p.CompositionMode_Plus)
        p.setPen(wlPen(self['wl']))
        p.drawPath(self.path)

    def mkPath(self):
        self.prepareGeometryChange()
        self.path = QtGui.QPainterPath()
        self.path.moveTo(self['start'])
        if self['end'] is not None:
            self.path.lineTo(self['end'])
        else:
            # No endpoint yet: draw a long open-ended segment.
            self.path.lineTo(self['start']+500*self['dir'])
def trace(rays, optics):
    """Recursively propagate each ray through the chain of optics.

    Each optic may replace a ray with one or more continuation segments;
    those are traced through the remaining optics in turn.
    """
    if not optics or not rays:
        return
    first, remaining = optics[0], optics[1:]
    for ray in rays:
        ray.clearChildren()
        trace(first.propagateRay(ray), remaining)
class Tracer(QtCore.QObject):
    """
    Simple ray tracer.

    Initialize with a list of rays and optics;
    calling trace() will cause rays to be extended by propagating them through
    each optic in sequence.
    """
    def __init__(self, rays, optics):
        QtCore.QObject.__init__(self)
        self.optics = optics
        self.rays = rays
        # Re-trace automatically whenever any optic is moved or changed.
        for o in self.optics:
            o.sigStateChanged.connect(self.trace)
        self.trace()

    def trace(self):
        """Recompute all ray paths through the optic chain."""
        trace(self.rays, self.optics)
|
import logging
from urllib.parse import urljoin
import lxml.etree # noqa: S410
import requests
from django.conf import settings as django_settings
from django.utils import timezone
logger = logging.getLogger(__name__)
class ClientError(Exception):
    """Base class for all errors raised by the Valimo client."""


class ResponseParseError(ClientError):
    """Raised when a backend response cannot be parsed."""


class ResponseStatusError(ClientError):
    """Raised when a backend response carries an unexpected status."""


class RequestError(ClientError):
    """Raised when a request to the backend fails.

    The raw HTTP response (if any) is kept on the ``response`` attribute.
    """

    def __init__(self, message, response=None):
        super().__init__(message)
        self.response = response


class UnknownStatusError(ResponseParseError):
    """Raised when a response status code is not recognized."""
class Response:
    """Base wrapper around a raw SOAP response body.

    Subclasses implement init_response_attributes() to extract their
    fields from the parsed XML tree.
    """
    # Default ETSI TS102204 namespace used by Valimo responses.
    ns_namespace = 'http://uri.etsi.org/TS102204/v1.1.2#'

    def __init__(self, content):
        etree = lxml.etree.fromstring(content)  # noqa: S320
        self.init_response_attributes(etree)

    def init_response_attributes(self, etree):
        """ Define response attributes based on valimo request content """
        raise NotImplementedError
class Request:
    """Base class for SOAP requests against the Valimo backend.

    Subclasses provide ``url`` (endpoint path), ``template`` (SOAP body
    with format placeholders) and ``response_class`` (Response subclass
    used to wrap a successful reply).
    """
    url = NotImplemented
    template = NotImplemented
    response_class = NotImplemented
    settings = getattr(django_settings, 'WALDUR_AUTH_VALIMO', {})

    @classmethod
    def execute(cls, **kwargs):
        """POST the filled-in template; return a parsed response object.

        Raises RequestError when the backend replies with a non-2xx status.
        """
        endpoint = cls._get_url()
        headers = {
            'content-type': 'text/xml',
            'SOAPAction': endpoint,
        }
        payload = cls.template.strip().format(
            AP_ID=cls.settings['AP_ID'],
            AP_PWD=cls.settings['AP_PWD'],
            Instant=cls._format_datetime(timezone.now()),
            DNSName=cls.settings['DNSName'],
            **kwargs
        )
        # client TLS certificate pair used to authenticate to the backend
        client_cert = (cls.settings['cert_path'], cls.settings['key_path'])
        # TODO: add verification
        logger.debug(
            'Executing POST request to %s with data:\n %s \nheaders: %s',
            endpoint,
            payload,
            headers,
        )
        response = requests.post(
            endpoint,
            data=payload,
            headers=headers,
            cert=client_cert,
            verify=cls.settings['verify_ssl'],
        )
        if not response.ok:
            message = (
                'Failed to execute POST request against %s endpoint. Response [%s]: %s'
                % (endpoint, response.status_code, response.content)
            )
            raise RequestError(message, response)
        return cls.response_class(response.content)

    @classmethod
    def _format_datetime(cls, d):
        # backend expects UTC-style timestamps with fixed milliseconds
        return d.strftime('%Y-%m-%dT%H:%M:%S.000Z')

    @classmethod
    def _format_transaction_id(cls, transaction_id):
        return ('_' + transaction_id)[:32]  # such formation is required by server.

    @classmethod
    def _get_url(cls):
        return urljoin(cls.settings['URL'], cls.url)
class SignatureResponse(Response):
    """Parses MSS_Signature responses: backend transaction id + status."""

    def init_response_attributes(self, etree):
        """Extract backend_transaction_id and status from the XML tree.

        Raises ResponseParseError when the expected elements/attributes
        are missing.
        """
        try:
            self.backend_transaction_id = etree.xpath('//MSS_SignatureResp')[0].attrib[
                'MSSP_TransID'
            ]
            self.status = etree.xpath(
                '//ns6:StatusCode', namespaces={'ns6': self.ns_namespace}
            )[0].attrib['Value']
        except (IndexError, KeyError, lxml.etree.XMLSchemaError) as e:
            # chain the original exception so parse failures keep their cause
            raise ResponseParseError(
                'Cannot parse signature response: %s. Response content: %s'
                % (e, lxml.etree.tostring(etree))
            ) from e
class SignatureRequest(Request):
    """MSS_Signature SOAP request: asks the user's phone to sign a message."""
    url = '/MSSP/services/MSS_Signature'
    template = """
        <?xml version="1.0" encoding="UTF-8"?>
        <soapenv:Envelope xmlns:soapenv="http://www.w3.org/2003/05/soap-envelope"
                          xmlns:xsd="http://www.w3.org/2001/XMLSchema"
                          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
            <soapenv:Body>
                <MSS_Signature xmlns="">
                    <MSS_SignatureReq MajorVersion="1" MessagingMode="{MessagingMode}" MinorVersion="1" TimeOut="300">
                        <ns1:AP_Info AP_ID="{AP_ID}" AP_PWD="{AP_PWD}" AP_TransID="{AP_TransID}"
                                     Instant="{Instant}" xmlns:ns1="http://uri.etsi.org/TS102204/v1.1.2#"/>
                        <ns2:MSSP_Info xmlns:ns2="http://uri.etsi.org/TS102204/v1.1.2#">
                            <ns2:MSSP_ID>
                                <ns2:DNSName>{DNSName}</ns2:DNSName>
                            </ns2:MSSP_ID>
                        </ns2:MSSP_Info>
                        <ns3:MobileUser xmlns:ns3="http://uri.etsi.org/TS102204/v1.1.2#">
                            <ns3:MSISDN>{MSISDN}</ns3:MSISDN>
                        </ns3:MobileUser>
                        <ns4:DataToBeSigned Encoding="UTF-8" MimeType="text/plain" xmlns:ns4="http://uri.etsi.org/TS102204/v1.1.2#">
                            {DataToBeSigned}
                        </ns4:DataToBeSigned>
                        <ns5:SignatureProfile xmlns:ns5="http://uri.etsi.org/TS102204/v1.1.2#">
                            <ns5:mssURI>{SignatureProfile}</ns5:mssURI>
                        </ns5:SignatureProfile>
                        <ns6:MSS_Format xmlns:ns6="http://uri.etsi.org/TS102204/v1.1.2#">
                            <ns6:mssURI>http://uri.etsi.org/TS102204/v1.1.2#PKCS7</ns6:mssURI>
                        </ns6:MSS_Format>
                    </MSS_SignatureReq>
                </MSS_Signature>
            </soapenv:Body>
        </soapenv:Envelope>
    """
    response_class = SignatureResponse

    @classmethod
    def execute(cls, transaction_id, phone, message):
        """Start an asynchronous signature session for *phone*.

        The user is shown "<message_prefix> <message>" on the device.
        """
        kwargs = {
            'MessagingMode': 'asynchClientServer',
            'AP_TransID': cls._format_transaction_id(transaction_id),
            'MSISDN': phone,
            'DataToBeSigned': '%s %s' % (cls.settings['message_prefix'], message),
            'SignatureProfile': cls.settings['SignatureProfile'],
        }
        return super(SignatureRequest, cls).execute(**kwargs)
class Statuses:
    """Symbolic transaction statuses and the backend-code mapping."""
    OK = 'OK'
    PROCESSING = 'Processing'
    ERRED = 'Erred'

    @classmethod
    def map(cls, status_code):
        """Translate a backend status code ('502' done, '504' pending).

        Raises UnknownStatusError for any other code.
        """
        known = {'502': cls.OK, '504': cls.PROCESSING}
        if status_code in known:
            return known[status_code]
        raise UnknownStatusError(
            'Received unsupported status in response: %s' % status_code
        )
class StatusResponse(Response):
    """Parses MSS_StatusQuery responses: mapped status plus (optionally)
    the signer's civil number."""

    def init_response_attributes(self, etree):
        """Extract status (and civil_number when present) from the XML tree.

        Raises ResponseParseError when the status element is missing or the
        UserIdentifier text has an unexpected shape.
        """
        try:
            status_code = etree.xpath(
                '//ns5:StatusCode', namespaces={'ns5': self.ns_namespace}
            )[0].attrib['Value']
        except (IndexError, KeyError, lxml.etree.XMLSchemaError) as e:
            # chain the original exception so parse failures keep their cause
            raise ResponseParseError(
                'Cannot parse status response: %s. Response content: %s'
                % (e, lxml.etree.tostring(etree))
            ) from e
        self.status = Statuses.map(status_code)
        try:
            civil_number_tag = etree.xpath(
                '//ns4:UserIdentifier', namespaces={'ns4': self.ns_namespace}
            )[0]
        except IndexError:
            # civil number tag does not exist - this is possible if request is still processing
            return
        else:
            try:
                # tag text looks like "<key>=<civil_number>"
                self.civil_number = civil_number_tag.text.split('=')[1]
            except IndexError as e:
                raise ResponseParseError(
                    'Cannot get civil_number from tag text: %s' % civil_number_tag.text
                ) from e
class ErredStatusResponse(Response):
    """Represents a SOAP fault: status is always ERRED and ``details``
    holds the fault text."""
    # namespace of the SOAP envelope carrying the fault element
    soapenv_namespace = 'http://www.w3.org/2003/05/soap-envelope'

    def init_response_attributes(self, etree):
        self.status = Statuses.ERRED
        try:
            self.details = etree.xpath(
                '//soapenv:Text', namespaces={'soapenv': self.soapenv_namespace}
            )[0].text
        except (IndexError, lxml.etree.XMLSchemaError) as e:
            # chain the original exception so parse failures keep their cause
            raise ResponseParseError(
                'Cannot parse error status response: %s. Response content: %s'
                % (e, lxml.etree.tostring(etree))
            ) from e
class StatusRequest(Request):
    """MSS_StatusQuery SOAP request: polls the state of a signature session."""
    url = '/MSSP/services/MSS_StatusPort'
    template = """
        <?xml version="1.0" encoding="UTF-8"?>
        <soapenv:Envelope xmlns:soapenv="http://www.w3.org/2003/05/soap-envelope"
                          xmlns:xsd="http://www.w3.org/2001/XMLSchema"
                          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
            <soapenv:Body>
                <MSS_StatusQuery xmlns="">
                    <MSS_StatusReq MajorVersion="1" MinorVersion="1">
                        <ns1:AP_Info AP_ID="{AP_ID}" AP_PWD="{AP_PWD}" AP_TransID="{AP_TransID}"
                                     Instant="{Instant}" xmlns:ns1="http://uri.etsi.org/TS102204/v1.1.2#"/>
                        <ns2:MSSP_Info xmlns:ns2="http://uri.etsi.org/TS102204/v1.1.2#">
                            <ns2:MSSP_ID>
                                <ns2:DNSName>{DNSName}</ns2:DNSName>
                            </ns2:MSSP_ID>
                        </ns2:MSSP_Info>
                        <ns3:MSSP_TransID xmlns:ns3="http://uri.etsi.org/TS102204/v1.1.2#">{MSSP_TransID}</ns3:MSSP_TransID>
                    </MSS_StatusReq>
                </MSS_StatusQuery>
            </soapenv:Body>
        </soapenv:Envelope>
    """
    response_class = StatusResponse

    @classmethod
    def execute(cls, transaction_id, backend_transaction_id):
        """Poll the backend; on HTTP failure wrap the reply as an
        ErredStatusResponse instead of raising."""
        kwargs = {
            'AP_TransID': cls._format_transaction_id(transaction_id),
            'MSSP_TransID': backend_transaction_id,
        }
        try:
            return super(StatusRequest, cls).execute(**kwargs)
        except RequestError as e:
            # If request was timed out or user canceled login - Valimo would return response with status 500
            return ErredStatusResponse(e.response.content)
|
from PyQt4 import QtCore
import acq4.Manager
import acq4.util.imageAnalysis as imageAnalysis
# Live beam-width measurement script for acq4 (Python 2): collects camera
# frames and repeatedly fits a 2D gaussian to the image center.
run = True  # set False to pause measuring; collected frames are discarded
man = acq4.Manager.getManager()
cam = man.getDevice('Camera')
frames = []  # frames received since the last measurement

def collect(frame):
    # Accumulate every new camera frame until measure() consumes them.
    global frames
    frames.append(frame)

cam.sigNewFrame.connect(collect)
def measure():
    """Fit a 2D gaussian to the newest frame and print the beam width.

    Reschedules itself via QTimer, so it runs forever on the Qt event loop.
    """
    if len(frames) == 0:
        # no frames yet; poll again shortly
        QtCore.QTimer.singleShot(100, measure)
        return
    global run
    if run:
        global frames
        frame = frames[-1]  # only the newest frame is analyzed
        frames = []
        img = frame.data()
        w,h = img.shape
        # crop to the central fifth of the image (Python 2 integer division)
        img = img[2*w/5:3*w/5, 2*h/5:3*h/5]
        w,h = img.shape
        # initial guess: amplitude 100, centered, sigma = w/4
        fit = imageAnalysis.fitGaussian2D(img, [100, w/2., h/2., w/4., 0])
        # convert sigma to full width at 1/e
        fit[0][3] *= 2 * 2**0.5
        print "WIDTH:", fit[0][3] * frame.info()['pixelSize'][0] * 1e6, "um"
        print " fit:", fit
    else:
        # paused: drop any frames collected meanwhile
        global frames
        frames = []
    QtCore.QTimer.singleShot(2000, measure)

measure()
|
"""Create oauthclient tables."""
import sqlalchemy as sa
import sqlalchemy_utils
from alembic import op
from sqlalchemy.engine.reflection import Inspector
# Alembic revision identifiers.
revision = '97bbc733896c'
down_revision = '44ab9963e8cf'
branch_labels = ()
depends_on = '9848d0149abd'
def upgrade():
    """Upgrade database."""
    # Remote accounts: one row per (user, OAuth client), provider payload
    # stored as JSON in extra_data.
    op.create_table(
        'oauthclient_remoteaccount',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('client_id', sa.String(length=255), nullable=False),
        sa.Column(
            'extra_data',
            sqlalchemy_utils.JSONType(),
            nullable=False),
        sa.ForeignKeyConstraint(['user_id'], [u'accounts_user.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('user_id', 'client_id')
    )
    # External identities: maps (external id, auth method) -> local user.
    op.create_table(
        'oauthclient_useridentity',
        sa.Column('id', sa.String(length=255), nullable=False),
        sa.Column('method', sa.String(length=255), nullable=False),
        sa.Column('id_user', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['id_user'], [u'accounts_user.id'], ),
        sa.PrimaryKeyConstraint('id', 'method')
    )
    op.create_index(
        'useridentity_id_user_method', 'oauthclient_useridentity',
        ['id_user', 'method'], unique=True
    )
    # OAuth tokens per remote account; access_token is stored encrypted.
    op.create_table(
        'oauthclient_remotetoken',
        sa.Column('id_remote_account', sa.Integer(), nullable=False),
        sa.Column('token_type', sa.String(length=40), nullable=False),
        sa.Column(
            'access_token',
            sqlalchemy_utils.EncryptedType(),
            nullable=False),
        sa.Column('secret', sa.Text(), nullable=False),
        sa.ForeignKeyConstraint(
            ['id_remote_account'], [u'oauthclient_remoteaccount.id'],
            name='fk_oauthclient_remote_token_remote_account'
        ),
        sa.PrimaryKeyConstraint('id_remote_account', 'token_type')
    )
def downgrade():
    """Downgrade database."""
    ctx = op.get_context()
    insp = Inspector.from_engine(ctx.connection.engine)
    op.drop_table('oauthclient_remotetoken')
    # Drop the FK to accounts_user explicitly before dropping the table;
    # its auto-generated name is discovered via reflection.
    for fk in insp.get_foreign_keys('oauthclient_useridentity'):
        if fk['referred_table'] == 'accounts_user':
            op.drop_constraint(
                op.f(fk['name']),
                'oauthclient_useridentity',
                type_='foreignkey'
            )
    op.drop_index(
        'useridentity_id_user_method',
        table_name='oauthclient_useridentity')
    op.drop_table('oauthclient_useridentity')
    op.drop_table('oauthclient_remoteaccount')
|
from thumbor.loaders import http_loader
from tornado.concurrent import return_future
from urllib import unquote
def _normalize_url(url):
    """Unquote and re-quote *url*, forcing an https:// scheme."""
    normalized = http_loader.quote_url(unquote(url))
    if normalized.startswith('http:'):
        normalized = 'https:' + normalized[len('http:'):]
    if normalized.startswith('https://'):
        return normalized
    return 'https://%s' % normalized
def validate(context, url):
    """Reject plain-http URLs outright; defer all others to the stock
    http loader's validation (with https normalization applied)."""
    insecure = url.startswith('http://')
    return False if insecure else http_loader.validate(
        context, url, normalize_url_func=_normalize_url)
def return_contents(response, url, callback, context):
    # Thin pass-through to the stock http loader's response handler.
    return http_loader.return_contents(response, url, callback, context)

@return_future
def load(context, url, callback):
    # Delegate to the synchronous loader, forcing https via _normalize_url.
    return http_loader.load_sync(context, url, callback, normalize_url_func=_normalize_url)

def encode(string):
    # Same encoding behavior as the stock http loader.
    return http_loader.encode(string)
|
from swgpy.object import *
def create(kernel):
    """Build the Static template object for the composite armor helmet.

    NOTE(review): *kernel* is accepted but unused here -- presumably part of
    the engine's template-factory signature; confirm against other templates.
    """
    result = Static()
    result.template = "object/static/item/shared_armor_composite_helmet.iff"
    result.attribute_template_id = -1
    result.stfName("obj_n","unknown_object")
    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####
    return result
|
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Test setting the $P4COMSTR variable.
"""
import os.path
import TestSCons
_python_ = TestSCons._python_
test = TestSCons.TestSCons()
test.subdir('Perforce', ['Perforce', 'sub'], 'sub')
sub_Perforce = os.path.join('sub', 'Perforce')
sub_SConscript = os.path.join('sub', 'SConscript')
sub_all = os.path.join('sub', 'all')
sub_ddd_in = os.path.join('sub', 'ddd.in')
sub_ddd_out = os.path.join('sub', 'ddd.out')
sub_eee_in = os.path.join('sub', 'eee.in')
sub_eee_out = os.path.join('sub', 'eee.out')
sub_fff_in = os.path.join('sub', 'fff.in')
sub_fff_out = os.path.join('sub', 'fff.out')
test.write('my-p4.py', """
import shutil
import sys
for f in sys.argv[1:]:
shutil.copy('Perforce/'+f, f)
""")
test.write('SConstruct', """
def cat(env, source, target):
target = str(target[0])
source = map(str, source)
f = open(target, "wb")
for src in source:
f.write(open(src, "rb").read())
f.close()
env = Environment(TOOLS = ['default', 'Perforce'],
BUILDERS={'Cat':Builder(action=cat)},
P4COM='%(_python_)s my-p4.py $TARGET',
P4COMSTR='Checking out $TARGET from our fake Perforce')
env.Cat('aaa.out', 'aaa.in')
env.Cat('bbb.out', 'bbb.in')
env.Cat('ccc.out', 'ccc.in')
env.Cat('all', ['aaa.out', 'bbb.out', 'ccc.out'])
env.SourceCode('.', env.Perforce())
SConscript('sub/SConscript', "env")
""" % locals())
test.write(['Perforce', 'sub', 'SConscript'], """\
Import("env")
env.Cat('ddd.out', 'ddd.in')
env.Cat('eee.out', 'eee.in')
env.Cat('fff.out', 'fff.in')
env.Cat('all', ['ddd.out', 'eee.out', 'fff.out'])
""")
test.write(['Perforce', 'aaa.in'], "Perforce/aaa.in\n")
test.write('bbb.in', "checked-out bbb.in\n")
test.write(['Perforce', 'ccc.in'], "Perforce/ccc.in\n")
test.write(['Perforce', 'sub', 'ddd.in'], "Perforce/sub/ddd.in\n")
test.write(['sub', 'eee.in'], "checked-out sub/eee.in\n")
test.write(['Perforce', 'sub', 'fff.in'], "Perforce/sub/fff.in\n")
test.run(arguments = '.',
stdout = test.wrap_stdout(read_str = """\
Checking out %(sub_SConscript)s from our fake Perforce
""" % locals(),
build_str = """\
Checking out aaa.in from our fake Perforce
cat(["aaa.out"], ["aaa.in"])
cat(["bbb.out"], ["bbb.in"])
Checking out ccc.in from our fake Perforce
cat(["ccc.out"], ["ccc.in"])
cat(["all"], ["aaa.out", "bbb.out", "ccc.out"])
Checking out %(sub_ddd_in)s from our fake Perforce
cat(["%(sub_ddd_out)s"], ["%(sub_ddd_in)s"])
cat(["%(sub_eee_out)s"], ["%(sub_eee_in)s"])
Checking out %(sub_fff_in)s from our fake Perforce
cat(["%(sub_fff_out)s"], ["%(sub_fff_in)s"])
cat(["%(sub_all)s"], ["%(sub_ddd_out)s", "%(sub_eee_out)s", "%(sub_fff_out)s"])
""" % locals()))
test.must_match('all',
"Perforce/aaa.in\nchecked-out bbb.in\nPerforce/ccc.in\n")
test.must_match(['sub', 'all'],
"Perforce/sub/ddd.in\nchecked-out sub/eee.in\nPerforce/sub/fff.in\n")
test.pass_test()
|
from __future__ import unicode_literals
import time
import string
import json
import config
import helper
import busses
def log_message(msg):
    """Append a one-line, column-aligned summary of *msg* to the message log
    and remember it as the chat's latest message.

    Line layout: time(10) type(10) appendix(40) chat_id,message_id
    NOTE: Python 2 module (the trailing .encode before a text-mode write).
    """
    #log format: time type message
    time_str = str(time.time())
    line = time_str[:time_str.find(".")]  # whole seconds only
    # str(" ") keeps the fill char a byte string under unicode_literals (Py2)
    line = line.rjust(10, str(" "))
    line += " "
    busses.status_bus["latest_messages"][msg.chat_id] = msg
    msg_type = helper.get_message_type(msg)
    # texts starting with "/" are bot commands
    if msg_type == "text" and msg.text.startswith("/"):
        msg_type = "command"
    appendix = "ERROR"  # fallback if no branch below matches
    if msg_type == "text":
        appendix = msg.text
    elif msg_type == "command":
        appendix = msg.text[1:]  # drop the leading "/"
    elif msg_type == "location":
        location_data = msg.location.to_dict()
        appendix = str(location_data["latitude"]) + "°, " + str(location_data["longitude"]) + "°"
    elif msg_type == "contact":
        appendix = str(msg.contact.user_id) + " " + msg.contact.first_name + " " + msg.contact.last_name
    elif msg_type == "new_user":
        appendix = str(msg.new_chat_member.id) + " " + str(msg.new_chat_member.first_name) + " " + str(msg.new_chat_member.last_name)
    elif msg_type in ["audio", "document", "game", "photo", "sticker", "video", "voice", "video_note", "unknown"]:
        appendix = ""  # media types carry no loggable text
    msg_type = msg_type.rjust(10, str(" "))
    # escape embedded newlines so each message stays on one log line
    appendix = appendix.replace("\n", "\\n").rjust(40, str(" "))
    line += msg_type + " " + appendix + " "
    line += str(msg.chat_id) + "," + str(msg.message_id)
    line += "\n"
    with open(config.msg_log_file_path, "a") as log_file:
        log_file.write(line.encode("utf-8"))
def complete_log(update):
    """Append the full *update* as a single JSON line (plus a timestamp)
    to the complete-log file.

    json.dumps uses ensure_ascii by default, so the output is plain ASCII
    and can be written to a text-mode file on both Python 2 and 3.
    """
    with open(config.complete_log_file_path, "a") as log_file:
        data = update.to_dict()
        data.update({"time": time.time()})
        json_data = json.dumps(data)
        # BUG FIX: the original appended `"\n".encode("utf-8")` -- the
        # .encode bound only to the newline (precedence), producing a
        # str+bytes mix that raises TypeError on Python 3. The redundant
        # str() wrapper is dropped as well.
        log_file.write(json_data.replace("\n", "\\n") + "\n")
|
import os
import time
import numpy as np
import matplotlib
matplotlib.use('GTKAgg')
from matplotlib import pyplot as plt
from koheron import connect
from drivers import Spectrum
from drivers import Laser
# Live power-spectral-density plot from a koheron 'spectrum' instrument.
host = os.getenv('HOST', '192.168.1.100')
client = connect(host, name='spectrum')
driver = Spectrum(client)
laser = Laser(client)
laser.start()
current = 30  # mA
laser.set_current(current)

wfm_size = 4096
decimation_factor = 1
index_low = 0
# BUG FIX: use floor division so indices / sample counts stay integers
# under Python 3 (plain '/' yields a float there).
index_high = wfm_size // 2

signal = driver.get_decimated_data(decimation_factor, index_low, index_high)
print('Signal')
print(signal)

mhz = 1e6
sampling_rate = 125e6
freq_min = 0
freq_max = sampling_rate / mhz / 2  # Nyquist frequency in MHz

fig = plt.figure()
ax = fig.add_subplot(111)
x = np.linspace(freq_min, freq_max, wfm_size // 2)
print('X')
print(len(x))
y = 10*np.log10(signal)  # power in dB
print('Y')
print(len(y))
li, = ax.plot(x, y)
fig.canvas.draw()
ax.set_xlim((x[0], x[-1]))
ax.set_ylim((0, 200))
ax.set_xlabel('Frequency (MHz)')
ax.set_ylabel('Power spectral density (dB)')

# Refresh the plot until interrupted; Ctrl-C saves the last spectrum
# and shuts the instrument down cleanly.
while True:
    try:
        signal = driver.get_decimated_data(decimation_factor, index_low, index_high)
        li.set_ydata(10*np.log10(signal))
        fig.canvas.draw()
        plt.pause(0.001)
    except KeyboardInterrupt:
        # Save last spectrum in a csv file
        np.savetxt("psd.csv", signal, delimiter=",")
        laser.stop()
        driver.close()
        break
|
# Package version metadata.
__version__ = '0.8.1'
__author__ = "Massimiliano Pippi & Federico Frenguelli"

VERSION = __version__  # synonym
|
"""Show file statistics by extension."""
import os
import sys
class Stats:
def __init__(self):
self.stats = {}
def statargs(self, args):
for arg in args:
if os.path.isdir(arg):
self.statdir(arg)
elif os.path.isfile(arg):
self.statfile(arg)
else:
sys.stderr.write("Can't find %s\n" % file)
self.addstats("<???>", "unknown", 1)
def statdir(self, dir):
self.addstats("<dir>", "dirs", 1)
try:
names = os.listdir(dir)
except os.error, err:
sys.stderr.write("Can't list %s: %s\n" % (file, err))
self.addstats(ext, "unlistable", 1)
return
names.sort()
for name in names:
if name.startswith(".#"):
continue # Skip CVS temp files
if name.endswith("~"):
continue# Skip Emacs backup files
full = os.path.join(dir, name)
if os.path.islink(full):
self.addstats("<lnk>", "links", 1)
elif os.path.isdir(full):
self.statdir(full)
else:
self.statfile(full)
def statfile(self, file):
head, ext = os.path.splitext(file)
head, base = os.path.split(file)
if ext == base:
ext = "" # E.g. .cvsignore is deemed not to have an extension
ext = os.path.normcase(ext)
if not ext:
ext = "<none>"
self.addstats(ext, "files", 1)
try:
f = open(file, "rb")
except IOError, err:
sys.stderr.write("Can't open %s: %s\n" % (file, err))
self.addstats(ext, "unopenable", 1)
return
data = f.read()
f.close()
self.addstats(ext, "bytes", len(data))
if '\0' in data:
self.addstats(ext, "binary", 1)
return
if not data:
self.addstats(ext, "empty", 1)
#self.addstats(ext, "chars", len(data))
lines = data.splitlines()
self.addstats(ext, "lines", len(lines))
del lines
words = data.split()
self.addstats(ext, "words", len(words))
def addstats(self, ext, key, n):
d = self.stats.setdefault(ext, {})
d[key] = d.get(key, 0) + n
def report(self):
exts = self.stats.keys()
exts.sort()
# Get the column keys
columns = {}
for ext in exts:
columns.update(self.stats[ext])
cols = columns.keys()
cols.sort()
colwidth = {}
colwidth["ext"] = max([len(ext) for ext in exts])
minwidth = 6
self.stats["TOTAL"] = {}
for col in cols:
total = 0
cw = max(minwidth, len(col))
for ext in exts:
value = self.stats[ext].get(col)
if value is None:
w = 0
else:
w = len("%d" % value)
total += value
cw = max(cw, w)
cw = max(cw, len(str(total)))
colwidth[col] = cw
self.stats["TOTAL"][col] = total
exts.append("TOTAL")
for ext in exts:
self.stats[ext]["ext"] = ext
cols.insert(0, "ext")
def printheader():
for col in cols:
print "%*s" % (colwidth[col], col),
print
printheader()
for ext in exts:
for col in cols:
value = self.stats[ext].get(col, "")
print "%*s" % (colwidth[col], value),
print
printheader() # Another header at the bottom
def main():
    """Collect and report stats for the command-line paths (default: cwd)."""
    paths = sys.argv[1:] or [os.curdir]
    collector = Stats()
    collector.statargs(paths)
    collector.report()

if __name__ == "__main__":
    main()
|
import os
import pygtk
pygtk.require('2.0')
import gtk
from gtkcodebuffer import CodeBuffer, SyntaxLoader
class Ui(object):
    """
    The user interface. This dialog is the LaTeX input window and includes
    widgets to display compilation logs and a preview. It uses GTK2 which
    must be installed an importable.
    """

    app_name = 'InkTeX'

    # Markup shown in the Help tab (Pango markup, hence the <b> tags).
    help_text = r"""You can set a preamble file and scale factor in the <b>settings</b> tab. The preamble should not include <b>\documentclass</b> and <b>\begin{document}</b>.
The LaTeX code you write is only the stuff between <b>\begin{document}</b> and <b>\end{document}</b>. Compilation errors are reported in the <b>log</b> tab.
The preamble file and scale factor are stored on a per-drawing basis, so in a new document, these information must be set again."""

    about_text = r"""Written by <a href="mailto:janoliver@oelerich.org">Jan Oliver Oelerich <janoliver@oelerich.org></a>"""

    def __init__(self, render_callback, src, settings):
        """Takes the following parameters:
        * render_callback: callback function to execute with "apply" button
        * src: source code that should be pre-inserted into the LaTeX input"""
        self.render_callback = render_callback
        self.src = src if src else ""
        self.settings = settings
        # init the syntax highlighting buffer
        lang = SyntaxLoader("latex")
        self.syntax_buffer = CodeBuffer(lang=lang)
        self.setup_ui()

    def render(self, widget, data=None):
        """Extracts the input LaTeX code and calls the render callback. If that
        returns true, we quit and are happy."""
        buf = self.text.get_buffer()
        tex = buf.get_text(buf.get_start_iter(), buf.get_end_iter())
        # Collect the per-drawing settings to hand back to the caller.
        settings = dict()
        if self.preamble.get_filename():
            settings['preamble'] = self.preamble.get_filename()
        settings['scale'] = self.scale.get_value()
        if self.render_callback(tex, settings):
            gtk.main_quit()
        return False

    def cancel(self, widget, data=None):
        """Close button pressed: Exit"""
        raise SystemExit(1)

    def destroy(self, widget, event, data=None):
        """Destroy hook for the GTK window. Quit and return False."""
        gtk.main_quit()
        return False

    def setup_ui(self):
        """Creates the actual UI."""
        # create a floating toplevel window and set some title and border
        self.window = gtk.Window(gtk.WINDOW_TOPLEVEL)
        self.window.set_type_hint(gtk.gdk.WINDOW_TYPE_HINT_DIALOG)
        self.window.set_title(self.app_name)
        self.window.set_border_width(8)
        # connect delete and destroy events
        self.window.connect("destroy", self.destroy)
        self.window.connect("delete-event", self.destroy)
        # This is our main container, vertically ordered.
        self.box_container = gtk.VBox(False, 5)
        self.box_container.show()
        # Four-tab notebook: LaTeX input, log, settings, help.
        self.notebook = gtk.Notebook()
        self.page_latex = gtk.HBox(False, 5)
        self.page_latex.set_border_width(8)
        self.page_latex.show()
        self.page_log = gtk.HBox(False, 5)
        self.page_log.set_border_width(8)
        self.page_log.show()
        self.page_settings = gtk.HBox(False, 5)
        self.page_settings.set_border_width(8)
        self.page_settings.show()
        self.page_help = gtk.VBox(False, 5)
        self.page_help.set_border_width(8)
        self.page_help.show()
        self.notebook.append_page(self.page_latex, gtk.Label("LaTeX"))
        self.notebook.append_page(self.page_log, gtk.Label("Log"))
        self.notebook.append_page(self.page_settings, gtk.Label("Settings"))
        self.notebook.append_page(self.page_help, gtk.Label("Help"))
        self.notebook.show()
        # First component: The input text view for the LaTeX code.
        # It lives in a ScrolledWindow so we can get some scrollbars when the
        # text is too long.
        self.text = gtk.TextView(self.syntax_buffer)
        self.text.get_buffer().set_text(self.src)
        self.text.show()
        self.text_container = gtk.ScrolledWindow()
        self.text_container.set_policy(gtk.POLICY_AUTOMATIC,
                                       gtk.POLICY_AUTOMATIC)
        self.text_container.set_shadow_type(gtk.SHADOW_IN)
        self.text_container.add(self.text)
        self.text_container.set_size_request(400, 200)
        self.text_container.show()
        self.page_latex.pack_start(self.text_container)
        # Second component: The log view
        self.log_view = gtk.TextView()
        self.log_view.show()
        self.log_container = gtk.ScrolledWindow()
        self.log_container.set_policy(gtk.POLICY_AUTOMATIC,
                                      gtk.POLICY_AUTOMATIC)
        self.log_container.set_shadow_type(gtk.SHADOW_IN)
        self.log_container.add(self.log_view)
        self.log_container.set_size_request(400, 200)
        self.log_container.show()
        self.page_log.pack_start(self.log_container)
        # third component: settings
        self.settings_container = gtk.Table(2, 2)
        self.settings_container.set_row_spacings(8)
        self.settings_container.show()
        self.label_preamble = gtk.Label("Preamble")
        self.label_preamble.set_alignment(0, 0.5)
        self.label_preamble.show()
        # Preamble chooser: restore the stored path if it still exists.
        self.preamble = gtk.FileChooserButton("...")
        if 'preamble' in self.settings and os.path.exists(self.settings['preamble']):
            self.preamble.set_filename(self.settings['preamble'])
        self.preamble.set_action(gtk.FILE_CHOOSER_ACTION_OPEN)
        self.preamble.show()
        self.settings_container.attach(self.label_preamble, yoptions=gtk.SHRINK,
                                       left_attach=0, right_attach=1, top_attach=0, bottom_attach=1)
        self.settings_container.attach(self.preamble, yoptions=gtk.SHRINK,
                                       left_attach=1, right_attach=2, top_attach=0, bottom_attach=1)
        self.label_scale = gtk.Label("Scale")
        self.label_scale.set_alignment(0, 0.5)
        self.label_scale.show()
        # Scale spin button: 0-100 in steps of 0.1, restored from settings.
        self.scale_adjustment = gtk.Adjustment(value=1.0, lower=0, upper=100,
                                               step_incr=0.1)
        self.scale = gtk.SpinButton(adjustment=self.scale_adjustment, digits=1)
        if 'scale' in self.settings:
            self.scale.set_value(float(self.settings['scale']))
        self.scale.show()
        self.settings_container.attach(self.label_scale, yoptions=gtk.SHRINK,
                                       left_attach=0, right_attach=1, top_attach=1, bottom_attach=2)
        self.settings_container.attach(self.scale, yoptions=gtk.SHRINK,
                                       left_attach=1, right_attach=2, top_attach=1, bottom_attach=2)
        self.page_settings.pack_start(self.settings_container)
        # help tab
        self.help_label = gtk.Label()
        self.help_label.set_markup(Ui.help_text)
        self.help_label.set_line_wrap(True)
        self.help_label.show()
        self.about_label = gtk.Label()
        self.about_label.set_markup(Ui.about_text)
        self.about_label.set_line_wrap(True)
        self.about_label.show()
        self.separator_help = gtk.HSeparator()
        self.separator_help.show()
        self.page_help.pack_start(self.help_label)
        self.page_help.pack_start(self.separator_help)
        self.page_help.pack_start(self.about_label)
        self.box_container.pack_start(self.notebook, True, True)
        # separator between buttonbar and notebook
        self.separator_buttons = gtk.HSeparator()
        self.separator_buttons.show()
        self.box_container.pack_start(self.separator_buttons, False, False)
        # the button bar
        self.box_buttons = gtk.HButtonBox()
        self.box_buttons.set_layout(gtk.BUTTONBOX_END)
        self.box_buttons.show()
        self.button_render = gtk.Button(stock=gtk.STOCK_APPLY)
        self.button_cancel = gtk.Button(stock=gtk.STOCK_CLOSE)
        self.button_render.set_flags(gtk.CAN_DEFAULT)
        self.button_render.connect("clicked", self.render, None)
        self.button_cancel.connect("clicked", self.cancel, None)
        self.button_render.show()
        self.button_cancel.show()
        self.box_buttons.pack_end(self.button_cancel)
        self.box_buttons.pack_end(self.button_render)
        self.box_container.pack_start(self.box_buttons, False, False)
        self.window.add(self.box_container)
        # Apply is the default action (activated by Enter).
        self.window.set_default(self.button_render)
        self.window.show()

    def log(self, msg):
        """Show ``msg`` in the log view and switch to the Log tab."""
        buffer = self.log_view.get_buffer()
        buffer.set_text(msg)
        self.notebook.set_current_page(1)

    def main(self):
        """Enter the GTK main loop."""
        gtk.main()
|
from __future__ import unicode_literals
from frappe.model.document import Document
class WebPageBlock(Document):
    """Frappe document controller for 'Web Page Block'; no custom behavior."""
    pass
|
import cloudrobotics.message as message
# Identifiers used in the CRFX routing header.
APP_ID = 'SbrApiServices'
PROCESSING_ID = 'RbAppConversationApi'

class ConversationMessage(message.CRFXMessage):
    """A CRFX message carrying one conversation event for a visitor."""

    def __init__(self, visitor, visitor_id, talkByMe, type):
        super(ConversationMessage, self).__init__()
        # Route as a call to the conversation API processor; the concrete
        # event name travels in MessageId.
        self.header.update({
            'RoutingType': message.ROUTING_TYPE_CALL,
            'AppProcessingId': PROCESSING_ID,
            'MessageId': type,
        })
        self.body = {
            'visitor': visitor,
            'visitor_id': visitor_id,
            'talkByMe': talkByMe,
        }
|
import time
import random
from random import randint
from library import Joystick
import RPi.GPIO as GPIO # remove!!!
from emotions import angry, happy, confused
from library import LEDDisplay
from library import factory
from library import reset_all_hw
# Base duty-cycle magnitude for the leg drive motors.
global_LegMotor = 70

def remote_func(hw, ns):
    """Remote-control mode (state 3): drive the droid from a PS4 joystick.

    ``hw`` maps component names ('dome', 'legs', 'flashlight', 'audio') to
    driver objects; ``ns`` is a shared namespace — the mode runs while
    ``ns.current_state == 3``.
    """
    print("Remote")
    # Start with all motors stopped.
    dome = hw['dome']
    dome.speed(0)
    legs = hw['legs']
    legs.drive(1, 0)
    legs.drive(2, 0)
    flashlight = hw['flashlight']
    audio = hw['audio']
    audio.speak('start')
    # NOTE(review): this loop looks like a motor self-test stub; the
    # ``return`` after it makes the real joystick loop below unreachable
    # dead code. Confirm whether the stub should be removed.
    while ns.current_state == 3:
        print('remote ...')
        spd = random.randint(0, 40)
        legs.drive(1, spd)
        legs.drive(2, spd)
        dome.speed(spd)
        time.sleep(0.5)
        legs.drive(1, 0)
        legs.drive(2, 0)
        dome.speed(0)
        time.sleep(0.1)
    return
    ###### real loop here #####
    # Joystick Initialization
    js = Joystick()
    # NOTE(review): ``leds``, ``servos`` and ``Flash`` used below are not
    # defined in this scope — presumably they come from ``hw``/``ns``
    # (see the commented hint next to happy()); verify before re-enabling.
    while ns.current_state == 3:
        try:
            # Button Initialization
            ps4 = js.get()
            btnSquare = ps4.buttons[0]
            btnTriangle = ps4.buttons[1]
            btnCircle = ps4.buttons[2]
            btnX = ps4.buttons[3]
            btnLeftStickLeftRight = ps4.leftStick.y
            btnLeftStickUpDown = ps4.leftStick.x
            btnRightStickLeftRight = ps4.rightStick.y
            btnRightStickUpDown = ps4.rightStick.x
            Left1 = ps4.shoulder[0]
            Right1 = ps4.shoulder[1]
            Left2 = ps4.triggers.x
            Right2 = ps4.triggers.y
            hat = ps4.hat
            # print("PRINT")
            # Button Controls
            if hat == 1:
                # Happy Emotion
                print("Arrow Up Pressed")
                happy(leds, servos, dome, audio)  # namespace.emotions['happy'](leds, servos, mc, audio)
            if hat == 8:
                # Confused Emotion
                print("Arrow Left Pressed")
                confused(leds, servos, dome, audio)
            if hat == 2:
                # Angry Emotion
                print("Arrow Right Pressed")
                angry(leds, servos, dome, audio)
            if hat == 4:
                print("Arrow Down Pressed")
            if btnSquare == 1:
                # word = random_char(2)
                audio.speak_random(2)
                time.sleep(0.5)
            if btnTriangle == 1:
                # FlashLight ON
                GPIO.output(26, GPIO.HIGH)
                Flash.pwm.set_pwm(15, 0, 130)
            if btnCircle == 1:
                # FlashLight OFF
                GPIO.output(26, GPIO.LOW)
                Flash.pwm.set_pwm(15, 0, 0)
            if btnX == 1:
                # Random sparkle animation on the four LED matrices.
                for x in [0, 1, 2, 3, 4, 5, 6, 7]:
                    for y in [0, 1, 2, 3, 4, 5, 6, 7]:
                        if x == randint(0, 8) or y == randint(0, 8):
                            for i in range(1, 5):
                                leds[i].set(x, y, randint(0, 4))
                        else:
                            for i in range(1, 5):
                                leds[i].set(x, y, 4)
                for i in range(1, 5):
                    leds[i].write()
                time.sleep(0.1)
                for i in range(1, 5):
                    leds[i].clear()
            if Left1 == 1:
                # Dome Motor Forward
                dome.speed(3200)
                time.sleep(2)
                dome.speed(0)
            if Right1 == 1:
                # Dome Motor Backward
                dome.speed(-3200)
                time.sleep(2)
                dome.speed(0)
            # if Left1 == 0 or Right1 == 0:
            #     # Dome Motor Stop
            #     dome.speed(0)
            # if Left2 > 1:
            #     # Servo Open
            #     s0.angle = 0
            #     s1.angle = 0
            #     s2.angle = 0
            #     s3.angle = 0
            #     s4.angle = 0
            #     Flash.pwm.set_pwm(15, 0, 3000)
            #
            # if Right2 > 1:
            #     # Servo Close
            #     s0.angle = 130
            #     s1.angle = 130
            #     s2.angle = 130
            #     s3.angle = 130
            #     s4.angle = 130
            #     Flash.pwm.set_pwm(15, 0, 130)
            if Left2 > 1:
                # Open all servos (trigger fully pressed).
                for s in servos:
                    s.angle = 0
                    time.sleep(0.25)
                Flash.pwm.set_pwm(15, 0, 300)
            if Right2 > 1:
                # Close all servos.
                for s in servos:
                    s.angle = 130
                    time.sleep(0.25)
                Flash.pwm.set_pwm(15, 0, 130)
            # Stick deadzone: stop the corresponding motor near center.
            if btnLeftStickLeftRight < 0.3 and btnLeftStickLeftRight > -0.3:
                legs.drive(1, 0)
            if btnRightStickUpDown < 0.3 and btnRightStickUpDown > -0.3:
                legs.drive(2, 0)
            if btnRightStickUpDown >= 0.3:
                # Right and Left Motor Forward
                legs.drive(1, btnRightStickUpDown*global_LegMotor)
                legs.drive(2, btnRightStickUpDown*-global_LegMotor)
            if btnRightStickUpDown <= -0.3:
                # Right and Left Motor Backward
                legs.drive(1, btnRightStickUpDown*global_LegMotor)
                legs.drive(2, btnRightStickUpDown*-global_LegMotor)
            # NOTE(review): the two turn conditions below both fire for any
            # stick value in (-0.3, 0.3), overriding the stop above; they
            # look like they were meant to be ``<= -0.3`` / ``>= 0.3``.
            if btnLeftStickLeftRight <= 0.3:
                # Turn Left
                legs.drive(1, btnLeftStickLeftRight*(-global_LegMotor))
                legs.drive(2, btnLeftStickLeftRight*-global_LegMotor)
            if btnLeftStickLeftRight >= -0.3:
                # Turn Right
                legs.drive(1, btnLeftStickLeftRight*(-global_LegMotor))
                legs.drive(2, btnLeftStickLeftRight*-global_LegMotor)
        except KeyboardInterrupt:
            print('js exiting ...')
            return
    # exiting, reset all hw
    reset_all_hw(hw)
    return
|
# Sphinx configuration for the PyWIM documentation.
import sys
import os

# Make the project importable so autodoc and the version lookup below work.
cwd = os.getcwd()
project_root = os.path.dirname(cwd)
sys.path.insert(0, project_root)

import pywim

extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'

project = u'PyWIM'
copyright = u"2016, Ivan Ogasawara"

# Version info is read straight from the package, keeping docs in sync.
version = pywim.__version__
release = pywim.__version__

exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = 'default'
html_static_path = ['_static']
htmlhelp_basename = 'pywimdoc'

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #'preamble': '',
}

latex_documents = [
    ('index', 'pywim.tex',
     u'PyWIM Documentation',
     u'Ivan Ogasawara', 'manual'),
]

man_pages = [
    ('index', 'pywim',
     u'PyWIM Documentation',
     [u'Ivan Ogasawara'], 1)
]

texinfo_documents = [
    ('index', 'pywim',
     u'PyWIM Documentation',
     u'Ivan Ogasawara',
     'pywim',
     'One line description of project.',
     'Miscellaneous'),
]
|
# Auto-import every submodule of this package and re-export its public
# members at package level (a plugin-style loader).
__all__ = []

import importlib
import inspect
import pkgutil

for _loader, _mod_name, _is_pkg in pkgutil.walk_packages(__path__):
    # ``find_module().load_module()`` is deprecated and removed in
    # Python 3.12; import through importlib instead, qualified with the
    # package name so the module is registered under its real dotted path.
    module = importlib.import_module('%s.%s' % (__name__, _mod_name))
    for member_name, value in inspect.getmembers(module):
        # Skip dunder names; single-underscore names are re-exported, as
        # the original code did.
        if member_name.startswith('__'):
            continue
        globals()[member_name] = value
        __all__.append(member_name)
|
"""Print tail latencies (p50/p90/p99/p99.9/max) from pipeline statistics."""
import csv

filename = "/tmp/QueryPipelineStatistics.csv"

times = []
with open(filename) as f:
    reader = csv.reader(f)
    header = next(reader)
    # Fail fast if the file layout changed.
    assert header == ['query',
                      'rows',
                      'matches',
                      'quadwords',
                      'cachelines',
                      'parse',
                      'plan',
                      'match']
    for row in reader:
        # Total latency = parse + plan + match (the last three columns).
        total_time = float(row[-1]) + float(row[-2]) + float(row[-3])
        times.append(total_time)

# Guard against an empty file: the index math below would otherwise
# produce negative indices and report garbage (or raise).
if not times:
    raise SystemExit("no data rows in %s" % filename)

# Sort descending so index i is the (i+1)-th slowest query.
times.sort(reverse=True)
idx_max = len(times) - 1
idx = [round(idx_max / 2),      # median
       round(idx_max / 10),     # 90th percentile
       round(idx_max / 100),    # 99th percentile
       round(idx_max / 1000),   # 99.9th percentile
       0]                       # slowest query
tails = [times[x] for x in idx]
print(tails)
|
from django.conf import settings
from django.conf.urls.defaults import patterns, url
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
from django.core.urlresolvers import NoReverseMatch, reverse, resolve, Resolver404
from django.db.models.sql.constants import QUERY_TERMS, LOOKUP_SEP
from django.http import HttpResponse
from django.utils.cache import patch_cache_control
from tastypie.authentication import Authentication
from tastypie.authorization import ReadOnlyAuthorization
from tastypie.bundle import Bundle
from tastypie.cache import NoCache
from tastypie.constants import ALL, ALL_WITH_RELATIONS
from tastypie.exceptions import NotFound, BadRequest, InvalidFilterError, HydrationError, InvalidSortError, ImmediateHttpResponse
from tastypie.fields import *
from tastypie.http import *
from tastypie.paginator import Paginator
from tastypie.serializers import Serializer
from tastypie.throttle import BaseThrottle
from tastypie.utils import is_valid_jsonp_callback_value, dict_strip_unicode_keys, trailing_slash
from tastypie.utils.mime import determine_format, build_content_type
from tastypie.validation import Validation
try:
set
except NameError:
from sets import Set as set
try:
from django.utils.copycompat import deepcopy
from django.views.decorators.csrf import csrf_exempt
except ImportError:
from copy import deepcopy
def csrf_exempt(func):
return func
class ResourceOptions(object):
    """
    A configuration class for ``Resource``.
    Provides sane defaults and the logic needed to augment these settings with
    the internal ``class Meta`` used on ``Resource`` subclasses.
    """
    # Pluggable behaviors; all overridable from ``class Meta``.
    serializer = Serializer()
    authentication = Authentication()
    authorization = ReadOnlyAuthorization()
    cache = NoCache()
    throttle = BaseThrottle()
    validation = Validation()
    # HTTP verbs accepted; list/detail variants default to ``allowed_methods``.
    allowed_methods = ['get', 'post', 'put', 'delete']
    list_allowed_methods = None
    detail_allowed_methods = None
    limit = getattr(settings, 'API_LIMIT_PER_PAGE', 20)
    api_name = None
    resource_name = None
    urlconf_namespace = None
    default_format = 'application/json'
    filtering = {}
    ordering = []
    object_class = None
    queryset = None
    fields = []
    excludes = []
    include_resource_uri = True
    include_absolute_url = False

    def __new__(cls, meta=None):
        overrides = {}
        # Handle overrides.
        if meta:
            for override_name in dir(meta):
                # No internals please.
                if not override_name.startswith('_'):
                    overrides[override_name] = getattr(meta, override_name)
        allowed_methods = overrides.get('allowed_methods', ['get', 'post', 'put', 'delete'])
        if overrides.get('list_allowed_methods', None) is None:
            overrides['list_allowed_methods'] = allowed_methods
        if overrides.get('detail_allowed_methods', None) is None:
            overrides['detail_allowed_methods'] = allowed_methods
        # Deriving ``object_class`` from the queryset saves declaring both.
        if not overrides.get('queryset', None) is None:
            overrides['object_class'] = overrides['queryset'].model
        # Bake the overrides into a throwaway subclass so they behave like
        # ordinary class attributes on the resulting options object.
        return object.__new__(type('ResourceOptions', (cls,), overrides))
class DeclarativeMetaclass(type):
    """Metaclass for ``Resource``: collects declared ``ApiField`` attributes
    into ``base_fields`` (inheriting from parents) and attaches the options
    object built from the inner ``Meta`` class as ``_meta``."""

    def __new__(cls, name, bases, attrs):
        attrs['base_fields'] = {}
        declared_fields = {}
        # Inherit any fields from parent(s).
        try:
            parents = [b for b in bases if issubclass(b, Resource)]
            for p in parents:
                fields = getattr(p, 'base_fields', {})
                for field_name, field_object in fields.items():
                    attrs['base_fields'][field_name] = deepcopy(field_object)
        except NameError:
            # ``Resource`` itself is being constructed; nothing to inherit.
            pass
        # Move field declarations off the class body into declared_fields.
        # (On Python 2, ``attrs.items()`` snapshots, so popping is safe.)
        for field_name, obj in attrs.items():
            if isinstance(obj, ApiField):
                field = attrs.pop(field_name)
                declared_fields[field_name] = field
        attrs['base_fields'].update(declared_fields)
        attrs['declared_fields'] = declared_fields
        new_class = super(DeclarativeMetaclass, cls).__new__(cls, name, bases, attrs)
        opts = getattr(new_class, 'Meta', None)
        new_class._meta = ResourceOptions(opts)
        if not getattr(new_class._meta, 'resource_name', None):
            # No ``resource_name`` provided. Attempt to auto-name the resource.
            class_name = new_class.__name__
            name_bits = [bit for bit in class_name.split('Resource') if bit]
            resource_name = ''.join(name_bits).lower()
            new_class._meta.resource_name = resource_name
        if getattr(new_class._meta, 'include_resource_uri', True):
            if not 'resource_uri' in new_class.base_fields:
                new_class.base_fields['resource_uri'] = CharField(readonly=True)
        elif 'resource_uri' in new_class.base_fields and not 'resource_uri' in attrs:
            # Explicitly disabled and not re-declared: drop the inherited field.
            del(new_class.base_fields['resource_uri'])
        for field_name, field_object in new_class.base_fields.items():
            if hasattr(field_object, 'contribute_to_class'):
                field_object.contribute_to_class(new_class, field_name)
        return new_class
class Resource(object):
"""
Handles the data, request dispatch and responding to requests.
Serialization/deserialization is handled "at the edges" (i.e. at the
beginning/end of the request/response cycle) so that everything internally
is Python data structures.
This class tries to be non-model specific, so it can be hooked up to other
data sources, such as search results, files, other data, etc.
"""
__metaclass__ = DeclarativeMetaclass
def __init__(self, api_name=None):
self.fields = deepcopy(self.base_fields)
if not api_name is None:
self._meta.api_name = api_name
def __getattr__(self, name):
if name in self.fields:
return self.fields[name]
    def wrap_view(self, view):
        """
        Wraps methods so they can be called in a more functional way as well
        as handling exceptions better.
        Note that if ``BadRequest`` or an exception with a ``response`` attr
        are seen, there is special handling to either present a message back
        to the user or return the response traveling with the exception.
        """
        @csrf_exempt
        def wrapper(request, *args, **kwargs):
            try:
                # ``view`` is the *name* of the method to invoke on self.
                callback = getattr(self, view)
                response = callback(request, *args, **kwargs)
                if request.is_ajax():
                    # IE excessively caches XMLHttpRequests, so we're disabling
                    # the browser cache here.
                    # See http://www.enhanceie.com/ie/bugs.asp for details.
                    patch_cache_control(response, no_cache=True)
                return response
            except (BadRequest, ApiFieldError), e:
                return HttpBadRequest(e.args[0])
            except Exception, e:
                # Exceptions carrying a prebuilt ``response`` short-circuit.
                if hasattr(e, 'response'):
                    return e.response
                # A real, non-expected exception.
                # Handle the case where the full traceback is more helpful
                # than the serialized error.
                if settings.DEBUG and getattr(settings, 'TASTYPIE_FULL_DEBUG', False):
                    raise
                # Rather than re-raising, we're going to things similar to
                # what Django does. The difference is returning a serialized
                # error message.
                return self._handle_500(request, e)
        return wrapper
    def _handle_500(self, request, exception):
        """Serialize an unexpected exception into a 500 response.

        In DEBUG, the message and traceback are returned to the client;
        otherwise the admins are mailed and a canned message is returned.
        """
        import traceback
        import sys
        the_trace = '\n'.join(traceback.format_exception(*(sys.exc_info())))
        if settings.DEBUG:
            # NOTE(review): ``exception.message`` is deprecated (Python 2.6+)
            # and absent on Python 3; ``exception.args`` would be safer.
            data = {
                "error_message": exception.message,
                "traceback": the_trace,
            }
            desired_format = self.determine_format(request)
            serialized = self.serialize(request, data, desired_format)
            return HttpApplicationError(content=serialized, content_type=build_content_type(desired_format))
        # When DEBUG is False, send an error message to the admins.
        from django.core.mail import mail_admins
        subject = 'Error (%s IP): %s' % ((request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS and 'internal' or 'EXTERNAL'), request.path)
        try:
            request_repr = repr(request)
        except:
            request_repr = "Request repr() unavailable"
        message = "%s\n\n%s" % (the_trace, request_repr)
        mail_admins(subject, message, fail_silently=True)
        # Prep the data going out.
        data = {
            "error_message": getattr(settings, 'TASTYPIE_CANNED_ERROR', "Sorry, this request could not be processed. Please try again later."),
        }
        desired_format = self.determine_format(request)
        serialized = self.serialize(request, data, desired_format)
        return HttpApplicationError(content=serialized, content_type=build_content_type(desired_format))
    def _build_reverse_url(self, name, args=None, kwargs=None):
        """
        A convenience hook for overriding how URLs are built.
        See ``NamespacedModelResource._build_reverse_url`` for an example.
        """
        return reverse(name, args=args, kwargs=kwargs)
    def base_urls(self):
        """
        The standard URLs this ``Resource`` should respond to.

        In order: list endpoint, schema, multiple-pk "set" lookup, and the
        single-object detail endpoint.
        """
        # Due to the way Django parses URLs, ``get_multiple`` won't work without
        # a trailing slash.
        return [
            url(r"^(?P<resource_name>%s)%s$" % (self._meta.resource_name, trailing_slash()), self.wrap_view('dispatch_list'), name="api_dispatch_list"),
            url(r"^(?P<resource_name>%s)/schema%s$" % (self._meta.resource_name, trailing_slash()), self.wrap_view('get_schema'), name="api_get_schema"),
            url(r"^(?P<resource_name>%s)/set/(?P<pk_list>\w[\w/;-]*)/$" % self._meta.resource_name, self.wrap_view('get_multiple'), name="api_get_multiple"),
            url(r"^(?P<resource_name>%s)/(?P<pk>\w[\w/-]*)%s$" % (self._meta.resource_name, trailing_slash()), self.wrap_view('dispatch_detail'), name="api_dispatch_detail"),
        ]
    def override_urls(self):
        """
        A hook for adding your own URLs or overriding the default URLs.

        Returned URLs take precedence over ``base_urls()`` (see ``urls``).
        """
        return []
    @property
    def urls(self):
        """
        The endpoints this ``Resource`` responds to.
        Mostly a standard URLconf, this is suitable for either automatic use
        when registered with an ``Api`` class or for including directly in
        a URLconf should you choose to.
        """
        # Overrides come first so they win over the defaults.
        urls = self.override_urls() + self.base_urls()
        urlpatterns = patterns('',
            *urls
        )
        return urlpatterns
    def determine_format(self, request):
        """
        Used to determine the desired format.
        Largely relies on ``tastypie.utils.mime.determine_format`` but here
        as a point of extension.
        """
        return determine_format(request, self._meta.serializer, default_format=self._meta.default_format)
    def serialize(self, request, data, format, options=None):
        """
        Given a request, data and a desired format, produces a serialized
        version suitable for transfer over the wire.
        Mostly a hook, this uses the ``Serializer`` from ``Resource._meta``.
        """
        options = options or {}
        if 'text/javascript' in format:
            # get JSONP callback name. default to "callback"
            callback = request.GET.get('callback', 'callback')
            # Reject callback names that aren't valid JS identifiers (XSS guard).
            if not is_valid_jsonp_callback_value(callback):
                raise BadRequest('JSONP callback name is invalid.')
            options['callback'] = callback
        return self._meta.serializer.serialize(data, format, options)
    def deserialize(self, request, data, format='application/json'):
        """
        Given a request, data and a format, deserializes the given data.
        It relies on the request properly sending a ``CONTENT_TYPE`` header,
        falling back to ``application/json`` if not provided.
        Mostly a hook, this uses the ``Serializer`` from ``Resource._meta``.
        """
        # NOTE(review): the ``format`` parameter is ignored; the effective
        # format always comes from the request's CONTENT_TYPE header.
        return self._meta.serializer.deserialize(data, format=request.META.get('CONTENT_TYPE', 'application/json'))
    def dispatch_list(self, request, **kwargs):
        """
        A view for handling the various HTTP methods (GET/POST/PUT/DELETE) over
        the entire list of resources.
        Relies on ``Resource.dispatch`` for the heavy-lifting.
        """
        return self.dispatch('list', request, **kwargs)
    def dispatch_detail(self, request, **kwargs):
        """
        A view for handling the various HTTP methods (GET/POST/PUT/DELETE) on
        a single resource.
        Relies on ``Resource.dispatch`` for the heavy-lifting.
        """
        return self.dispatch('detail', request, **kwargs)
    def dispatch(self, request_type, request, **kwargs):
        """
        Handles the common operations (allowed HTTP method, authentication,
        throttling, method lookup) surrounding most CRUD interactions.

        ``request_type`` is 'list' or 'detail'; the handler looked up is
        e.g. ``get_list`` or ``put_detail``.
        """
        allowed_methods = getattr(self._meta, "%s_allowed_methods" % request_type, None)
        request_method = self.method_check(request, allowed=allowed_methods)
        method = getattr(self, "%s_%s" % (request_method, request_type), None)
        if method is None:
            raise ImmediateHttpResponse(response=HttpNotImplemented())
        self.is_authenticated(request)
        self.is_authorized(request)
        self.throttle_check(request)
        # All clear. Process the request.
        # NOTE: ``convert_post_to_put`` is defined elsewhere in this module.
        request = convert_post_to_put(request)
        response = method(request, **kwargs)
        # Add the throttled request.
        self.log_throttled_access(request)
        # If what comes back isn't a ``HttpResponse``, assume that the
        # request was accepted and that some action occurred. This also
        # prevents Django from freaking out.
        if not isinstance(response, HttpResponse):
            return HttpAccepted()
        return response
def remove_api_resource_names(self, url_dict):
"""
Given a dictionary of regex matches from a URLconf, removes
``api_name`` and/or ``resource_name`` if found.
This is useful for converting URLconf matches into something suitable
for data lookup. For example::
Model.objects.filter(**self.remove_api_resource_names(matches))
"""
kwargs_subset = url_dict.copy()
for key in ['api_name', 'resource_name']:
try:
del(kwargs_subset[key])
except KeyError:
pass
return kwargs_subset
def method_check(self, request, allowed=None):
"""
Ensures that the HTTP method used on the request is allowed to be
handled by the resource.
Takes an ``allowed`` parameter, which should be a list of lowercase
HTTP methods to check against. Usually, this looks like::
# The most generic lookup.
self.method_check(request, self._meta.allowed_methods)
# A lookup against what's allowed for list-type methods.
self.method_check(request, self._meta.list_allowed_methods)
# A useful check when creating a new endpoint that only handles
# GET.
self.method_check(request, ['get'])
"""
if allowed is None:
allowed = []
request_method = request.method.lower()
if not request_method in allowed:
raise ImmediateHttpResponse(response=HttpMethodNotAllowed())
return request_method
def is_authorized(self, request, object=None):
"""
Handles checking of permissions to see if the user has authorization
to GET, POST, PUT, or DELETE this resource. If ``object`` is provided,
the authorization backend can apply additional row-level permissions
checking.
"""
auth_result = self._meta.authorization.is_authorized(request, object)
if isinstance(auth_result, HttpResponse):
raise ImmediateHttpResponse(response=auth_result)
if not auth_result is True:
raise ImmediateHttpResponse(response=HttpUnauthorized())
def is_authenticated(self, request):
"""
Handles checking if the user is authenticated and dealing with
unauthenticated users.
Mostly a hook, this uses class assigned to ``authentication`` from
``Resource._meta``.
"""
# Authenticate the request as needed.
auth_result = self._meta.authentication.is_authenticated(request)
if isinstance(auth_result, HttpResponse):
raise ImmediateHttpResponse(response=auth_result)
if not auth_result is True:
raise ImmediateHttpResponse(response=HttpUnauthorized())
    def throttle_check(self, request):
        """
        Handles checking if the user should be throttled.
        Mostly a hook, this uses class assigned to ``throttle`` from
        ``Resource._meta``.
        """
        identifier = self._meta.authentication.get_identifier(request)
        # Check to see if they should be throttled.
        if self._meta.throttle.should_be_throttled(identifier):
            # Throttle limit exceeded.
            raise ImmediateHttpResponse(response=HttpForbidden())
    def log_throttled_access(self, request):
        """
        Handles the recording of the user's access for throttling purposes.
        Mostly a hook, this uses class assigned to ``throttle`` from
        ``Resource._meta``.
        """
        request_method = request.method.lower()
        self._meta.throttle.accessed(self._meta.authentication.get_identifier(request), url=request.get_full_path(), request_method=request_method)
    def build_bundle(self, obj=None, data=None):
        """
        Given either an object, a data dictionary or both, builds a ``Bundle``
        for use throughout the ``dehydrate/hydrate`` cycle.
        If no object is provided, an empty object from
        ``Resource._meta.object_class`` is created so that attempts to access
        ``bundle.obj`` do not fail.
        """
        if obj is None:
            obj = self._meta.object_class()
        return Bundle(obj, data)
    def build_filters(self, filters=None):
        """
        Allows for the filtering of applicable objects.
        This needs to be implemented at the user level.
        ``ModelResource`` includes a full working version specific to Django's
        ``Models``.
        """
        # Default implementation: pass the filters through untouched.
        return filters
    def apply_sorting(self, obj_list, options=None):
        """
        Allows for the sorting of objects being returned.
        This needs to be implemented at the user level.
        ``ModelResource`` includes a full working version specific to Django's
        ``Models``.
        """
        # Default implementation: no sorting.
        return obj_list
# URL-related methods.
    def get_resource_uri(self, bundle_or_obj):
        """
        Returns the detail URI for a single bundle or object.

        This needs to be implemented at the user level; a
        ``return reverse("api_dispatch_detail", kwargs={'resource_name':
        self.resource_name, 'pk': object.id})`` should be all that would
        be needed.

        ``ModelResource`` includes a full working version specific to Django's
        ``Models``. The base class always raises ``NotImplementedError``.
        """
        raise NotImplementedError()
def get_resource_list_uri(self):
"""
Returns a URL specific to this resource's list endpoint.
"""
kwargs = {
'resource_name': self._meta.resource_name,
}
if self._meta.api_name is not None:
kwargs['api_name'] = self._meta.api_name
try:
return self._build_reverse_url("api_dispatch_list", kwargs=kwargs)
except NoReverseMatch:
return None
def get_via_uri(self, uri):
"""
This pulls apart the salient bits of the URI and populates the
resource via a ``obj_get``.
If you need custom behavior based on other portions of the URI,
simply override this method.
"""
try:
view, args, kwargs = resolve(uri)
except Resolver404:
raise NotFound("The URL provided '%s' was not a link to a valid resource." % uri)
return self.obj_get(**self.remove_api_resource_names(kwargs))
# Data preparation.
def full_dehydrate(self, obj):
"""
Given an object instance, extract the information from it to populate
the resource.
"""
bundle = Bundle(obj=obj)
# Dehydrate each field.
for field_name, field_object in self.fields.items():
# A touch leaky but it makes URI resolution work.
if isinstance(field_object, RelatedField):
field_object.api_name = self._meta.api_name
field_object.resource_name = self._meta.resource_name
bundle.data[field_name] = field_object.dehydrate(bundle)
# Check for an optional method to do further dehydration.
method = getattr(self, "dehydrate_%s" % field_name, None)
if method:
bundle.data[field_name] = method(bundle)
bundle = self.dehydrate(bundle)
return bundle
    def dehydrate(self, bundle):
        """
        A hook to allow a final manipulation of data once all fields/methods
        have built out the dehydrated data.

        Useful if you need to access more than one dehydrated field or want
        to annotate on additional data.

        Must return the modified bundle. The base implementation is a no-op.
        """
        return bundle
def full_hydrate(self, bundle):
"""
Given a populated bundle, distill it and turn it back into
a full-fledged object instance.
"""
if bundle.obj is None:
bundle.obj = self._meta.object_class()
for field_name, field_object in self.fields.items():
if field_object.attribute:
value = field_object.hydrate(bundle)
if value is not None:
# We need to avoid populating M2M data here as that will
# cause things to blow up.
if not getattr(field_object, 'is_related', False):
setattr(bundle.obj, field_object.attribute, value)
elif not getattr(field_object, 'is_m2m', False):
setattr(bundle.obj, field_object.attribute, value.obj)
# Check for an optional method to do further hydration.
method = getattr(self, "hydrate_%s" % field_name, None)
if method:
bundle = method(bundle)
bundle = self.hydrate(bundle)
return bundle
    def hydrate(self, bundle):
        """
        A hook to allow a final manipulation of data once all fields/methods
        have built out the hydrated data.

        Useful if you need to access more than one hydrated field or want
        to annotate on additional data.

        Must return the modified bundle. The base implementation is a no-op.
        """
        return bundle
def hydrate_m2m(self, bundle):
"""
Populate the ManyToMany data on the instance.
"""
if bundle.obj is None:
raise HydrationError("You must call 'full_hydrate' before attempting to run 'hydrate_m2m' on %r." % self)
for field_name, field_object in self.fields.items():
if not getattr(field_object, 'is_m2m', False):
continue
if field_object.attribute:
# Note that we only hydrate the data, leaving the instance
# unmodified. It's up to the user's code to handle this.
# The ``ModelResource`` provides a working baseline
# in this regard.
bundle.data[field_name] = field_object.hydrate_m2m(bundle)
for field_name, field_object in self.fields.items():
if not getattr(field_object, 'is_m2m', False):
continue
method = getattr(self, "hydrate_%s" % field_name, None)
if method:
method(bundle)
return bundle
def build_schema(self):
"""
Returns a dictionary of all the fields on the resource and some
properties about those fields.
Used by the ``schema/`` endpoint to describe what will be available.
"""
data = {
'fields': {},
'default_format': self._meta.default_format,
}
if self._meta.ordering:
data['ordering'] = self._meta.ordering
if self._meta.filtering:
data['filtering'] = self._meta.filtering
for field_name, field_object in self.fields.items():
data['fields'][field_name] = {
'type': field_object.dehydrated_type,
'nullable': field_object.null,
'readonly': field_object.readonly,
'help_text': field_object.help_text,
}
return data
def dehydrate_resource_uri(self, bundle):
"""
For the automatically included ``resource_uri`` field, dehydrate
the URI for the given bundle.
Returns empty string if no URI can be generated.
"""
try:
return self.get_resource_uri(bundle)
except NotImplementedError:
return ''
except NoReverseMatch:
return ''
def generate_cache_key(self, *args, **kwargs):
"""
Creates a unique-enough cache key.
This is based off the current api_name/resource_name/args/kwargs.
"""
smooshed = []
for key, value in kwargs.items():
smooshed.append("%s=%s" % (key, value))
# Use a list plus a ``.join()`` because it's faster than concatenation.
return "%s:%s:%s:%s" % (self._meta.api_name, self._meta.resource_name, ':'.join(args), ':'.join(smooshed))
# Data access methods.
    def get_object_list(self, request):
        """
        A hook to allow making returning the list of available objects.

        This needs to be implemented at the user level.

        ``ModelResource`` includes a full working version specific to Django's
        ``Models``. The base class always raises ``NotImplementedError``.
        """
        raise NotImplementedError()
def apply_authorization_limits(self, request, object_list):
"""
Allows the ``Authorization`` class to further limit the object list.
Also a hook to customize per ``Resource``.
"""
if hasattr(self._meta.authorization, 'apply_limits'):
object_list = self._meta.authorization.apply_limits(request, object_list)
return object_list
    def obj_get_list(self, request=None, **kwargs):
        """
        Fetches the list of objects available on the resource.

        This needs to be implemented at the user level.

        ``ModelResource`` includes a full working version specific to Django's
        ``Models``. The base class always raises ``NotImplementedError``.
        """
        raise NotImplementedError()
def cached_obj_get_list(self, request=None, **kwargs):
"""
A version of ``obj_get_list`` that uses the cache as a means to get
commonly-accessed data faster.
"""
cache_key = self.generate_cache_key('list', **kwargs)
obj_list = self._meta.cache.get(cache_key)
if obj_list is None:
obj_list = self.obj_get_list(request=request, **kwargs)
self._meta.cache.set(cache_key, obj_list)
return obj_list
    def obj_get(self, request=None, **kwargs):
        """
        Fetches an individual object on the resource.

        This needs to be implemented at the user level. If the object can not
        be found, this should raise a ``NotFound`` exception.

        ``ModelResource`` includes a full working version specific to Django's
        ``Models``. The base class always raises ``NotImplementedError``.
        """
        raise NotImplementedError()
def cached_obj_get(self, request=None, **kwargs):
"""
A version of ``obj_get`` that uses the cache as a means to get
commonly-accessed data faster.
"""
cache_key = self.generate_cache_key('detail', **kwargs)
bundle = self._meta.cache.get(cache_key)
if bundle is None:
bundle = self.obj_get(request=request, **kwargs)
self._meta.cache.set(cache_key, bundle)
return bundle
    def obj_create(self, bundle, request=None, **kwargs):
        """
        Creates a new object based on the provided data.

        This needs to be implemented at the user level.

        ``ModelResource`` includes a full working version specific to Django's
        ``Models``. The base class always raises ``NotImplementedError``.
        """
        raise NotImplementedError()
    def obj_update(self, bundle, request=None, **kwargs):
        """
        Updates an existing object (or creates a new object) based on the
        provided data.

        This needs to be implemented at the user level.

        ``ModelResource`` includes a full working version specific to Django's
        ``Models``. The base class always raises ``NotImplementedError``.
        """
        raise NotImplementedError()
    def obj_delete_list(self, request=None, **kwargs):
        """
        Deletes an entire list of objects.

        This needs to be implemented at the user level.

        ``ModelResource`` includes a full working version specific to Django's
        ``Models``. The base class always raises ``NotImplementedError``.
        """
        raise NotImplementedError()
    def obj_delete(self, request=None, **kwargs):
        """
        Deletes a single object.

        This needs to be implemented at the user level.

        ``ModelResource`` includes a full working version specific to Django's
        ``Models``. The base class always raises ``NotImplementedError``.
        """
        raise NotImplementedError()
def create_response(self, request, data):
"""
Extracts the common "which-format/serialize/return-response" cycle.
Mostly a useful shortcut/hook.
"""
desired_format = self.determine_format(request)
serialized = self.serialize(request, data, desired_format)
return HttpResponse(content=serialized, content_type=build_content_type(desired_format))
def is_valid(self, bundle, request=None):
"""
Handles checking if the data provided by the user is valid.
Mostly a hook, this uses class assigned to ``validation`` from
``Resource._meta``.
If validation fails, an error is raised with the error messages
serialized inside it.
"""
errors = self._meta.validation.is_valid(bundle, request)
if len(errors):
if request:
desired_format = self.determine_format(request)
else:
desired_format = self._meta.default_format
serialized = self.serialize(request, errors, desired_format)
response = HttpBadRequest(content=serialized, content_type=build_content_type(desired_format))
raise ImmediateHttpResponse(response=response)
    def rollback(self, bundles):
        """
        Given the list of bundles, delete all objects pertaining to those
        bundles.

        This needs to be implemented at the user level. No exceptions should
        be raised if possible.

        ``ModelResource`` includes a full working version specific to Django's
        ``Models``. The base class always raises ``NotImplementedError``.
        """
        raise NotImplementedError()
# Views.
def get_list(self, request, **kwargs):
"""
Returns a serialized list of resources.
Calls ``obj_get_list`` to provide the data, then handles that result
set and serializes it.
Should return a HttpResponse (200 OK).
"""
# TODO: Uncached for now. Invalidation that works for everyone may be
# impossible.
objects = self.obj_get_list(request=request, **self.remove_api_resource_names(kwargs))
sorted_objects = self.apply_sorting(objects, options=request.GET)
paginator = Paginator(request.GET, sorted_objects, resource_uri=self.get_resource_list_uri(),
limit=self._meta.limit)
to_be_serialized = paginator.page()
# Dehydrate the bundles in preparation for serialization.
to_be_serialized['objects'] = [self.full_dehydrate(obj=obj) for obj in to_be_serialized['objects']]
return self.create_response(request, to_be_serialized)
def get_detail(self, request, **kwargs):
"""
Returns a single serialized resource.
Calls ``cached_obj_get/obj_get`` to provide the data, then handles that result
set and serializes it.
Should return a HttpResponse (200 OK).
"""
try:
obj = self.cached_obj_get(request=request, **self.remove_api_resource_names(kwargs))
except ObjectDoesNotExist:
return HttpGone()
except MultipleObjectsReturned:
return HttpMultipleChoices("More than one resource is found at this URI.")
bundle = self.full_dehydrate(obj)
return self.create_response(request, bundle)
def put_list(self, request, **kwargs):
"""
Replaces a collection of resources with another collection.
Calls ``delete_list`` to clear out the collection then ``obj_create``
with the provided the data to create the new collection.
Return ``HttpAccepted`` (204 No Content).
"""
deserialized = self.deserialize(request, request.raw_post_data, format=request.META.get('CONTENT_TYPE', 'application/json'))
if not 'objects' in deserialized:
raise BadRequest("Invalid data sent.")
self.obj_delete_list(request=request, **self.remove_api_resource_names(kwargs))
bundles_seen = []
for object_data in deserialized['objects']:
bundle = self.build_bundle(data=dict_strip_unicode_keys(object_data))
# Attempt to be transactional, deleting any previously created
# objects if validation fails.
try:
self.is_valid(bundle, request)
except ImmediateHttpResponse:
self.rollback(bundles_seen)
raise
self.obj_create(bundle, request=request)
bundles_seen.append(bundle)
return HttpAccepted()
def put_detail(self, request, **kwargs):
"""
Either updates an existing resource or creates a new one with the
provided data.
Calls ``obj_update`` with the provided data first, but falls back to
``obj_create`` if the object does not already exist.
If a new resource is created, return ``HttpCreated`` (201 Created).
If an existing resource is modified, return ``HttpAccepted`` (204 No Content).
"""
deserialized = self.deserialize(request, request.raw_post_data, format=request.META.get('CONTENT_TYPE', 'application/json'))
bundle = self.build_bundle(data=dict_strip_unicode_keys(deserialized))
self.is_valid(bundle, request)
try:
updated_bundle = self.obj_update(bundle, request=request, pk=kwargs.get('pk'))
return HttpAccepted()
except:
updated_bundle = self.obj_create(bundle, request=request, pk=kwargs.get('pk'))
return HttpCreated(location=self.get_resource_uri(updated_bundle))
def post_list(self, request, **kwargs):
"""
Creates a new resource/object with the provided data.
Calls ``obj_create`` with the provided data and returns a response
with the new resource's location.
If a new resource is created, return ``HttpCreated`` (201 Created).
"""
deserialized = self.deserialize(request, request.raw_post_data, format=request.META.get('CONTENT_TYPE', 'application/json'))
bundle = self.build_bundle(data=dict_strip_unicode_keys(deserialized))
self.is_valid(bundle, request)
updated_bundle = self.obj_create(bundle, request=request)
return HttpCreated(location=self.get_resource_uri(updated_bundle))
    def post_detail(self, request, **kwargs):
        """
        Creates a new subcollection of the resource under a resource.

        This is not implemented by default because most people's data models
        aren't self-referential.

        Returns ``HttpNotImplemented`` (501) unless overridden.
        """
        return HttpNotImplemented()
def delete_list(self, request, **kwargs):
"""
Destroys a collection of resources/objects.
Calls ``obj_delete_list``.
If the resources are deleted, return ``HttpAccepted`` (204 No Content).
"""
self.obj_delete_list(request=request, **self.remove_api_resource_names(kwargs))
return HttpAccepted()
def delete_detail(self, request, **kwargs):
"""
Destroys a single resource/object.
Calls ``obj_delete``.
If the resource is deleted, return ``HttpAccepted`` (204 No Content).
If the resource did not exist, return ``HttpGone`` (410 Gone).
"""
try:
self.obj_delete(request=request, **self.remove_api_resource_names(kwargs))
return HttpAccepted()
except NotFound:
return HttpGone()
    def get_schema(self, request, **kwargs):
        """
        Returns a serialized form of the schema of the resource.

        Calls ``build_schema`` to generate the data. This method only responds
        to HTTP GET.

        Should return a HttpResponse (200 OK).
        """
        self.method_check(request, allowed=['get'])
        self.is_authenticated(request)
        self.throttle_check(request)
        # Record the hit only after the auth/throttle gates have passed.
        self.log_throttled_access(request)
        return self.create_response(request, self.build_schema())
def get_multiple(self, request, **kwargs):
"""
Returns a serialized list of resources based on the identifiers
from the URL.
Calls ``obj_get`` to fetch only the objects requested. This method
only responds to HTTP GET.
Should return a HttpResponse (200 OK).
"""
self.method_check(request, allowed=['get'])
self.is_authenticated(request)
self.throttle_check(request)
# Rip apart the list then iterate.
obj_pks = kwargs.get('pk_list', '').split(';')
objects = []
not_found = []
for pk in obj_pks:
try:
obj = self.obj_get(request, pk=pk)
bundle = self.full_dehydrate(obj)
objects.append(bundle)
except ObjectDoesNotExist:
not_found.append(pk)
object_list = {
'objects': objects,
}
if len(not_found):
object_list['not_found'] = not_found
self.log_throttled_access(request)
return self.create_response(request, object_list)
class ModelDeclarativeMetaclass(DeclarativeMetaclass):
    """
    Metaclass for ``ModelResource``.

    Prunes ``base_fields`` according to the declared ``fields``/``excludes``
    options, merges in fields introspected from the model, and manages the
    optional ``absolute_url`` field.
    """
    def __new__(cls, name, bases, attrs):
        new_class = super(ModelDeclarativeMetaclass, cls).__new__(cls, name, bases, attrs)
        fields = getattr(new_class._meta, 'fields', [])
        excludes = getattr(new_class._meta, 'excludes', [])
        # Materialize the key list up front: entries may be deleted from
        # ``base_fields`` in the loop below, and iterating a live ``keys()``
        # view while mutating the dict raises RuntimeError under Python 3.
        field_names = list(new_class.base_fields.keys())

        for field_name in field_names:
            if field_name == 'resource_uri':
                continue

            if field_name in new_class.declared_fields:
                continue

            if len(fields) and not field_name in fields:
                del(new_class.base_fields[field_name])

            if len(excludes) and field_name in excludes:
                del(new_class.base_fields[field_name])

        # Add in the new fields.
        new_class.base_fields.update(new_class.get_fields(fields, excludes))

        if getattr(new_class._meta, 'include_absolute_url', True):
            if not 'absolute_url' in new_class.base_fields:
                new_class.base_fields['absolute_url'] = CharField(attribute='get_absolute_url', readonly=True)
        elif 'absolute_url' in new_class.base_fields and not 'absolute_url' in attrs:
            del(new_class.base_fields['absolute_url'])

        return new_class
class ModelResource(Resource):
    """
    A subclass of ``Resource`` designed to work with Django's ``Models``.

    This class will introspect a given ``Model`` and build a field list based
    on the fields found on the model (excluding relational fields).

    Given that it is aware of Django's ORM, it also handles the CRUD data
    operations of the resource.
    """
    __metaclass__ = ModelDeclarativeMetaclass

    @classmethod
    def should_skip_field(cls, field):
        """
        Given a Django model field, return if it should be included in the
        contributed ApiFields.
        """
        # Ignore certain fields (related fields).
        if getattr(field, 'rel'):
            return True

        return False

    @classmethod
    def api_field_from_django_field(cls, f, default=CharField):
        """
        Returns the field type that would likely be associated with each
        Django type.
        """
        # Look the internal type up once instead of re-calling
        # ``get_internal_type()`` for every branch below.
        internal_type = f.get_internal_type()
        result = default

        if internal_type in ('DateField', 'DateTimeField'):
            result = DateTimeField
        elif internal_type in ('BooleanField', 'NullBooleanField'):
            result = BooleanField
        elif internal_type in ('DecimalField', 'FloatField'):
            result = FloatField
        elif internal_type in ('IntegerField', 'PositiveIntegerField', 'PositiveSmallIntegerField', 'SmallIntegerField'):
            result = IntegerField
        elif internal_type in ('FileField', 'ImageField'):
            result = FileField
        # TODO: Perhaps enable these via introspection. The reason they're not enabled
        #       by default is the very different ``__init__`` they have over
        #       the other fields.
        # elif internal_type == 'ForeignKey':
        #     result = ForeignKey
        # elif internal_type == 'ManyToManyField':
        #     result = ManyToManyField

        return result

    @classmethod
    def get_fields(cls, fields=None, excludes=None):
        """
        Given any explicit fields to include and fields to exclude, add
        additional fields based on the associated model.
        """
        final_fields = {}
        fields = fields or []
        excludes = excludes or []

        if not cls._meta.object_class:
            return final_fields

        for f in cls._meta.object_class._meta.fields:
            # If the field name is already present, skip
            if f.name in cls.base_fields:
                continue

            # If field is not present in explicit field listing, skip
            if fields and f.name not in fields:
                continue

            # If field is in exclude list, skip
            if excludes and f.name in excludes:
                continue

            if cls.should_skip_field(f):
                continue

            api_field_class = cls.api_field_from_django_field(f)
            kwargs = {
                'attribute': f.name,
            }

            if f.null is True:
                kwargs['null'] = True

            kwargs['unique'] = f.unique

            # Blankable and text fields fall back to an empty-string default
            # so missing data still passes model validation.
            if not f.null and f.blank is True:
                kwargs['default'] = ''

            if f.get_internal_type() == 'TextField':
                kwargs['default'] = ''

            if f.has_default():
                kwargs['default'] = f.default

            final_fields[f.name] = api_field_class(**kwargs)
            final_fields[f.name].instance_name = f.name

        return final_fields

    def build_filters(self, filters=None):
        """
        Given a dictionary of filters, create the necessary ORM-level filters.

        Keys should be resource fields, **NOT** model fields.

        Valid values are either a list of Django filter types (i.e.
        ``['startswith', 'exact', 'lte']``), the ``ALL`` constant or the
        ``ALL_WITH_RELATIONS`` constant.
        """
        # At the declarative level:
        #     filtering = {
        #         'resource_field_name': ['exact', 'startswith', 'endswith', 'contains'],
        #         'resource_field_name_2': ['exact', 'gt', 'gte', 'lt', 'lte', 'range'],
        #         'resource_field_name_3': ALL,
        #         'resource_field_name_4': ALL_WITH_RELATIONS,
        #         ...
        #     }
        # Accepts the filters as a dict. None by default, meaning no filters.
        if filters is None:
            filters = {}

        qs_filters = {}

        for filter_expr, value in filters.items():
            filter_bits = filter_expr.split(LOOKUP_SEP)

            if not filter_bits[0] in self.fields:
                # It's not a field we know about. Move along citizen.
                continue

            if not filter_bits[0] in self._meta.filtering:
                raise InvalidFilterError("The '%s' field does not allow filtering." % filter_bits[0])

            if filter_bits[-1] in QUERY_TERMS.keys():
                filter_type = filter_bits.pop()
            else:
                filter_type = 'exact'

            # Check to see if it's allowed lookup type.
            if not self._meta.filtering[filter_bits[0]] in (ALL, ALL_WITH_RELATIONS):
                # Must be an explicit whitelist.
                if not filter_type in self._meta.filtering[filter_bits[0]]:
                    raise InvalidFilterError("'%s' is not an allowed filter on the '%s' field." % (filter_expr, filter_bits[0]))

            # Check to see if it's a relational lookup and if that's allowed.
            if len(filter_bits) > 1:
                if not self._meta.filtering[filter_bits[0]] == ALL_WITH_RELATIONS:
                    raise InvalidFilterError("Lookups are not allowed more than one level deep on the '%s' field." % filter_bits[0])

            if self.fields[filter_bits[0]].attribute is None:
                raise InvalidFilterError("The '%s' field has no 'attribute' for searching with." % filter_bits[0])

            # Normalize common boolean/None spellings off the query string
            # into real Python values.
            if value in ['true', 'True', True]:
                value = True
            elif value in ['false', 'False', False]:
                value = False
            elif value in ('nil', 'none', 'None', None):
                value = None

            db_field_name = LOOKUP_SEP.join([self.fields[filter_bits[0]].attribute] + filter_bits[1:])
            qs_filter = "%s%s%s" % (db_field_name, LOOKUP_SEP, filter_type)
            qs_filters[qs_filter] = value

        return dict_strip_unicode_keys(qs_filters)

    def apply_sorting(self, obj_list, options=None):
        """
        Given a dictionary of options, apply some ORM-level sorting to the
        provided ``QuerySet``.

        Looks for the ``sort_by`` key and handles either ascending (just the
        field name) or descending (the field name with a ``-`` in front).

        The field name should be the resource field, **NOT** model field.
        """
        if options is None:
            options = {}

        if not 'sort_by' in options:
            # Nothing to alter the sort order. Return what we've got.
            return obj_list

        order_by_args = []

        # ``QueryDict`` supports repeated keys; plain dicts do not.
        if hasattr(options, 'getlist'):
            sort_bits = options.getlist('sort_by')
        else:
            sort_bits = options.get('sort_by')

        if not isinstance(sort_bits, (list, tuple)):
            sort_bits = [sort_bits]

        for sort_by in sort_bits:
            sort_by_bits = sort_by.split(LOOKUP_SEP)
            field_name = sort_by_bits[0]
            order = ''

            if sort_by_bits[0].startswith('-'):
                field_name = sort_by_bits[0][1:]
                order = '-'

            if not field_name in self.fields:
                # It's not a field we know about. Move along citizen.
                raise InvalidSortError("No matching '%s' field for ordering on." % field_name)

            if not field_name in self._meta.ordering:
                raise InvalidSortError("The '%s' field does not allow ordering." % field_name)

            if self.fields[field_name].attribute is None:
                raise InvalidSortError("The '%s' field has no 'attribute' for ordering with." % field_name)

            order_by_args.append("%s%s" % (order, LOOKUP_SEP.join([self.fields[field_name].attribute] + sort_by_bits[1:])))

        return obj_list.order_by(*order_by_args)

    def get_object_list(self, request):
        """
        An ORM-specific implementation of ``get_object_list``.

        Returns a queryset that may have been limited by authorization or other
        overrides.
        """
        base_object_list = self._meta.queryset

        # Limit it as needed.
        authed_object_list = self.apply_authorization_limits(request, base_object_list)

        return authed_object_list

    def obj_get_list(self, request=None, **kwargs):
        """
        A ORM-specific implementation of ``obj_get_list``.

        Takes an optional ``request`` object, whose ``GET`` dictionary can be
        used to narrow the query.
        """
        filters = None

        if hasattr(request, 'GET'):
            filters = request.GET

        applicable_filters = self.build_filters(filters=filters)

        try:
            return self.get_object_list(request).filter(**applicable_filters)
        except ValueError:
            # The bound exception instance was never used; drop the Py2-only
            # ``, e`` clause.
            raise NotFound("Invalid resource lookup data provided (mismatched type).")

    def obj_get(self, request=None, **kwargs):
        """
        A ORM-specific implementation of ``obj_get``.

        Takes optional ``kwargs``, which are used to narrow the query to find
        the instance.
        """
        try:
            return self.get_object_list(request).get(**kwargs)
        except ValueError:
            raise NotFound("Invalid resource lookup data provided (mismatched type).")

    def obj_create(self, bundle, request=None, **kwargs):
        """
        A ORM-specific implementation of ``obj_create``.
        """
        bundle.obj = self._meta.object_class()

        for key, value in kwargs.items():
            setattr(bundle.obj, key, value)

        bundle = self.full_hydrate(bundle)
        bundle.obj.save()

        # Now pick up the M2M bits.
        m2m_bundle = self.hydrate_m2m(bundle)
        self.save_m2m(m2m_bundle)
        return bundle

    def obj_update(self, bundle, request=None, **kwargs):
        """
        A ORM-specific implementation of ``obj_update``.
        """
        if not bundle.obj or not bundle.obj.pk:
            # Attempt to hydrate data from kwargs before doing a lookup for the object.
            # This step is needed so certain values (like datetime) will pass model validation.
            try:
                bundle.obj = self.get_object_list(request).model()
                bundle.data.update(kwargs)
                bundle = self.full_hydrate(bundle)
                lookup_kwargs = kwargs.copy()
                lookup_kwargs.update(dict(
                    (k, getattr(bundle.obj, k))
                    for k in kwargs.keys()
                    if getattr(bundle.obj, k) is not None))
            except Exception:
                # If there is trouble hydrating the data, fall back to just
                # using kwargs by itself (usually it only contains a "pk" key
                # and this will work fine). Deliberately broad (best-effort),
                # but no longer a bare ``except:`` that would also swallow
                # SystemExit/KeyboardInterrupt.
                lookup_kwargs = kwargs

            try:
                bundle.obj = self.get_object_list(request).get(**lookup_kwargs)
            except ObjectDoesNotExist:
                raise NotFound("A model instance matching the provided arguments could not be found.")

        bundle = self.full_hydrate(bundle)
        bundle.obj.save()

        # Now pick up the M2M bits.
        m2m_bundle = self.hydrate_m2m(bundle)
        self.save_m2m(m2m_bundle)
        return bundle

    def obj_delete_list(self, request=None, **kwargs):
        """
        A ORM-specific implementation of ``obj_delete_list``.

        Takes optional ``kwargs``, which can be used to narrow the query.
        """
        self.get_object_list(request).filter(**kwargs).delete()

    def obj_delete(self, request=None, **kwargs):
        """
        A ORM-specific implementation of ``obj_delete``.

        Takes optional ``kwargs``, which are used to narrow the query to find
        the instance.
        """
        try:
            obj = self.get_object_list(request).get(**kwargs)
        except ObjectDoesNotExist:
            raise NotFound("A model instance matching the provided arguments could not be found.")

        obj.delete()

    def rollback(self, bundles):
        """
        A ORM-specific implementation of ``rollback``.

        Given the list of bundles, delete all models pertaining to those
        bundles.
        """
        for bundle in bundles:
            if bundle.obj and getattr(bundle.obj, 'pk', None):
                bundle.obj.delete()

    def save_m2m(self, bundle):
        """
        Handles the saving of related M2M data.

        Due to the way Django works, the M2M data must be handled after the
        main instance, which is why this isn't a part of the main ``save`` bits.

        Currently slightly inefficient in that it will clear out the whole
        relation and recreate the related data as needed.
        """
        for field_name, field_object in self.fields.items():
            if not getattr(field_object, 'is_m2m', False):
                continue

            if not field_object.attribute:
                continue

            # Get the manager.
            related_mngr = getattr(bundle.obj, field_object.attribute)

            if hasattr(related_mngr, 'clear'):
                # Clear it out, just to be safe.
                related_mngr.clear()

            related_objs = []

            for related_bundle in bundle.data[field_name]:
                related_bundle.obj.save()
                related_objs.append(related_bundle.obj)

            related_mngr.add(*related_objs)

    def get_resource_uri(self, bundle_or_obj):
        """
        Handles generating a resource URI for a single resource.

        Uses the model's ``pk`` in order to create the URI.
        """
        kwargs = {
            'resource_name': self._meta.resource_name,
        }

        if isinstance(bundle_or_obj, Bundle):
            kwargs['pk'] = bundle_or_obj.obj.pk
        else:
            kwargs['pk'] = bundle_or_obj.id

        if self._meta.api_name is not None:
            kwargs['api_name'] = self._meta.api_name

        return self._build_reverse_url("api_dispatch_detail", kwargs=kwargs)
class NamespacedModelResource(ModelResource):
    """
    A ``ModelResource`` subclass that respects Django URL namespaces.
    """
    def _build_reverse_url(self, name, args=None, kwargs=None):
        # Qualify the URL name with the configured namespace before reversing.
        qualified = "%s:%s" % (self._meta.urlconf_namespace, name)
        return reverse(qualified, args=args, kwargs=kwargs)
def convert_post_to_put(request):
    """
    Force Django to parse the body of a PUT request.

    Django only populates ``request.POST``/``request.FILES`` for POST, so
    this temporarily masquerades the request as a POST, triggers the form
    parsing, then restores the method and exposes the data as ``request.PUT``.
    Non-PUT requests are returned untouched.
    """
    if request.method != "PUT":
        return request

    # Drop any previously-parsed payload so it gets re-parsed below.
    if hasattr(request, '_post'):
        del request._post
        del request._files

    try:
        request.method = "POST"
        request._load_post_and_files()
        request.method = "PUT"
    except AttributeError:
        # Some HttpRequest subclasses forbid assigning ``method``; fall back
        # to rewriting the underlying META dict instead.
        request.META['REQUEST_METHOD'] = 'POST'
        request._load_post_and_files()
        request.META['REQUEST_METHOD'] = 'PUT'

    request.PUT = request.POST
    return request
|
import os
import re
from opsbro.collector import Collector
class Dmidecode(Collector):
    """
    Collect DMI/SMBIOS hardware identification data.

    Prefers reading the /sys/class/dmi/id/ files directly on Linux; otherwise
    falls back to shelling out to the ``dmidecode`` command. Windows is not
    supported.
    """
    def launch(self):
        logger = self.logger
        logger.debug('getDmidecode: start')
        res = {}

        # Maybe we are in linux and we can directly read the
        linux_dmi_path = '/sys/class/dmi/id/'
        if os.path.exists(linux_dmi_path):
            file_names = os.listdir(linux_dmi_path)
            for fname in file_names:
                p = os.path.join(linux_dmi_path, fname)
                # There can be a link there, skip them
                if os.path.isfile(p):
                    with open(p, 'r') as f:
                        buf = f.read()
                    res[fname] = buf.strip()
            logger.debug('getdmidecode: completed, returning')
            return res
        elif os.name == 'nt':
            self.set_not_eligible('Windows is currently not managed for DMI informations')
            return False
        # Ok not direct access, try to launch with
        else:  # try dmidecode way, if exists
            # BUG FIX: ``res`` was previously reused for both the raw command
            # output (a string) and the result mapping, so the item
            # assignment below raised TypeError. Keep them separate.
            keywords_output = self.execute_shell('LANG=C dmidecode -s')
            if keywords_output is False:
                self.set_not_eligible('Cannot read dmi information')
                return False
            for p in keywords_output.split('\n'):
                # Valid keywords are listed indented by ``dmidecode -s``.
                if re.search(r'^ ', p):
                    keyword = p.strip()
                    buf = self.execute_shell('LANG=C dmidecode -s %s' % keyword)
                    if buf is False:
                        # The per-keyword call failed; skip instead of
                        # crashing on ``False.strip()``.
                        continue
                    buf = buf.strip()
                    if 'No such file or directory' in buf:
                        logger.warning('Cannot access to dmi information with dmidecode command, exiting this collector.')
                        self.set_not_eligible('Cannot get DMI informations because the dmidecode command is missing.')
                        return res
                    res[keyword.replace('-', '_')] = buf
            logger.debug('getdmidecode: completed, returning')
            return res
|
import sys

# Require all four arguments: the input .inc, the two output paths and the
# arch name. (The original ``len(sys.argv) == 1`` check let partial argument
# lists through and then crashed on ``sys.argv[4]``.)
if len(sys.argv) < 5:
    print("Syntax: %s <GenAsmWriter.inc> <Output-GenAsmWriter.inc> <Output-GenRegisterName.inc> <arch>" % sys.argv[0])
    sys.exit(1)

arch = sys.argv[4]

# Slurp the generated LLVM AsmWriter table for translation.
with open(sys.argv[1]) as f:
    lines = f.readlines()

# Kept open for the duration of the script: written to by print_line() below.
f1 = open(sys.argv[2], 'w+')
f2 = open(sys.argv[3], 'w+')

header = (
    "/* Capstone Disassembly Engine, http://www.capstone-engine.org */\n"
    "/* By Nguyen Anh Quynh <aquynh@gmail.com>, 2013-2019 */\n"
    "\n"
)
f1.write(header)
f2.write(header)

# Parser state shared by the translation loop below.
need_endif = False
in_getRegisterName = False
in_printAliasInstr = False
fragment_no = None
skip_printing = False
skip_line = 0
skip_count = 0
def replace_getOp(line):
    """
    Rewrite the first matching C++ ``MI->getOperand(i)`` (i in 0..8) on the
    line into the C-style ``MCInst_getOperand(MI, i)`` call.

    Only the lowest-numbered operand found is translated (all its occurrences
    on the line); lines without a match are returned unchanged.
    """
    for i in range(9):
        cxx = 'MI->getOperand(%u)' % i
        if cxx in line:
            return line.replace(cxx, 'MCInst_getOperand(MI, %u)' % i)
    return line
def replace_getReg(line):
	"""Rewrite C++ 'MI->getOperand(N).getReg()' into
	'MCOperand_getReg(MCInst_getOperand(MI, N))'.

	Generalized from the original 0-8 if/elif chain: any operand index is
	handled and every occurrence on the line is rewritten at once, so the
	caller's "call it twice" workaround becomes a harmless no-op.
	"""
	import re
	return re.sub(r'MI->getOperand\((\d+)\)\.getReg\(\)',
	              r'MCOperand_getReg(MCInst_getOperand(MI, \1))', line)
def extract_paren(line, text):
	"""Return the substring between the first '(' and ')' at/after *text*.

	Raises ValueError (from str.index) if *text* or the parens are absent.
	"""
	anchor = line.index(text)
	lparen = line.index('(', anchor)
	rparen = line.index(')', anchor)
	return line[lparen + 1:rparen]
def extract_brackets(line):
	"""Return the text between the first '<' and '>' of *line* ('' if no '<')."""
	if '<' not in line:
		return ''
	return line[line.index('<') + 1:line.index('>')]
def del_brackets(line):
	"""Return *line* with its first '<...>' template argument removed.

	The line is returned unchanged when it contains no '<'.
	"""
	if '<' not in line:
		return line
	lt, gt = line.index('<'), line.index('>')
	return line[:lt] + line[gt + 1:]
def print_line(line):
	"""Apply the C++ -> C token fixups and write *line* to the active output.

	Output goes to f2 while we are translating getRegisterName(), otherwise
	to f1; nothing is written while skip_printing is set.
	"""
	line = line.replace('::', '_').replace('nullptr', 'NULL')
	if skip_printing:
		return
	target = f2 if in_getRegisterName else f1
	target.write(line + "\n")
# Main translation pass: walk the TableGen'ed C++ .inc file line by line and
# emit equivalent C code.  Output routing (f1 vs f2) and global suppression
# are handled by print_line(); the flags set in the preamble control how the
# current line is interpreted.  Each elif branch below rewrites one specific
# C++ construct into its Capstone C spelling.
for line in lines:
	line = line.rstrip()
	#print("@", line)
	# skip Alias
	if arch.upper() == 'X86':
		if 'PRINT_ALIAS_INSTR' in line:
			# done
			break
	# honour a pending "drop the next skip_line input lines" request
	if skip_line:
		skip_count += 1
		if skip_count <= skip_line:
			# skip this line
			continue
		else:
			# skip enough number of lines, reset counters
			skip_line = 0
			skip_count = 0
	# rewrite the C++ member-function definitions into static C functions
	if "::printInstruction" in line:
		if arch.upper() in ('AARCH64', 'ARM64'):
			#print_line("static void printInstruction(MCInst *MI, SStream *O, MCRegisterInfo *MRI)\n{")
			print_line("static void printInstruction(MCInst *MI, SStream *O)\n{")
		else:
			print_line("static void printInstruction(MCInst *MI, SStream *O)\n{")
	elif 'const char *AArch64InstPrinter::' in line:
		continue
	elif 'getRegisterName(' in line:
		if 'unsigned AltIdx' in line:
			print_line("static const char *getRegisterName(unsigned RegNo, unsigned AltIdx)\n{")
		else:
			print_line("static const char *getRegisterName(unsigned RegNo)\n{")
	elif 'getRegisterName' in line:
		in_getRegisterName = True
		print_line(line)
	elif '::printAliasInstr' in line:
		if arch.upper() in ('AARCH64', 'PPC'):
			print_line("static char *printAliasInstr(MCInst *MI, SStream *OS, MCRegisterInfo *MRI)\n{")
			print_line(' #define GETREGCLASS_CONTAIN(_class, _reg) MCRegisterClass_contains(MCRegisterInfo_getRegClass(MRI, _class), MCOperand_getReg(MCInst_getOperand(MI, _reg)))')
		else:
			print_line("static bool printAliasInstr(MCInst *MI, SStream *OS)\n{")
		print_line("	unsigned int I = 0, OpIdx, PrintMethodIdx;")
		print_line("	char *tmpString;")
		in_printAliasInstr = True
	# feature-bit queries become per-arch helper calls
	elif 'STI.getFeatureBits()[' in line:
		if arch.upper() == 'ARM':
			line2 = line.replace('STI.getFeatureBits()[', 'ARM_getFeatureBits(MI->csh->mode, ')
		elif arch.upper() == 'AARCH64':
			line2 = line.replace('STI.getFeatureBits()[', 'AArch64_getFeatureBits(')
		line2 = line2.replace(']', ')')
		print_line(line2)
	# drop the MCSubtargetInfo argument and expand C++ template print helpers
	elif ', STI, ' in line:
		line2 = line.replace(', STI, ', ', ')
		if 'printSVELogicalImm<' in line:
			if 'int16' in line:
				line2 = line2.replace('printSVELogicalImm', 'printSVELogicalImm16')
				line2 = line2.replace('<int16_t>', '')
			elif 'int32' in line:
				line2 = line2.replace('printSVELogicalImm', 'printSVELogicalImm32')
				line2 = line2.replace('<int32_t>', '')
			else:
				line2 = line2.replace('printSVELogicalImm', 'printSVELogicalImm64')
				line2 = line2.replace('<int64_t>', '')
		if 'MI->getOperand(' in line:
			line2 = replace_getOp(line2)
		# C++ template
		if 'printPrefetchOp' in line2:
			param = extract_brackets(line2)
			if param == '':
				param = 'false'
			line2 = del_brackets(line2)
			line2 = line2.replace(', O);', ', O, %s);' %param)
			line2 = line2.replace(', OS);', ', OS, %s);' %param)
		elif '<false>' in line2:
			line2 = line2.replace('<false>', '')
			line2 = line2.replace(', O);', ', O, false);')
			line2 = line2.replace('STI, ', '')
		elif '<true>' in line:
			line2 = line2.replace('<true>', '')
			line2 = line2.replace(', O);', ', O, true);')
			line2 = line2.replace('STI, ', '')
		elif 'printAdrLabelOperand' in line:
			# C++ template
			if '<0>' in line:
				line2 = line2.replace('<0>', '')
				line2 = line2.replace(', O);', ', O, 0);')
			elif '<1>' in line:
				line2 = line2.replace('<1>', '')
				line2 = line2.replace(', O);', ', O, 1);')
			elif '<2>' in line:
				line2 = line2.replace('<2>', '')
				line2 = line2.replace(', O);', ', O, 2);')
		elif 'printImm8OptLsl' in line2:
			param = extract_brackets(line2)
			line2 = del_brackets(line2)
			if '8' in param or '16' in param or '32' in param:
				line2 = line2.replace('printImm8OptLsl', 'printImm8OptLsl32')
			elif '64' in param:
				line2 = line2.replace('printImm8OptLsl', 'printImm8OptLsl64')
		elif 'printLogicalImm' in line2:
			param = extract_brackets(line2)
			line2 = del_brackets(line2)
			if '8' in param or '16' in param or '32' in param:
				line2 = line2.replace('printLogicalImm', 'printLogicalImm32')
			elif '64' in param:
				line2 = line2.replace('printLogicalImm', 'printLogicalImm64')
		elif 'printSVERegOp' in line2 or 'printGPRSeqPairsClassOperand' in line2 or 'printTypedVectorList' in line2 or 'printPostIncOperand' in line2 or 'printImmScale' in line2 or 'printRegWithShiftExtend' in line2 or 'printUImm12Offset' in line2 or 'printExactFPImm' in line2 or 'printMemExtend' in line2 or 'printZPRasFPR' in line2:
			param = extract_brackets(line2)
			if param == '':
				param = '0'
			line2 = del_brackets(line2)
			line2 = line2.replace(', O);', ', O, %s);' %param)
			line2 = line2.replace(', OS);', ', OS, %s);' %param)
		elif 'printComplexRotationOp' in line:
			# printComplexRotationOp<90, 0>(MI, 5, STI, O);
			bracket_content = line2[line2.index('<') + 1 : line2.index('>')]
			line2 = line2.replace('<' + bracket_content + '>', '')
			line2 = line2.replace(' O);', ' O, %s);' %bracket_content)
		print_line(line2)
	# mnemonic string tables are excluded from CAPSTONE_DIET builds
	elif "static const char AsmStrs[]" in line:
		print_line("#ifndef CAPSTONE_DIET")
		print_line("	static const char AsmStrs[] = {")
		need_endif = True
	elif "static const char AsmStrsNoRegAltName[]" in line:
		print_line("#ifndef CAPSTONE_DIET")
		print_line("	static const char AsmStrsNoRegAltName[] = {")
		need_endif = True
	elif line == '  O << "\\t";':
		print_line("	unsigned int opcode = MCInst_getOpcode(MI);")
		print_line('	// printf("opcode = %u\\n", opcode);');
	elif 'MI->getOpcode()' in line:
		if 'switch' in line:
			line2 = line.replace('MI->getOpcode()', 'MCInst_getOpcode(MI)')
		else:
			line2 = line.replace('MI->getOpcode()', 'opcode')
		print_line(line2)
	# raw_ostream streaming becomes SStream_concat0(); the emitted literal
	# also drives operand/register bookkeeping for ARM/AArch64/PPC/X86 below
	elif 'O << ' in line:
		if '"' in line:
			line2 = line.lower()
			line2 = line2.replace('o << ', 'SStream_concat0(O, ');
		else:
			line2 = line.replace('O << ', 'SStream_concat0(O, ');
			line2 = line2.replace("'", '"')
		line2 = line2.replace(';', ');')
		if '" : "' in line2: # "segment : offset" in X86
			line2 = line2.replace('" : "', '":"')
		# ARM
		print_line(line2)
		if '", #0"' in line2:
			print_line('	op_addImm(MI, 0);')
		if '", #1"' in line2:
			print_line('	op_addImm(MI, 1);')
		# PowerPC
		if '", 268"' in line2:
			print_line('	op_addImm(MI, 268);')
		elif '", 256"' in line2:
			print_line('	op_addImm(MI, 256);')
		elif '", 0, "' in line2 or '", 0"' in line2:
			print_line('	op_addImm(MI, 0);')
		elif '", -1"' in line2:
			print_line('	op_addImm(MI, -1);')
		# '[' / ']' in the literal marks the start/end of a memory operand
		if '[' in line2:
			if not '[]' in line2:
				print_line('	set_mem_access(MI, true);')
		if ']' in line2:
			if not '[]' in line2:
				print_line('	set_mem_access(MI, false);')
		# ARM NEON/VFP data-type suffixes recorded on the instruction
		if '".f64\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_F64);')
		elif '".f32\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_F32);')
		elif '".f16\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_F16);')
		elif '".s64\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_S64);')
		elif '".s32\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_S32);')
		elif '".s16\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_S16);')
		elif '".s8\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_S8);')
		elif '".u64\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_U64);')
		elif '".u32\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_U32);')
		elif '".u16\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_U16);')
		elif '".u8\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_U8);')
		elif '".i64\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_I64);')
		elif '".i32\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_I32);')
		elif '".i16\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_I16);')
		elif '".i8\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_I8);')
		elif '".f16.f64\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_F16F64);')
		elif '".f64.f16\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_F64F16);')
		elif '".f16.f32\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_F16F32);')
		elif '".f32.f16\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_F32F16);')
		elif '".f64.f32\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_F64F32);')
		elif '".f32.f64\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_F32F64);')
		elif '".s32.f32\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_S32F32);')
		elif '".f32.s32\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_F32S32);')
		elif '".u32.f32\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_U32F32);')
		elif '".f32.u32\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_F32U32);')
		elif '".p8\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_P8);')
		elif '".f64.s16\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_F64S16);')
		elif '".s16.f64\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_S16F64);')
		elif '".f32.s16\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_F32S16);')
		elif '".s16.f32\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_S16F32);')
		elif '".f64.s32\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_F64S32);')
		elif '".s32.f64\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_S32F64);')
		elif '".f64.u16\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_F64U16);')
		elif '".u16.f64\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_U16F64);')
		elif '".f32.u16\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_F32U16);')
		elif '".u16.f32\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_U16F32);')
		elif '".f64.u32\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_F64U32);')
		elif '".u32.f64\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_U32F64);')
		elif '".f16.u32\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_F16U32);')
		elif '".u32.f16\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_U32F16);')
		elif '".f16.u16\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_F16U16);')
		elif '".u16.f16\\t"' in line2:
			print_line('	ARM_addVectorDataType(MI, ARM_VECTORDATA_U16F16);')
		elif '"\\tlr"' in line2:
			print_line('	ARM_addReg(MI, ARM_REG_LR);')
		elif '"\\tapsr_nzcv, fpscr"' in line2:
			print_line('	ARM_addReg(MI, ARM_REG_APSR_NZCV);')
			print_line('	ARM_addReg(MI, ARM_REG_FPSCR);')
		elif '"\\tpc, lr"' in line2:
			print_line('	ARM_addReg(MI, ARM_REG_PC);')
			print_line('	ARM_addReg(MI, ARM_REG_LR);')
		elif '"\\tfpscr, "' in line2:
			print_line('	ARM_addReg(MI, ARM_REG_FPSCR);')
		elif '"\\tfpexc, "' in line2:
			print_line('	ARM_addReg(MI, ARM_REG_FPEXC);')
		elif '"\\tfpinst, "' in line2:
			print_line('	ARM_addReg(MI, ARM_REG_FPINST);')
		elif '"\\tfpinst2, "' in line2:
			print_line('	ARM_addReg(MI, ARM_REG_FPINST2);')
		elif '"\\tfpsid, "' in line2:
			print_line('	ARM_addReg(MI, ARM_REG_FPSID);')
		elif '"\\tsp, "' in line2:
			print_line('	ARM_addReg(MI, ARM_REG_SP);')
		elif '"\\tsp!, "' in line2:
			print_line('	ARM_addReg(MI, ARM_REG_SP);')
		elif '", apsr"' in line2:
			print_line('	ARM_addReg(MI, ARM_REG_APSR);')
		elif '", spsr"' in line2:
			print_line('	ARM_addReg(MI, ARM_REG_SPSR);')
		elif '", fpscr"' in line2:
			print_line('	ARM_addReg(MI, ARM_REG_FPSCR);')
		elif '", fpscr"' in line2:
			print_line('	ARM_addReg(MI, ARM_REG_FPSCR);')
		elif '", fpexc"' in line2:
			print_line('	ARM_addReg(MI, ARM_REG_FPEXC);')
		elif '", fpinst"' in line2:
			print_line('	ARM_addReg(MI, ARM_REG_FPINST);')
		elif '", fpinst2"' in line2:
			print_line('	ARM_addReg(MI, ARM_REG_FPINST2);')
		elif '", fpsid"' in line2:
			print_line('	ARM_addReg(MI, ARM_REG_FPSID);')
		elif '", mvfr0"' in line2:
			print_line('	ARM_addReg(MI, ARM_REG_MVFR0);')
		elif '", mvfr1"' in line2:
			print_line('	ARM_addReg(MI, ARM_REG_MVFR1);')
		elif '", mvfr2"' in line2:
			print_line('	ARM_addReg(MI, ARM_REG_MVFR2);')
		elif '.8\\t' in line2:
			print_line('	ARM_addVectorDataSize(MI, 8);')
		elif '.16\\t' in line2:
			print_line('	ARM_addVectorDataSize(MI, 16);')
		elif '.32\\t' in line2:
			print_line('	ARM_addVectorDataSize(MI, 32);')
		elif '.64\\t' in line2:
			print_line('	ARM_addVectorDataSize(MI, 64);')
		elif '" ^"' in line2:
			print_line('	ARM_addUserMode(MI);')
		# AArch64 vector arrangement specifiers
		if '.16b' in line2:
			print_line('	arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_16B);')
		elif '.8b' in line2:
			print_line('	arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_8B);')
		elif '.4b' in line2:
			print_line('	arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_4B);')
		elif '.b' in line2:
			print_line('	arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_1B);')
		elif '.8h' in line2:
			print_line('	arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_8H);')
		elif '.4h' in line2:
			print_line('	arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_4H);')
		elif '.2h' in line2:
			print_line('	arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_2H);')
		elif '.h' in line2:
			print_line('	arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_1H);')
		elif '.4s' in line2:
			print_line('	arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_4S);')
		elif '.2s' in line2:
			print_line('	arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_2S);')
		elif '.s' in line2:
			print_line('	arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_1S);')
		elif '.2d' in line2:
			print_line('	arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_2D);')
		elif '.1d' in line2:
			print_line('	arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_1D);')
		elif '.1q' in line2:
			print_line('	arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_1Q);')
		if '#0.0' in line2:
			print_line('	arm64_op_addFP(MI, 0);')
		elif '#0' in line2:
			print_line('	arm64_op_addImm(MI, 0);')
		elif '#8' in line2:
			print_line('	arm64_op_addImm(MI, 8);')
		elif '#16' in line2:
			print_line('	arm64_op_addImm(MI, 16);')
		elif '#32' in line2:
			print_line('	arm64_op_addImm(MI, 32);')
		# X86
		if '", %rax"' in line2 or '", rax"' in line2:
			print_line('	op_addReg(MI, X86_REG_RAX);')
		elif '", %eax"' in line2 or '", eax"' in line2:
			print_line('	op_addReg(MI, X86_REG_EAX);')
		elif '", %ax"' in line2 or '", ax"' in line2:
			print_line('	op_addReg(MI, X86_REG_AX);')
		elif '", %al"' in line2 or '", al"' in line2:
			print_line('	op_addReg(MI, X86_REG_AL);')
		elif '", %dx"' in line2 or '", dx"' in line2:
			print_line('	op_addReg(MI, X86_REG_DX);')
		elif '", %st(0)"' in line2 or '", st(0)"' in line2:
			print_line('	op_addReg(MI, X86_REG_ST0);')
		elif '", 1"' in line2:
			print_line('	op_addImm(MI, 1);')
		elif '", cl"' in line2:
			print_line('	op_addReg(MI, X86_REG_CL);')
		elif '"{1to2}, "' in line2:
			print_line('	op_addAvxBroadcast(MI, X86_AVX_BCAST_2);')
		elif '"{1to4}, "' in line2:
			print_line('	op_addAvxBroadcast(MI, X86_AVX_BCAST_4);')
		elif '"{1to8}, "' in line2:
			print_line('	op_addAvxBroadcast(MI, X86_AVX_BCAST_8);')
		elif '"{1to16}, "' in line2:
			print_line('	op_addAvxBroadcast(MI, X86_AVX_BCAST_16);')
		elif '{z}{sae}' in line2:
			print_line('	op_addAvxSae(MI);')
			print_line('	op_addAvxZeroOpmask(MI);')
		elif ('{z}' in line2):
			print_line('	op_addAvxZeroOpmask(MI);')
		elif '{sae}' in line2:
			print_line('	op_addAvxSae(MI);')
	elif 'llvm_unreachable("Invalid command number.");' in line:
		line2 = line.replace('llvm_unreachable("Invalid command number.");', '// unreachable')
		print_line(line2)
	elif ('assert(' in line) or ('assert (' in line):
		pass
	elif 'Invalid alt name index' in line:
		pass
	elif '::' in line and 'case ' in line:
		#print_line(line2)
		print_line(line)
	elif 'MI->getNumOperands()' in line:
		line2 = line.replace('MI->getNumOperands()', 'MCInst_getNumOperands(MI)')
		print_line(line2)
	elif 'const MCOperand &MCOp' in line:
		line2 = line.replace('const MCOperand &MCOp', 'MCOperand *MCOp')
		print_line(line2)
	elif 'MI->getOperand(0).isImm()' in line:
		line2 = line.replace('MI->getOperand(0).isImm()', 'MCOperand_isImm(MCInst_getOperand(MI, 0))')
		print_line(line2)
	elif 'MI->getOperand(1).isImm()' in line:
		line2 = line.replace('MI->getOperand(1).isImm()', 'MCOperand_isImm(MCInst_getOperand(MI, 1))')
		print_line(line2)
	elif 'MI->getOperand(2).isImm()' in line:
		line2 = line.replace('MI->getOperand(2).isImm()', 'MCOperand_isImm(MCInst_getOperand(MI, 2))')
		print_line(line2)
	elif 'MI->getOperand(3).isImm()' in line:
		line2 = line.replace('MI->getOperand(3).isImm()', 'MCOperand_isImm(MCInst_getOperand(MI, 3))')
		print_line(line2)
	elif 'MI->getOperand(4).isImm()' in line:
		line2 = line.replace('MI->getOperand(4).isImm()', 'MCOperand_isImm(MCInst_getOperand(MI, 4))')
		print_line(line2)
	elif 'MI->getOperand(5).isImm()' in line:
		line2 = line.replace('MI->getOperand(5).isImm()', 'MCOperand_isImm(MCInst_getOperand(MI, 5))')
		print_line(line2)
	elif 'MI->getOperand(6).isImm()' in line:
		line2 = line.replace('MI->getOperand(6).isImm()', 'MCOperand_isImm(MCInst_getOperand(MI, 6))')
		print_line(line2)
	elif 'MI->getOperand(7).isImm()' in line:
		line2 = line.replace('MI->getOperand(7).isImm()', 'MCOperand_isImm(MCInst_getOperand(MI, 7))')
		print_line(line2)
	elif 'MI->getOperand(8).isImm()' in line:
		line2 = line.replace('MI->getOperand(8).isImm()', 'MCOperand_isImm(MCInst_getOperand(MI, 8))')
		print_line(line2)
	elif 'MI->getOperand(0).getImm()' in line:
		line2 = line.replace('MI->getOperand(0).getImm()', 'MCOperand_getImm(MCInst_getOperand(MI, 0))')
		print_line(line2)
	elif 'MI->getOperand(1).getImm()' in line:
		line2 = line.replace('MI->getOperand(1).getImm()', 'MCOperand_getImm(MCInst_getOperand(MI, 1))')
		print_line(line2)
	elif 'MI->getOperand(2).getImm()' in line:
		line2 = line.replace('MI->getOperand(2).getImm()', 'MCOperand_getImm(MCInst_getOperand(MI, 2))')
		print_line(line2)
	elif 'MI->getOperand(3).getImm()' in line:
		line2 = line.replace('MI->getOperand(3).getImm()', 'MCOperand_getImm(MCInst_getOperand(MI, 3))')
		print_line(line2)
	elif 'MI->getOperand(4).getImm()' in line:
		line2 = line.replace('MI->getOperand(4).getImm()', 'MCOperand_getImm(MCInst_getOperand(MI, 4))')
		print_line(line2)
	elif 'MI->getOperand(5).getImm()' in line:
		line2 = line.replace('MI->getOperand(5).getImm()', 'MCOperand_getImm(MCInst_getOperand(MI, 5))')
		print_line(line2)
	elif 'MI->getOperand(6).getImm()' in line:
		line2 = line.replace('MI->getOperand(6).getImm()', 'MCOperand_getImm(MCInst_getOperand(MI, 6))')
		print_line(line2)
	elif 'MI->getOperand(7).getImm()' in line:
		line2 = line.replace('MI->getOperand(7).getImm()', 'MCOperand_getImm(MCInst_getOperand(MI, 7))')
		print_line(line2)
	elif 'MI->getOperand(8).getImm()' in line:
		line2 = line.replace('MI->getOperand(8).getImm()', 'MCOperand_getImm(MCInst_getOperand(MI, 8))')
		print_line(line2)
	# register-class containment tests go through the GETREGCLASS_CONTAIN macro
	elif 'MRI.getRegClass(' in line:
		classid = extract_paren(line, 'getRegClass(')
		operand = extract_paren(line, 'getOperand')
		line2 = line.replace('MI->getNumOperands()', 'MCInst_getNumOperands(MI)')
		line2 = '	GETREGCLASS_CONTAIN(%s, %s)' %(classid, operand)
		if line.endswith('())) {'):
			line2 += ') {'
		elif line.endswith(' {'):
			line2 += ' {'
		elif line.endswith(' &&'):
			line2 += ' &&'
		print_line(line2)
	elif 'MI->getOperand(' in line and 'isReg' in line:
		operand = extract_paren(line, 'getOperand')
		line2 = '	MCOperand_isReg(MCInst_getOperand(MI, %s))' %(operand)
		# MI->getOperand(1).isReg() &&
		if line.endswith(' {'):
			line2 += ' {'
		elif line.endswith(' &&'):
			line2 += ' &&'
		print_line(line2)
	elif 'MI->getOperand(' in line and 'getReg' in line:
		line2 = replace_getReg(line)
		# one more time
		line2 = replace_getReg(line2)
		print_line(line2)
	elif ' return false;' in line and in_printAliasInstr:
		print_line('	return NULL;')
	elif 'MCOp.isImm()' in line:
		line2 = line.replace('MCOp.isImm()', 'MCOperand_isImm(MCOp)')
		print_line(line2)
	elif 'MCOp.getImm()' in line:
		line2 = line.replace('MCOp.getImm()', 'MCOperand_getImm(MCOp)')
		if 'int64_t Val =' in line:
			line2 = line2.replace('int64_t Val =', 'Val =')
		print_line(line2)
	elif 'isSVEMaskOfIdenticalElements<' in line:
		if 'int8' in line:
			line2 = line.replace('isSVEMaskOfIdenticalElements', 'isSVEMaskOfIdenticalElements8')
			line2 = line2.replace('<int8_t>', '')
		elif 'int16' in line:
			line2 = line.replace('isSVEMaskOfIdenticalElements', 'isSVEMaskOfIdenticalElements16')
			line2 = line2.replace('<int16_t>', '')
		elif 'int32' in line:
			line2 = line.replace('isSVEMaskOfIdenticalElements', 'isSVEMaskOfIdenticalElements32')
			line2 = line2.replace('<int32_t>', '')
		else:
			line2 = line.replace('isSVEMaskOfIdenticalElements', 'isSVEMaskOfIdenticalElements64')
			line2 = line2.replace('<int64_t>', '')
		print_line(line2)
	elif 'switch (PredicateIndex) {' in line:
		print_line('	int64_t Val;')
		print_line(line)
	# replace the body of printAliasInstr()'s alias-string expansion with
	# hand-written C that walks the '$'-encoded operand markers
	elif 'unsigned I = 0;' in line and in_printAliasInstr:
		print_line("""
	tmpString = cs_strdup(AsmString);
	while (AsmString[I] != ' ' && AsmString[I] != '\\t' &&
		AsmString[I] != '$' && AsmString[I] != '\\0')
		++I;
	tmpString[I] = 0;
	SStream_concat0(OS, tmpString);
	if (AsmString[I] != '\\0') {
		if (AsmString[I] == ' ' || AsmString[I] == '\\t') {
			SStream_concat0(OS, " ");
			++I;
		}
		do {
			if (AsmString[I] == '$') {
				++I;
				if (AsmString[I] == (char)0xff) {
					++I;
					OpIdx = AsmString[I++] - 1;
					PrintMethodIdx = AsmString[I++] - 1;
					printCustomAliasOperand(MI, OpIdx, PrintMethodIdx, OS);
				} else
					printOperand(MI, (unsigned)(AsmString[I++]) - 1, OS);
			} else {
				SStream_concat1(OS, AsmString[I++]);
			}
		} while (AsmString[I] != '\\0');
	}
	return tmpString;
}
""")
		in_printAliasInstr = False
		# skip next few lines
		skip_printing = True
	elif '::printCustomAliasOperand' in line:
		# print again
		skip_printing = False
		print_line('static void printCustomAliasOperand(')
	elif 'const MCSubtargetInfo &STI' in line:
		pass
	elif 'const MCInst *MI' in line:
		line2 = line.replace('const MCInst *MI', 'MCInst *MI')
		print_line(line2)
	elif 'llvm_unreachable("' in line:
		if 'default: ' in line:
			print_line('	default:')
		elif 'llvm_unreachable("Unknown MCOperandPredicate kind")' in line:
			print_line('	return false; // never reach')
		else:
			pass
	elif 'raw_ostream &' in line:
		line2 = line.replace('raw_ostream &', 'SStream *')
		if line2.endswith(' {'):
			line2 = line2.replace(' {', '\n{')
		print_line(line2)
	elif 'printPredicateOperand(' in line and 'STI, ' in line:
		line2 = line.replace('STI, ', '')
		print_line(line2)
	elif '// Fragment ' in line:
		# // Fragment 0 encoded into 6 bits for 51 unique commands.
		tmp = line.strip().split(' ')
		fragment_no = tmp[2]
		print_line(line)
	elif ('switch ((' in line or 'if ((' in line) and 'Bits' in line:
		# switch ((Bits >> 14) & 63) {
		bits = line.strip()
		bits = bits.replace('switch ', '')
		bits = bits.replace('if ', '')
		bits = bits.replace('{', '')
		bits = bits.strip()
		print_line('	// printf("Fragment %s: %%"PRIu64"\\n", %s);' %(fragment_no, bits))
		print_line(line)
	elif not skip_printing:
		print_line(line)
	# post-line state handling: close CAPSTONE_DIET guards and terminate
	# getRegisterName() with a DIET fallback
	if line == ' };':
		if need_endif and not in_getRegisterName:
			# endif only for AsmStrs when we are not inside getRegisterName()
			print_line("#endif")
			need_endif = False
	elif 'return AsmStrs+RegAsmOffset[RegNo-1];' in line:
		if in_getRegisterName:
			# return NULL for register name on Diet mode
			print_line("#else")
			print_line("	return NULL;")
			print_line("#endif")
			print_line("}")
			need_endif = False
			in_getRegisterName = False
			# skip 1 line
			skip_line = 1
	elif line == ' }':
		# ARM64
		if in_getRegisterName:
			# return NULL for register name on Diet mode
			print_line("#else")
			print_line("	return NULL;")
			print_line("#endif")
			print_line("}")
			need_endif = False
			in_getRegisterName = False
			# skip 1 line
			skip_line = 1
	elif 'default:' in line:
		# ARM64
		if in_getRegisterName:
			# get the size of RegAsmOffsetvreg[]
			print_line("	return (const char *)(sizeof(RegAsmOffsetvreg)/sizeof(RegAsmOffsetvreg[0]));")
# Done translating: flush both generated files.
f1.close()
f2.close()
|
import logging, os
logging.basicConfig(level=logging.INFO)
from deepy.networks import RecursiveAutoEncoder
from deepy.trainers import SGDTrainer, LearningRateAnnealer
from util import get_data, VECTOR_SIZE
# Destination of the trained parameters (gzip file next to this script).
model_path = os.path.join(os.path.dirname(__file__), "models", "rae1.gz")
if __name__ == '__main__':
    # Recursive auto-encoder: VECTOR_SIZE-dimensional inputs compressed to a
    # 10-dimensional representation.
    model = RecursiveAutoEncoder(input_dim=VECTOR_SIZE, rep_dim=10)
    # Plain SGD; the annealer lowers the learning rate as epochs progress.
    trainer = SGDTrainer(model)
    annealer = LearningRateAnnealer()
    # get_data() supplies the training set (see util module); the run blocks
    # until the annealer's schedule finishes.
    trainer.run(get_data(), epoch_controllers=[annealer])
    model.save_params(model_path)
|
"""Example of server-side computations used in global forest change analysis.

In this example we focus on server-side computation using NDVI and EVI
data.  Both metrics are precomputed bands created by third-party providers
or derived directly from the satellite measurements.

NDVI and EVI are two metrics used in global forest change analysis; they
represent the forest concentration in a specific area.  We use the MOD13A1
vegetation index provided by NASA [1].

The goal is to generate an RGB image, where red stands for deforestation,
green for reforestation and blue for masked data (e.g. rivers, oceans...).

[1] https://code.earthengine.google.com/dataset/MODIS/MOD13A1
"""
import ee
# Connect to the Earth Engine servers (requires local credentials).
ee.Initialize()
# Area of interest given as lon/lat corner coordinates.
rectangle = ee.Geometry.Rectangle(-68, -7, -65, -8)
collection = ee.ImageCollection('MODIS/MOD13A1')
# Keep only the Enhanced Vegetation Index band.
collection = collection.select(['EVI'])
# Median EVI composite for the year 2000 and for the year 2015.
ndvi2000 = collection.filterDate('2000-01-01', '2000-12-31').median()
ndvi2015 = collection.filterDate('2015-01-01', '2015-12-31').median()
# Positive difference = vegetation gain, negative = loss.
difference = ndvi2015.subtract(ndvi2000)
# Land-cover classification used to mask out non-land pixels.
classifiedImage = ee.Image('MODIS/051/MCD12Q1/2001_01_01')
mask = classifiedImage.select(['Land_Cover_Type_1'])
maskedDifference = difference.updateMask(mask)
# Map min..max onto the palette: red (loss), black, green (gain).
visualized = maskedDifference.visualize(
    min=-2000,
    max=2000,
    palette='FF0000, 000000, 00FF00',
)
# Python 2 print statement: emit a download URL for the rendered PNG tile.
print visualized.getDownloadUrl({
    'region': rectangle.toGeoJSONString(),
    'scale': 500,
    'format': 'png',
})
|
"""Keccak family of cryptographic hash algorithms.
`Keccak`_ is the winning algorithm of the SHA-3 competition organized by NIST.
What eventually became SHA-3 is a variant incompatible to Keccak,
even though the security principles and margins remain the same.
If you are interested in writing SHA-3 compliant code, you must use
the modules ``SHA3_224``, ``SHA3_256``, ``SHA3_384`` or ``SHA3_512``.
This module implements the Keccak hash functions for the 64 bit word
length (b=1600) and the fixed digest sizes of 224, 256, 384 and 512 bits.
>>> from Cryptodome.Hash import keccak
>>>
>>> keccak_hash = keccak.new(digest_bits=512)
>>> keccak_hash.update(b'Some data')
>>> print keccak_hash.hexdigest()
.. _Keccak: http://www.keccak.noekeon.org/Keccak-specifications.pdf
"""
from Cryptodome.Util.py3compat import bord
from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib,
VoidPointer, SmartPointer,
create_string_buffer,
get_raw_buffer, c_size_t,
expect_byte_string)
_raw_keccak_lib = load_pycryptodome_raw_lib("Cryptodome.Hash._keccak",
"""
int keccak_init(void **state,
size_t capacity_bytes,
uint8_t padding_byte);
int keccak_destroy(void *state);
int keccak_absorb(void *state,
const uint8_t *in,
size_t len);
int keccak_squeeze(const void *state,
uint8_t *out,
size_t len);
int keccak_digest(void *state, uint8_t *digest, size_t len);
""")
class Keccak_Hash(object):
    """Class that implements a Keccak hash (64-bit lanes, b=1600).
    Do not instantiate directly: use the module-level ``new()`` function.
    """
    def __init__(self, data, digest_bytes, update_after_digest):
        #: The size of the resulting hash in bytes.
        self.digest_size = digest_bytes
        self._update_after_digest = update_after_digest
        self._digest_done = False
        # Allocate the C-level Keccak state.  capacity = 2 * digest size is
        # the standard Keccak security parameter; 0x01 is the original Keccak
        # padding byte (the SHA-3 variants use a different one).
        state = VoidPointer()
        result = _raw_keccak_lib.keccak_init(state.address_of(),
                                             c_size_t(self.digest_size * 2),
                                             0x01)
        if result:
            raise ValueError("Error %d while instantiating keccak" % result)
        # SmartPointer ties the C state's lifetime to this object so
        # keccak_destroy() runs exactly once when we are garbage-collected.
        self._state = SmartPointer(state.get(),
                                   _raw_keccak_lib.keccak_destroy)
        if data:
            self.update(data)
    def update(self, data):
        """Continue hashing of a message by consuming the next chunk of data.
        Repeated calls are equivalent to a single call with the concatenation
        of all the arguments. In other words:
        >>> m.update(a); m.update(b)
        is equivalent to:
        >>> m.update(a+b)
        :Parameters:
          data : byte string
            The next chunk of the message being hashed.
        :Return: this object itself, so calls can be chained.
        """
        # By default updating after the digest was taken is forbidden (the
        # sponge has already been squeezed); new(update_after_digest=True)
        # lifts this restriction.
        if self._digest_done and not self._update_after_digest:
            raise TypeError("You can only call 'digest' or 'hexdigest' on this object")
        # Reject anything that is not a proper byte string before calling C.
        expect_byte_string(data)
        result = _raw_keccak_lib.keccak_absorb(self._state.get(),
                                               data,
                                               c_size_t(len(data)))
        if result:
            raise ValueError("Error %d while updating keccak" % result)
        return self
    def digest(self):
        """Return the **binary** (non-printable) digest of the message that has been hashed so far.
        You cannot update the hash anymore after the first call to ``digest``
        (or ``hexdigest``), unless the object was created with
        ``update_after_digest=True``.
        :Return: A byte string of `digest_size` bytes.
         It may contain non-ASCII characters, including null bytes.
        """
        self._digest_done = True
        bfr = create_string_buffer(self.digest_size)
        result = _raw_keccak_lib.keccak_digest(self._state.get(),
                                               bfr,
                                               c_size_t(self.digest_size))
        if result:
            raise ValueError("Error %d while squeezing keccak" % result)
        return get_raw_buffer(bfr)
    def hexdigest(self):
        """Return the **printable** digest of the message that has been hashed so far.
        This method does not change the state of the hash object.
        :Return: A string of 2* `digest_size` characters. It contains only
         hexadecimal ASCII digits.
        """
        return "".join(["%02x" % bord(x) for x in self.digest()])
    def new(self, **kwargs):
        """Return a fresh hash object configured like this one.
        The digest size is inherited unless the caller overrides it with
        ``digest_bytes`` or ``digest_bits``.
        """
        if "digest_bytes" not in kwargs and "digest_bits" not in kwargs:
            kwargs["digest_bytes"] = self.digest_size
        return new(**kwargs)
def new(**kwargs):
    """Return a fresh instance of the hash object.
    :Keywords:
      data : byte string
        Optional. The very first chunk of the message to hash.
        It is equivalent to an early call to ``update()``.
      digest_bytes : integer
        The size of the digest, in bytes (28, 32, 48, 64).
      digest_bits : integer
        The size of the digest, in bits (224, 256, 384, 512).
      update_after_digest : boolean
        Optional. By default, a hash object cannot be updated anymore after
        the digest is computed. When this flag is ``True``, such check
        is no longer enforced.
    :Raises TypeError: if both or neither of the digest sizes are given,
        or if unknown keyword arguments remain.
    :Raises ValueError: if the requested digest size is not supported.
    :Return: A `Keccak_Hash` object
    """
    data = kwargs.pop("data", None)
    update_after_digest = kwargs.pop("update_after_digest", False)
    digest_bytes = kwargs.pop("digest_bytes", None)
    digest_bits = kwargs.pop("digest_bits", None)
    # Exactly one of the two size parameters must be supplied.
    if None not in (digest_bytes, digest_bits):
        raise TypeError("Only one digest parameter must be provided")
    if (None, None) == (digest_bytes, digest_bits):
        raise TypeError("Digest size (bits, bytes) not provided")
    if digest_bytes is not None:
        if digest_bytes not in (28, 32, 48, 64):
            raise ValueError("'digest_bytes' must be: 28, 32, 48 or 64")
    else:
        if digest_bits not in (224, 256, 384, 512):
            # BUGFIX: the original message misnamed this parameter as
            # 'digest_bytes' even though this branch validates 'digest_bits'.
            raise ValueError("'digest_bits' must be: 224, 256, 384 or 512")
        digest_bytes = digest_bits // 8
    if kwargs:
        raise TypeError("Unknown parameters: " + str(kwargs))
    return Keccak_Hash(data, digest_bytes, update_after_digest)
|
from typing import Dict
import tempfile
from dxlclient.client_config import DxlClientConfig
from dxlclient.client import DxlClient
from dxlclient.broker import Broker
from dxlclient.message import Event
import demistomock as demisto
from CommonServerPython import *
from CommonServerUserPython import *
INTEGRATION_NAME = "McAfee DXL"
# DxlClientConfig tuning applied in EventSender.get_client_config().
# NOTE(review): a single retry with short delays looks deliberate (fail
# fast inside a command invocation rather than hang) — confirm.
CONNECT_RETRIES = 1
RECONNECT_DELAY = 1
RECONNECT_DELAY_MAX = 10
class EventSender:
    """Sends reputation events to a McAfee DXL fabric.

    The PEM material from the integration parameters is written to temp
    files because the dxlclient API expects file *paths*, not contents.
    A connected DxlClient is opened in __init__ and closed in __del__.
    """

    # Maps the trust-level names exposed to the user onto the numeric
    # strings expected in the event payload.
    TRUST_LEVEL = {
        'NOT_SET': '0',
        'KNOWN_MALICIOUS': '1',
        'MOST_LIKELY_MALICIOUS': '15',
        'MIGHT_BE_MALICIOUS': '30',
        'UNKNOWN': '50',
        'MIGHT_BE_TRUSTED': '70',
        'MOST_LIKELY_TRUSTED': '85',
        'KNOWN_TRUSTED': '99',
        'KNOWN_TRUSTED_INSTALLER': '100'
    }

    # Temp file *names* reserved at class-definition time; the files are
    # (re)written with the configured PEM contents in __init__.
    broker_ca_bundle = tempfile.NamedTemporaryFile().name
    cert_file = tempfile.NamedTemporaryFile().name
    private_key = tempfile.NamedTemporaryFile().name

    def __init__(self, params: Dict):
        with open(self.broker_ca_bundle, "w") as text_file:
            text_file.write(params['broker_ca_bundle'])
        with open(self.cert_file, "w") as text_file:
            text_file.write(params['cert_file'])
        with open(self.private_key, "w") as text_file:
            text_file.write(params['private_key'])
        if 'broker_urls' in params:
            self.broker_urls = params['broker_urls'].split(',')
        # Default topics; each push_* method may override its own topic.
        self.push_ip_topic = params.get('push_ip_topic')
        self.push_url_topic = params.get('push_url_topic')
        self.push_domain_topic = params.get('push_domain_topic')
        self.push_hash_topic = params.get('push_hash_topic')
        self.client = DxlClient(self.get_client_config())
        self.client.connect()

    def __del__(self):
        # Guard: __init__ may have raised before self.client was assigned
        # (e.g. bad certificates), in which case there is nothing to close.
        client = getattr(self, 'client', None)
        if client is not None:
            client.disconnect()

    def push_ip(self, ip, trust_level, topic):
        """Push an IP reputation event; raises ValueError on a bad IP."""
        if not is_ip_valid(ip):
            raise ValueError(f'argument ip {ip} is not a valid IP')
        trust_level_key = self.TRUST_LEVEL[trust_level]
        if topic:
            self.push_ip_topic = topic
        self.send_event(self.push_ip_topic, f'ip:{ip};trust_level:{trust_level_key}')
        return f'Successfully pushed ip {ip} with trust level {trust_level}'

    def push_url(self, url, trust_level, topic):
        """Push a URL reputation event."""
        trust_level_key = self.TRUST_LEVEL[trust_level]
        if topic:
            self.push_url_topic = topic
        self.send_event(self.push_url_topic, f'url:{url};trust_level:{trust_level_key}')
        return f'Successfully pushed url {url} with trust level {trust_level}'

    def push_domain(self, domain, trust_level, topic):
        """Push a domain reputation event."""
        trust_level_key = self.TRUST_LEVEL[trust_level]
        if topic:
            self.push_domain_topic = topic
        self.send_event(self.push_domain_topic, f'domain:{domain};trust_level:{trust_level_key}')
        return f'Successfully pushed domain {domain} with trust level {trust_level}'

    def push_hash(self, hash_obj, trust_level, topic):
        """Push a file-hash reputation event."""
        trust_level_key = self.TRUST_LEVEL[trust_level]
        if topic:
            # Fixed: this previously assigned self.push_ip_topic, so an
            # explicit topic argument was silently ignored for hashes.
            self.push_hash_topic = topic
        self.send_event(self.push_hash_topic, f'hash:{hash_obj};trust_level:{trust_level_key}')
        return f'Successfully pushed hash {hash_obj} with trust level {trust_level}'

    def get_client_config(self):
        """Build the DxlClientConfig from the stored cert paths and brokers."""
        config = DxlClientConfig(
            broker_ca_bundle=self.broker_ca_bundle,
            cert_file=self.cert_file,
            private_key=self.private_key,
            brokers=[Broker.parse(url) for url in self.broker_urls]
        )
        config.connect_retries = CONNECT_RETRIES
        config.reconnect_delay = RECONNECT_DELAY
        config.reconnect_delay_max = RECONNECT_DELAY_MAX
        return config

    def send_event(self, topic, payload):
        """Publish *payload* on *topic*; raises when no topic is configured."""
        if not topic:
            raise Exception(f'Error in {demisto.command()} topic field is required')
        event = Event(topic)
        event.payload = str(payload).encode()
        self.client.send_event(event)

    def send_event_wrapper(self, topic, payload):
        """send_event variant returning a human-readable result string."""
        self.send_event(topic, payload)
        return 'Successfully sent event'
def validate_certificates_format():
    """Sanity-check the PEM markers of the three configured certificates.

    Each check calls return_error (which reports and aborts the command)
    with a message naming the offending parameter, so the checks run in
    order and the first failure wins. Called from main()'s exception
    handler because malformed PEM input is the most common failure mode.
    """
    if '-----BEGIN PRIVATE KEY-----' not in demisto.params()['private_key']:
        return_error(
            "The private key content seems to be incorrect as it doesn't start with -----BEGIN PRIVATE KEY-----")
    if '-----END PRIVATE KEY-----' not in demisto.params()['private_key']:
        return_error(
            "The private key content seems to be incorrect as it doesn't end with -----END PRIVATE KEY-----")
    if '-----BEGIN CERTIFICATE-----' not in demisto.params()['cert_file']:
        return_error("The client certificates content seem to be "
                     "incorrect as they don't start with '-----BEGIN CERTIFICATE-----'")
    if '-----END CERTIFICATE-----' not in demisto.params()['cert_file']:
        return_error(
            "The client certificates content seem to be incorrect as it doesn't end with -----END CERTIFICATE-----")
    # The CA bundle is checked positionally (start/end of the whole blob),
    # not by substring, hence the strip before the prefix/suffix tests.
    if not demisto.params()['broker_ca_bundle'].lstrip(" ").startswith('-----BEGIN CERTIFICATE-----'):
        return_error(
            "The broker certificate seem to be incorrect as they don't start with '-----BEGIN CERTIFICATE-----'")
    if not demisto.params()['broker_ca_bundle'].rstrip(" ").endswith('-----END CERTIFICATE-----'):
        return_error(
            "The broker certificate seem to be incorrect as they don't end with '-----END CERTIFICATE-----'")
def main():
    """Entry point: route the invoked Demisto command to an EventSender action."""
    args = demisto.args()
    command = demisto.command()
    try:
        sender = EventSender(demisto.params())
        # Command dispatch table; lambdas defer argument lookup until the
        # selected handler actually runs.
        handlers = {
            'dxl-send-event': lambda: sender.send_event_wrapper(
                args.get('topic'), args.get('payload')),
            'dxl-push-ip': lambda: sender.push_ip(
                args.get('ip'), args.get('trust_level'), args.get('topic')),
            'dxl-push-url': lambda: sender.push_url(
                args.get('url'), args.get('trust_level'), args.get('topic')),
            'dxl-push-domain': lambda: sender.push_domain(
                args.get('domain'), args.get('trust_level'), args.get('topic')),
            'dxl-push-hash': lambda: sender.push_hash(
                args.get('hash'), args.get('trust_level'), args.get('topic')),
        }
        if command == 'test-module':
            sender.send_event('TEST', 'test')
            result = 'ok'
        elif command in handlers:
            result = handlers[command]()
        else:
            raise Exception(f'{demisto.command()} is not a command')
        return_outputs(result)
    except Exception as error:
        # Certificate format problems are the usual culprit: report those
        # first (validate_certificates_format aborts on failure), then the
        # original error.
        validate_certificates_format()
        return_error(f'error in {INTEGRATION_NAME} {str(error)}.', error)
# Demisto executes integration code via exec(), so __name__ is
# '__builtin__' (Python 2) or 'builtins' (Python 3), not '__main__'.
if __name__ in ('__builtin__', 'builtins'):
    main()
|
"""Tests for wheel binary packages and .dist-info."""
import os
import pytest
from mock import patch, Mock
from pip._vendor import pkg_resources
from pip import pep425tags, wheel
from pip.exceptions import InvalidWheelFilename, UnsupportedWheel
from pip.utils import unpack_file
def test_get_entrypoints(tmpdir):
    """get_entrypoints parses a console_scripts section into (console, gui)."""
    entry_points = str(tmpdir.join("entry_points.txt"))
    with open(entry_points, "w") as fp:
        fp.write("""
[console_scripts]
pip = pip.main:pip
""")
    assert wheel.get_entrypoints(entry_points) == (
        {"pip": "pip.main:pip"},
        {},
    )
def test_uninstallation_paths():
    """uninstallation_paths yields every RECORD path plus a .pyc for each
    .py, and must be re-iterable (not a one-shot generator)."""
    class FakeDist(object):
        location = ''

        def get_metadata_lines(self, record):
            return ['file.py,,',
                    'file.pyc,,',
                    'file.so,,',
                    'nopyc.py']

    d = FakeDist()
    expected = ['file.py',
                'file.pyc',
                'file.so',
                'nopyc.py',
                'nopyc.pyc']
    assert list(wheel.uninstallation_paths(d)) == expected
    # Avoid an easy 'unique generator' bug: a second pass must match.
    assert list(wheel.uninstallation_paths(d)) == expected
def test_wheel_version(tmpdir, data):
    """wheel_version reads Wheel-Version from dist-info and returns a
    falsy value when the metadata is broken."""
    fixtures = [
        ('future', 'futurewheel-1.9-py2.py3-none-any.whl'),
        ('broken', 'brokenwheel-1.0-py2.py3-none-any.whl'),
    ]
    for subdir, filename in fixtures:
        unpack_file(data.packages.join(filename),
                    tmpdir + subdir, None, None)
    assert wheel.wheel_version(tmpdir + 'future') == (1, 9)
    assert not wheel.wheel_version(tmpdir + 'broken')
def test_check_compatibility():
    """check_compatibility raises only for a higher *major* wheel version."""
    name = 'test'
    major, minor = wheel.VERSION_COMPATIBLE
    # Higher major version - should be incompatible and raise with a
    # useful message.
    with pytest.raises(UnsupportedWheel) as e:
        wheel.check_compatibility((major + 1, minor), name)
    assert 'is not compatible' in str(e)
    # Higher minor version - should only log a warning.
    wheel.check_compatibility((major, minor + 1), name)
    # Equal version works fine.
    wheel.check_compatibility(wheel.VERSION_COMPATIBLE, name)
    # Lower version works fine, e.g. wheel 1.0 when we support up to 1.2.
    wheel.check_compatibility((major, max(0, minor - 1)), name)
class TestWheelFile(object):
    """Filename parsing and platform-support checks for wheel.Wheel."""

    def test_std_wheel_pattern(self):
        """A standard 5-part filename parses into its components."""
        w = wheel.Wheel('simple-1.1.1-py2-none-any.whl')
        assert w.name == 'simple'
        assert w.version == '1.1.1'
        assert w.pyversions == ['py2']
        assert w.abis == ['none']
        assert w.plats == ['any']

    def test_wheel_pattern_multi_values(self):
        """Dot-separated compressed tags expand into lists."""
        w = wheel.Wheel('simple-1.1-py2.py3-abi1.abi2-any.whl')
        assert w.name == 'simple'
        assert w.version == '1.1'
        assert w.pyversions == ['py2', 'py3']
        assert w.abis == ['abi1', 'abi2']
        assert w.plats == ['any']

    def test_wheel_with_build_tag(self):
        # pip doesn't do anything with build tags, but theoretically, we might
        # see one, in this case the build tag = '4'
        w = wheel.Wheel('simple-1.1-4-py2-none-any.whl')
        assert w.name == 'simple'
        assert w.version == '1.1'
        assert w.pyversions == ['py2']
        assert w.abis == ['none']
        assert w.plats == ['any']

    def test_single_digit_version(self):
        w = wheel.Wheel('simple-1-py2-none-any.whl')
        assert w.version == '1'

    def test_missing_version_raises(self):
        with pytest.raises(InvalidWheelFilename):
            wheel.Wheel('Cython-cp27-none-linux_x86_64.whl')

    def test_invalid_filename_raises(self):
        with pytest.raises(InvalidWheelFilename):
            wheel.Wheel('invalid.whl')

    def test_supported_single_version(self):
        """
        Test single-version wheel is known to be supported
        """
        w = wheel.Wheel('simple-0.1-py2-none-any.whl')
        assert w.supported(tags=[('py2', 'none', 'any')])

    def test_supported_multi_version(self):
        """
        Test multi-version wheel is known to be supported
        """
        w = wheel.Wheel('simple-0.1-py2.py3-none-any.whl')
        assert w.supported(tags=[('py3', 'none', 'any')])

    def test_not_supported_version(self):
        """
        Test unsupported wheel is known to be unsupported
        """
        w = wheel.Wheel('simple-0.1-py2-none-any.whl')
        assert not w.supported(tags=[('py1', 'none', 'any')])

    @patch('sys.platform', 'darwin')
    @patch('pip.pep425tags.get_abbr_impl', lambda: 'cp')
    @patch('pip.pep425tags.get_platform', lambda: 'macosx_10_9_intel')
    def test_supported_osx_version(self):
        """
        Wheels built for OS X 10.6 are supported on 10.9
        """
        tags = pep425tags.get_supported(['27'], False)
        w = wheel.Wheel('simple-0.1-cp27-none-macosx_10_6_intel.whl')
        assert w.supported(tags=tags)
        w = wheel.Wheel('simple-0.1-cp27-none-macosx_10_9_intel.whl')
        assert w.supported(tags=tags)

    @patch('sys.platform', 'darwin')
    @patch('pip.pep425tags.get_abbr_impl', lambda: 'cp')
    @patch('pip.pep425tags.get_platform', lambda: 'macosx_10_6_intel')
    def test_not_supported_osx_version(self):
        """
        Wheels built for OS X 10.9 are not supported on 10.6
        """
        tags = pep425tags.get_supported(['27'], False)
        w = wheel.Wheel('simple-0.1-cp27-none-macosx_10_9_intel.whl')
        assert not w.supported(tags=tags)

    @patch('sys.platform', 'darwin')
    @patch('pip.pep425tags.get_abbr_impl', lambda: 'cp')
    def test_supported_multiarch_darwin(self):
        """
        Multi-arch wheels (intel) are supported on components (i386, x86_64)
        """
        # Collect the supported-tag sets that each platform string yields.
        with patch('pip.pep425tags.get_platform',
                   lambda: 'macosx_10_5_universal'):
            universal = pep425tags.get_supported(['27'], False)
        with patch('pip.pep425tags.get_platform',
                   lambda: 'macosx_10_5_intel'):
            intel = pep425tags.get_supported(['27'], False)
        with patch('pip.pep425tags.get_platform',
                   lambda: 'macosx_10_5_x86_64'):
            x64 = pep425tags.get_supported(['27'], False)
        with patch('pip.pep425tags.get_platform',
                   lambda: 'macosx_10_5_i386'):
            i386 = pep425tags.get_supported(['27'], False)
        with patch('pip.pep425tags.get_platform',
                   lambda: 'macosx_10_5_ppc'):
            ppc = pep425tags.get_supported(['27'], False)
        with patch('pip.pep425tags.get_platform',
                   lambda: 'macosx_10_5_ppc64'):
            ppc64 = pep425tags.get_supported(['27'], False)

        w = wheel.Wheel('simple-0.1-cp27-none-macosx_10_5_intel.whl')
        assert w.supported(tags=intel)
        assert w.supported(tags=x64)
        assert w.supported(tags=i386)
        assert not w.supported(tags=universal)
        assert not w.supported(tags=ppc)
        assert not w.supported(tags=ppc64)
        w = wheel.Wheel('simple-0.1-cp27-none-macosx_10_5_universal.whl')
        assert w.supported(tags=universal)
        assert w.supported(tags=intel)
        assert w.supported(tags=x64)
        assert w.supported(tags=i386)
        assert w.supported(tags=ppc)
        assert w.supported(tags=ppc64)

    @patch('sys.platform', 'darwin')
    @patch('pip.pep425tags.get_abbr_impl', lambda: 'cp')
    def test_not_supported_multiarch_darwin(self):
        """
        Single-arch wheels (x86_64) are not supported on multi-arch (intel)
        """
        with patch('pip.pep425tags.get_platform',
                   lambda: 'macosx_10_5_universal'):
            universal = pep425tags.get_supported(['27'], False)
        with patch('pip.pep425tags.get_platform',
                   lambda: 'macosx_10_5_intel'):
            intel = pep425tags.get_supported(['27'], False)

        w = wheel.Wheel('simple-0.1-cp27-none-macosx_10_5_i386.whl')
        assert not w.supported(tags=intel)
        assert not w.supported(tags=universal)
        w = wheel.Wheel('simple-0.1-cp27-none-macosx_10_5_x86_64.whl')
        assert not w.supported(tags=intel)
        assert not w.supported(tags=universal)

    def test_support_index_min(self):
        """
        Test results from `support_index_min`
        """
        tags = [
            ('py2', 'none', 'TEST'),
            ('py2', 'TEST', 'any'),
            ('py2', 'none', 'any'),
        ]
        w = wheel.Wheel('simple-0.1-py2-none-any.whl')
        assert w.support_index_min(tags=tags) == 2
        w = wheel.Wheel('simple-0.1-py2-none-TEST.whl')
        assert w.support_index_min(tags=tags) == 0

    def test_support_index_min_none(self):
        """
        Test `support_index_min` returns None, when wheel not supported
        """
        w = wheel.Wheel('simple-0.1-py2-none-any.whl')
        assert w.support_index_min(tags=[]) is None

    def test_unpack_wheel_no_flatten(self):
        from pip import utils
        from tempfile import mkdtemp
        from shutil import rmtree

        filepath = '../data/packages/meta-1.0-py2.py3-none-any.whl'
        if not os.path.exists(filepath):
            pytest.skip("%s does not exist" % filepath)
        # Create the temp dir *before* the try block so the finally clause
        # cannot hit a NameError if mkdtemp itself fails.  (Also removed a
        # stray trailing `pass` after the try/finally.)
        tmpdir = mkdtemp()
        try:
            utils.unpack_file(filepath, tmpdir, 'application/zip', None)
            assert os.path.isdir(os.path.join(tmpdir, 'meta-1.0.dist-info'))
        finally:
            rmtree(tmpdir)

    def test_purelib_platlib(self, data):
        """
        Test the "wheel is purelib/platlib" code.
        """
        packages = [
            ("pure_wheel", data.packages.join("pure_wheel-1.7"), True),
            ("plat_wheel", data.packages.join("plat_wheel-1.7"), False),
        ]
        for name, path, expected in packages:
            assert wheel.root_is_purelib(name, path) == expected

    def test_version_underscore_conversion(self):
        """
        Test that we convert '_' to '-' for versions parsed out of wheel
        filenames
        """
        w = wheel.Wheel('simple-0.1_1-py2-none-any.whl')
        assert w.version == '0.1-1'
class TestPEP425Tags(object):
    """Regression tests for pip.pep425tags."""

    def test_broken_sysconfig(self):
        """
        Test that pep425tags still works when sysconfig is broken.
        Can be a problem on Python 2.7
        Issue #1074.
        """
        import pip.pep425tags

        def broken_get_config_var(var):
            raise IOError("I have the wrong path!")

        with patch('pip.pep425tags.sysconfig.get_config_var',
                   broken_get_config_var):
            assert len(pip.pep425tags.get_supported())
class TestMoveWheelFiles(object):
    """
    Tests for moving files from wheel src to scheme paths
    """

    def prep(self, data, tmpdir):
        # Shared fixture: unpack the sample wheel into self.src and point
        # an install "scheme" at directories under self.dest.  Not a pytest
        # fixture so each test can pass its own tmpdir explicitly.
        self.name = 'sample'
        self.wheelpath = data.packages.join(
            'sample-1.2.0-py2.py3-none-any.whl')
        self.req = pkg_resources.Requirement.parse('sample')
        self.src = os.path.join(tmpdir, 'src')
        self.dest = os.path.join(tmpdir, 'dest')
        unpack_file(self.wheelpath, self.src, None, None)
        self.scheme = {
            'scripts': os.path.join(self.dest, 'bin'),
            'purelib': os.path.join(self.dest, 'lib'),
            'data': os.path.join(self.dest, 'data'),
        }
        self.src_dist_info = os.path.join(
            self.src, 'sample-1.2.0.dist-info')
        self.dest_dist_info = os.path.join(
            self.scheme['purelib'], 'sample-1.2.0.dist-info')

    def assert_installed(self):
        """Verify lib, dist-info, data files and package data all landed."""
        # lib
        assert os.path.isdir(
            os.path.join(self.scheme['purelib'], 'sample'))
        # dist-info
        metadata = os.path.join(self.dest_dist_info, 'METADATA')
        assert os.path.isfile(metadata)
        # data files
        data_file = os.path.join(self.scheme['data'], 'my_data', 'data_file')
        assert os.path.isfile(data_file)
        # package data
        pkg_data = os.path.join(
            self.scheme['purelib'], 'sample', 'package_data.dat')
        assert os.path.isfile(pkg_data)

    def test_std_install(self, data, tmpdir):
        # Plain install of the unpacked wheel into the scheme.
        self.prep(data, tmpdir)
        wheel.move_wheel_files(
            self.name, self.req, self.src, scheme=self.scheme)
        self.assert_installed()

    def test_dist_info_contains_empty_dir(self, data, tmpdir):
        """
        Test that empty dirs are not installed
        """
        # e.g. https://github.com/pypa/pip/issues/1632#issuecomment-38027275
        self.prep(data, tmpdir)
        src_empty_dir = os.path.join(
            self.src_dist_info, 'empty_dir', 'empty_dir')
        os.makedirs(src_empty_dir)
        assert os.path.isdir(src_empty_dir)
        wheel.move_wheel_files(
            self.name, self.req, self.src, scheme=self.scheme)
        self.assert_installed()
        assert not os.path.isdir(
            os.path.join(self.dest_dist_info, 'empty_dir'))
class TestWheelBuilder(object):
    """WheelBuilder must skip requirements that need no build."""

    @staticmethod
    def _build_with(req):
        """Run WheelBuilder over a req-set holding only *req*; return the
        patched _build_one so callers can assert it was never invoked."""
        with patch('pip.wheel.WheelBuilder._build_one') as mock_build_one:
            reqset = Mock(requirements=Mock(values=lambda: [req]),
                          wheel_download_dir='/wheel/dir')
            wheel.WheelBuilder(reqset, Mock()).build()
            return mock_build_one

    def test_skip_building_wheels(self, caplog):
        mock_build_one = self._build_with(Mock(is_wheel=True, editable=False))
        assert "due to already being wheel" in caplog.text()
        assert mock_build_one.mock_calls == []

    def test_skip_building_editables(self, caplog):
        mock_build_one = self._build_with(Mock(editable=True, is_wheel=False))
        assert "due to being editable" in caplog.text()
        assert mock_build_one.mock_calls == []
|
import BoostBuild
t = BoostBuild.Tester(pass_toolset=0, ignore_toolset_requirements=False)
t.write('jamroot.jam', '''
import toolset ;
import errors ;
rule test-rule ( properties * )
{
return <define>TEST_INDIRECT_CONDITIONAL ;
}
toolset.add-requirements
<define>TEST_MACRO
<conditional>@test-rule
<link>shared:<define>TEST_CONDITIONAL
;
rule check-requirements ( target : sources * : properties * )
{
local macros = TEST_MACRO TEST_CONDITIONAL TEST_INDIRECT_CONDITIONAL ;
for local m in $(macros)
{
if ! <define>$(m) in $(properties)
{
errors.error $(m) not defined ;
}
}
}
make test : : @check-requirements ;
''')
t.run_build_system()
t.cleanup()
|
import os
import sys
from Bio import SeqIO

# Convert a FASTQ file (argv[1]) to FASTA (argv[2]), record by record.
# Context managers replace the original unclosed handles (the output file
# was never closed, risking truncated writes) and the deprecated 'rU'
# open mode is replaced by plain text mode.
with open(sys.argv[1]) as in_handle, open(sys.argv[2], 'w') as out_handle:
    for record in SeqIO.parse(in_handle, 'fastq'):
        SeqIO.write(record, out_handle, 'fasta')
|
'''
The MIT License (MIT)
GrovePi for the Raspberry Pi: an open source platform for connecting Grove Sensors to the Raspberry Pi.
Copyright (C) 2015 Dexter Industries
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
import time
import grovepi

# Grove Electricity Sensor connected to analog port A0.
sensor = 0
grovepi.pinMode(sensor, "INPUT")

# Reference voltage of the Grove interface (volts).
grove_vcc = 5

while True:
    try:
        # Get sensor value (10-bit ADC reading, 0-1023)
        sensor_value = grovepi.analogRead(sensor)

        # Calculate amplitude current (mA).
        # Fixed: the original wrote "(float)(sensor_value / 1024 * ...)",
        # a C-style cast applied *after* the arithmetic — under Python 2
        # the integer division sensor_value / 1024 truncated to 0, so the
        # result was always 0.0.  Converting the reading to float first
        # makes the expression correct on both Python 2 and 3.
        amplitude_current = float(sensor_value) / 1024 * grove_vcc / 800 * 2000000

        # Calculate effective (RMS) value (mA)
        effective_value = amplitude_current / 1.414
        # minimum_current = 1 / 1024 * grove_vcc / 800 * 2000000 / 1.414 = 8.6(mA)
        # Only for sinusoidal alternating current

        print("sensor_value", sensor_value)
        print("The amplitude of the current is", amplitude_current, "mA")
        print("The effective value of the current is", effective_value, "mA")

        time.sleep(1)
    except IOError:
        print("Error")
|
import argparse
import asyncio
import gc
import os.path
import pathlib
import socket
import ssl
# When truthy, per-connection diagnostics are printed (set from --print
# in the __main__ block below).
PRINT = 0
async def echo_server(loop, address, unix):
    """Accept connections on *address* forever, spawning an echo_client
    task per connection.  *unix* selects AF_UNIX over AF_INET."""
    family = socket.AF_UNIX if unix else socket.AF_INET
    sock = socket.socket(family, socket.SOCK_STREAM)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.bind(address)
    sock.listen(5)
    # Non-blocking is required for loop.sock_accept / sock_recv.
    sock.setblocking(False)
    if PRINT:
        print('Server listening at', address)
    with sock:
        while True:
            client, addr = await loop.sock_accept(sock)
            if PRINT:
                print('Connection from', addr)
            loop.create_task(echo_client(loop, client))
async def echo_client(loop, client):
    """Echo everything read from *client* back until EOF, then close it."""
    try:
        # Best effort: disable Nagle.  AF_UNIX sockets (or a platform
        # without TCP_NODELAY) raise here, which we deliberately ignore.
        client.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
    except (OSError, NameError):
        pass
    with client:
        while True:
            chunk = await loop.sock_recv(client, 1000000)
            if not chunk:
                break
            await loop.sock_sendall(client, chunk)
    if PRINT:
        print('Connection closed')
async def echo_client_streams(reader, writer):
    """Stream-API echo handler: copy reader to writer until EOF."""
    sock = writer.get_extra_info('socket')
    try:
        # Best effort: disable Nagle; ignored for AF_UNIX sockets.
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
    except (OSError, NameError):
        pass
    if PRINT:
        print('Connection from', sock.getpeername())
    while True:
        chunk = await reader.read(1000000)
        if not chunk:
            break
        writer.write(chunk)
    if PRINT:
        print('Connection closed')
    writer.close()
class EchoProtocol(asyncio.Protocol):
    """Callback-style protocol: every received chunk is written straight
    back on the transport."""

    def connection_made(self, transport):
        # Keep the transport so data_received can write to it.
        self.transport = transport

    def connection_lost(self, exc):
        # Drop the reference; the transport is no longer usable.
        self.transport = None

    def data_received(self, data):
        self.transport.write(data)
class EchoBufferedProtocol(asyncio.BufferedProtocol):
    """Buffered-protocol echo: the loop reads directly into our buffer and
    buffer_updated() writes the filled prefix back."""

    def connection_made(self, transport):
        self.transport = transport
        # The slice taken in buffer_updated() copies the bytes, so the
        # outgoing data cannot be clobbered by the next read into buffer.
        self.buffer = bytearray(256 * 1024)

    def connection_lost(self, exc):
        self.transport = None

    def get_buffer(self, sizehint):
        # Always hand back the same reusable buffer, whatever the hint.
        return self.buffer

    def buffer_updated(self, nbytes):
        # Echo exactly the prefix the loop just filled.
        self.transport.write(self.buffer[:nbytes])
async def print_debug(loop):
    """Every 0.5s clear the terminal and dump the loop's debug counters."""
    clear_screen = chr(27) + "[2J"  # ANSI escape: erase entire display
    while True:
        print(clear_screen)
        loop.print_debug_info()
        await asyncio.sleep(0.5)
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    # Loop implementation and server flavour under benchmark.
    parser.add_argument('--uvloop', default=False, action='store_true')
    parser.add_argument('--streams', default=False, action='store_true')
    parser.add_argument('--proto', default=False, action='store_true')
    parser.add_argument('--addr', default='127.0.0.1:25000', type=str)
    parser.add_argument('--print', default=False, action='store_true')
    parser.add_argument('--ssl', default=False, action='store_true')
    parser.add_argument('--buffered', default=False, action='store_true')
    args = parser.parse_args()

    # Pick the event loop implementation under test.
    if args.uvloop:
        import uvloop
        loop = uvloop.new_event_loop()
        print('using UVLoop')
    else:
        loop = asyncio.new_event_loop()
        print('using asyncio loop')

    asyncio.set_event_loop(loop)
    loop.set_debug(False)

    if args.print:
        PRINT = 1

    # When the loop exposes print_debug_info, run the periodic debug dump
    # and force per-connection prints off so the two outputs don't
    # interleave (this deliberately overrides --print).
    if hasattr(loop, 'print_debug_info'):
        loop.create_task(print_debug(loop))
        PRINT = 0

    # "file:<path>" selects an AF_UNIX socket; otherwise host:port TCP.
    unix = False
    if args.addr.startswith('file:'):
        unix = True
        addr = args.addr[5:]
        if os.path.exists(addr):
            os.remove(addr)  # stale socket file from a previous run
    else:
        addr = args.addr.split(':')
        addr[1] = int(addr[1])
        addr = tuple(addr)

    print('serving on: {}'.format(addr))

    server_context = None
    if args.ssl:
        print('with SSL')
        # PROTOCOL_TLS replaced PROTOCOL_SSLv23; fall back on old Pythons.
        if hasattr(ssl, 'PROTOCOL_TLS'):
            server_context = ssl.SSLContext(ssl.PROTOCOL_TLS)
        else:
            server_context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        server_context.load_cert_chain(
            (pathlib.Path(__file__).parent.parent.parent /
                'tests' / 'certs' / 'ssl_cert.pem'),
            (pathlib.Path(__file__).parent.parent.parent /
                'tests' / 'certs' / 'ssl_key.pem'))
        if hasattr(server_context, 'check_hostname'):
            server_context.check_hostname = False
        server_context.verify_mode = ssl.CERT_NONE

    # Three mutually exclusive server flavours: streams API, protocol
    # classes, or raw sock_recv/sock_sendall.
    if args.streams:
        if args.proto:
            print('cannot use --stream and --proto simultaneously')
            exit(1)
        if args.buffered:
            print('cannot use --stream and --buffered simultaneously')
            exit(1)
        print('using asyncio/streams')
        if unix:
            coro = asyncio.start_unix_server(echo_client_streams,
                                             addr,
                                             ssl=server_context)
        else:
            coro = asyncio.start_server(echo_client_streams,
                                        *addr,
                                        ssl=server_context)
        srv = loop.run_until_complete(coro)
    elif args.proto:
        if args.streams:
            print('cannot use --stream and --proto simultaneously')
            exit(1)
        if args.buffered:
            print('using buffered protocol')
            protocol = EchoBufferedProtocol
        else:
            print('using simple protocol')
            protocol = EchoProtocol
        if unix:
            coro = loop.create_unix_server(protocol, addr,
                                           ssl=server_context)
        else:
            coro = loop.create_server(protocol, *addr,
                                      ssl=server_context)
        srv = loop.run_until_complete(coro)
    else:
        # Raw socket methods do not support TLS.
        if args.ssl:
            print('cannot use SSL for loop.sock_* methods')
            exit(1)
        print('using sock_recv/sock_sendall')
        loop.create_task(echo_server(loop, addr, unix))
    try:
        loop.run_forever()
    finally:
        # Dump final counters (when available) before closing the loop.
        if hasattr(loop, 'print_debug_info'):
            gc.collect()
            print(chr(27) + "[2J")
            loop.print_debug_info()
        loop.close()
|
from headers.BeaEnginePython import *
from nose.tools import *
class TestSuite:

    def test(self):
        """Decode opcode 0x25 (vpternlogd/vpternlogq) for every EVEX
        vector-length / W-bit combination and check mnemonic and operands."""
        cases = [
            # (EVEX spec, expected mnemonic, expected disassembly)
            ('EVEX.256.66.0F3A.W0', b'vpternlogd',
             'vpternlogd ymm28, ymm16, ymmword ptr [r8], 11h'),
            ('EVEX.512.66.0F3A.W0', b'vpternlogd',
             'vpternlogd zmm28, zmm16, zmmword ptr [r8], 11h'),
            ('EVEX.256.66.0F3A.W1', b'vpternlogq',
             'vpternlogq ymm28, ymm16, ymmword ptr [r8], 11h'),
            ('EVEX.512.66.0F3A.W1', b'vpternlogq',
             'vpternlogq zmm28, zmm16, zmmword ptr [r8], 11h'),
        ]
        for evex_spec, mnemonic, expected in cases:
            myEVEX = EVEX(evex_spec)
            # prefix + opcode 25 + modrm 20 + imm8 11
            Buffer = bytes.fromhex('{}252011'.format(myEVEX.prefix()))
            myDisasm = Disasm(Buffer)
            myDisasm.read()
            assert_equal(myDisasm.infos.Instruction.Opcode, 0x25)
            assert_equal(myDisasm.infos.Instruction.Mnemonic, mnemonic)
            assert_equal(myDisasm.repr(), expected)
|
import flask
from donut import auth_utils
from donut.modules.account import blueprint, helpers
@blueprint.route("/request")
def request_account():
    """Provides a form to request an account.

    Renders request_account.html; the form posts to request_account_submit.
    """
    return flask.render_template("request_account.html")
@blueprint.route("/request/submit", methods=["POST"])
def request_account_submit():
    """Handles an account creation request.

    Validates the posted uid/last_name pair, delegates to the helper,
    and redirects with a flash message describing the outcome.
    """
    form = flask.request.form
    uid = form.get("uid", None)
    last_name = form.get("last_name", None)
    if uid is None or last_name is None:
        flask.flash("Invalid request.")
        return flask.redirect(flask.url_for("account.request_account"))

    success, error_msg = helpers.handle_request_account(uid, last_name)
    if not success:
        flask.flash(error_msg)
        return flask.redirect(flask.url_for("account.request_account"))

    flask.flash(
        "An email has been sent with a link to create your account.")
    return flask.redirect(flask.url_for("home"))
@blueprint.route("/create/<create_account_key>")
def create_account(create_account_key):
    """Checks the key. If valid, displays the create account page.

    The key arrives via the emailed link and maps back to a user ID only
    while it is still valid.
    """
    user_id = auth_utils.check_create_account_key(create_account_key)
    if user_id is None:
        # Fixed: logger.warn is a deprecated alias of logger.warning.
        flask.current_app.logger.warning(
            f'Invalid create_account_key: {create_account_key}')
        flask.flash("Invalid request. Please check your link and try again.")
        return flask.redirect(flask.url_for("home"))
    user_data = helpers.get_user_data(user_id)
    if user_data is None:
        flask.flash("An unexpected error occurred. Please contact DevTeam.")
        return flask.redirect(flask.url_for("home"))
    return flask.render_template(
        "create_account.html", user_data=user_data, key=create_account_key)
@blueprint.route("/create/<create_account_key>/submit", methods=["POST"])
def create_account_submit(create_account_key):
    """Handles a create account request.

    Re-validates the key, checks the posted form, and delegates account
    creation to the helper (which flashes its own error messages).
    """
    user_id = auth_utils.check_create_account_key(create_account_key)
    if user_id is None:
        # Key is invalid.
        # Fixed: logger.warn is a deprecated alias of logger.warning.
        flask.current_app.logger.warning(
            f'Invalid create_account_key: {create_account_key}')
        flask.flash("Someone's been naughty.")
        return flask.redirect(flask.url_for("home"))
    username = flask.request.form.get("username", None)
    password = flask.request.form.get("password", None)
    password2 = flask.request.form.get("password2", None)
    if username is None \
            or password is None \
            or password2 is None:
        flask.current_app.logger.warning(
            f'Invalid create account form for user ID {user_id}')
        flask.flash("Invalid request.")
        return flask.redirect(flask.url_for("home"))
    if helpers.handle_create_account(user_id, username, password, password2):
        flask.session['username'] = username
        flask.current_app.logger.info(
            f'Created account with username {username} for user ID {user_id}')
        flask.flash("Account successfully created.")
        return flask.redirect(flask.url_for("home"))
    else:
        # Flashes already handled.
        return flask.redirect(
            flask.url_for(
                "account.create_account",
                create_account_key=create_account_key))
|
from re import sub
from itertools import islice
'''
How to reformat date strings inside a text file
'''
with open("./log.log", "r") as f:
    for line in islice(f, 0, None):
        # Positional-group version:
        # print(sub(r"(\d{4})-(\d{2})-(\d{2})", r"\2/\3/\1", line))
        # Each match group can also be named and referenced with \g<name>.
        # Fixed: the replacement previously ended with the empty reference
        # "\g<>", which raises "bad character in group name" at runtime —
        # it must be \g<year>.  Patterns are now raw strings.
        print(sub(r"(?P<year>\d{4})-(?P<month>\d{2})-(?P<day>\d{2})",
                  r"\g<month>/\g<day>/\g<year>", line))
|
'''
The MIT License (MIT)
GrovePi for the Raspberry Pi: an open source platform for connecting Grove Sensors to the Raspberry Pi.
Copyright (C) 2015 Dexter Industries
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
import time
import grovepi

# Grove Slide Potentiometer on analog port A0.  The module carries its
# own LED, driven here through the second signal wire.
slide = 0  # pin 1 (yellow wire)
led = 1  # pin 2 (white wire)

grovepi.pinMode(slide,"INPUT")
grovepi.pinMode(led,"OUTPUT")
# Give the firmware a moment to apply the pin modes before polling.
time.sleep(1)

while True:
    try:
        # Read sensor value from potentiometer
        sensor_value = grovepi.analogRead(slide)

        # Illuminate onboard LED
        # NOTE(review): 500 is roughly half the 0-1023 ADC range —
        # presumably "LED on above mid-travel"; confirm intent.
        if sensor_value > 500:
            grovepi.digitalWrite(led,1)
        else:
            grovepi.digitalWrite(led,0)

        print("sensor_value =", sensor_value)
    except IOError:
        print ("Error")
|
import glob, os, sys
import sipconfig
from PyQt4 import pyqtconfig
def get_diana_version():
    """Return the version suffix of the "diana-<version>" package named on
    a Depends: line of debian/control, or None if no such package."""
    for line in open("debian/control").readlines():
        if not line.startswith("Depends:"):
            continue
        for piece in line.split():
            name_pieces = piece.strip(",").split("-")
            if len(name_pieces) == 2 and name_pieces[0] == "diana":
                return name_pieces[1]
    return None
def get_python_diana_version():
    """Return the python-diana version from the first debian/changelog entry.

    The first changelog line looks like ``python-diana (X.Y-Z) unstable; ...``;
    the version is the second token with its surrounding parentheses stripped.
    """
    # 'with' closes the file handle (the original leaked it)
    with open("debian/changelog") as changelog:
        line = changelog.readline()
    pieces = line.split()
    return pieces[1][1:-1]
if __name__ == "__main__":
    # Build script: runs SIP over each module's .sip file, generates a
    # Makefile per module, writes the metno version file and a parent
    # Makefile.  Optional argv: diana include/lib dirs, then metlibs
    # include/lib dirs; defaults target a system-wide install.
    if len(sys.argv) not in (1, 3, 5):
        sys.stderr.write("Usage: %s [<directory containing diana headers> <directory containing libdiana>] "
                         "[<directory containing metlibs headers> <directory containing metlibs libraries>]\n" % sys.argv[0])
        sys.exit(1)

    if len(sys.argv) == 5:
        metlibs_inc_dir = sys.argv[3]
        metlibs_lib_dir = sys.argv[4]
    else:
        metlibs_inc_dir = "/usr/include/metlibs"
        metlibs_lib_dir = "/usr/lib"

    if len(sys.argv) >= 3:
        diana_inc_dir = sys.argv[1]
        diana_lib_dir = sys.argv[2]
    else:
        diana_inc_dir = "/usr/include/diana"
        diana_lib_dir = "/usr/lib"

    # Destination directories come from the environment (set by the
    # packaging scripts); NOTE(review): os.path.join fails with a
    # TypeError if python_diana_pkg_dir is unset
    qt_pkg_dir = os.getenv("qt_pkg_dir")
    python_diana_pkg_dir = os.getenv("python_diana_pkg_dir")
    dest_pkg_dir = os.path.join(python_diana_pkg_dir, "metno")

    config = pyqtconfig.Configuration()

    # The name of the SIP build file generated by SIP and used by the build
    # system.
    sip_files_dir = "sip"
    modules = ["std", "metlibs", "diana"]

    if not os.path.exists("modules"):
        os.mkdir("modules")

    # Run SIP to generate the code.
    output_dirs = []
    for module in modules:
        output_dir = os.path.join("modules", module)
        build_file = module + ".sbf"
        build_path = os.path.join(output_dir, build_file)
        if not os.path.exists(output_dir):
            os.mkdir(output_dir)
        sip_file = os.path.join("sip", module, module+".sip")
        command = " ".join([config.sip_bin,
                            "-c", output_dir,
                            "-b", build_path,
                            "-I"+config.sip_inc_dir,
                            "-I"+config.pyqt_sip_dir,
                            "-I"+diana_inc_dir,
                            "-I/usr/include",
                            "-I"+metlibs_inc_dir,
                            "-I"+qt_pkg_dir+"/include",
                            "-I"+qt_pkg_dir+"/share/sip/PyQt4",
                            "-Isip",
                            config.pyqt_sip_flags,
                            "-w",
                            "-o",  # generate docstrings for signatures
                            sip_file])
        sys.stdout.write(command+"\n")
        sys.stdout.flush()
        # Abort the whole build if SIP fails for any module
        if os.system(command) != 0:
            sys.exit(1)

        # Create the Makefile (within the diana directory).
        makefile = pyqtconfig.QtGuiModuleMakefile(
            config, build_file, dir=output_dir,
            install_dir=dest_pkg_dir,
            qt=["QtCore", "QtGui", "QtNetwork", "QtXml", "QtXmlPatterns"]
        )
        # Module-specific include and link settings
        if module == "diana":
            makefile.extra_include_dirs += [
                diana_inc_dir,
                os.path.join(diana_inc_dir, "PaintGL"),
                metlibs_inc_dir,
                qt_pkg_dir+"/include"
            ]
            makefile.extra_lib_dirs += [diana_lib_dir, qt_pkg_dir+"/lib"]
            makefile.extra_lflags += ["-Wl,-rpath="+diana_lib_dir, "-Wl,-fPIC"]
            makefile.extra_libs += ["diana"]
        if module == "metlibs":
            makefile.extra_include_dirs.append(diana_inc_dir)
            makefile.extra_include_dirs.append("/usr/include/metlibs")
            makefile.extra_lib_dirs += [diana_lib_dir, "/usr/lib", metlibs_lib_dir, qt_pkg_dir+"/lib"]
            makefile.extra_lflags += ["-Wl,-rpath="+diana_lib_dir, "-Wl,-fPIC"]
            makefile.extra_libs += ["miLogger", "coserver", "diana"]
        makefile.generate()
        output_dirs.append(output_dir)

    # Update the metno package version.
    diana_version = get_diana_version()
    python_diana_version = get_python_diana_version()

    if not diana_version or not python_diana_version:
        sys.stderr.write("Failed to find version information for Diana (%s) "
                         "or python-diana (%s)\n" % (repr(diana_version),
                                                     repr(python_diana_version)))
        sys.exit(1)

    # NOTE(review): f is never explicitly closed/flushed; relies on
    # interpreter exit to write the file out
    f = open("python/metno/versions.py", "w")
    f.write('\ndiana_version = "%s"\npython_diana_version = "%s"\n' % (
        diana_version, python_diana_version))

    # Generate the top-level Makefile.
    python_files = glob.glob(os.path.join("python", "metno", "*.py"))
    sipconfig.ParentMakefile(
        configuration = config,
        subdirs = output_dirs,
        installs = [(python_files, dest_pkg_dir)]
    ).generate()

    sys.exit()
|
from datetime import datetime
from flask import current_app
from flask.cli import with_appcontext
from invenio_db import db
from hepdata.cli import fix
from hepdata.ext.elasticsearch.api import index_record_ids, push_data_keywords
from hepdata.modules.submission.models import HEPSubmission, DataSubmission
from hepdata.modules.records.utils.common import get_record_by_id
from hepdata.modules.records.utils.doi_minter import generate_doi_for_table
from hepdata.modules.records.utils.submission import finalise_datasubmission
@fix.command()
@with_appcontext
def create_missing_datasubmission_records():
    """Finalise DataSubmission rows that lack record/inspire ids.

    Considers only data submissions whose parent HEPSubmission (same
    publication and version) is 'finished'.  For each affected
    publication: finalises its submissions, mints table DOIs (unless the
    app is in TESTING mode), commits once, then reindexes the
    publication together with the newly generated records.
    """
    # Get submissions with missing IDs
    missing_submissions = DataSubmission.query \
        .join(HEPSubmission, HEPSubmission.publication_recid == DataSubmission.publication_recid) \
        .filter(
            DataSubmission.associated_recid == None,
            DataSubmission.publication_inspire_id == None,
            DataSubmission.version == HEPSubmission.version,
            HEPSubmission.overall_status == 'finished')
    missing_submissions = missing_submissions.all()

    if not missing_submissions:
        print("No datasubmissions found with missing record or inspire ids.")
        return

    # Organise missing submissions by publication
    submissions_by_publication = {}
    for submission in missing_submissions:
        if submission.publication_recid in submissions_by_publication:
            submissions_by_publication[submission.publication_recid].append(submission)
        else:
            submissions_by_publication[submission.publication_recid] = [submission]

    # Loop through each publication
    for publication_recid, submissions in submissions_by_publication.items():
        publication_record = get_record_by_id(publication_recid)
        current_time = "{:%Y-%m-%d %H:%M:%S}".format(datetime.utcnow())
        # finalise_datasubmission appends each new record id to this list
        generated_record_ids = []
        for submission in submissions:
            # Finalise each data submission that does not have a record
            finalise_datasubmission(current_time, {},
                                    generated_record_ids,
                                    publication_record, publication_recid,
                                    submission,
                                    submission.version)

            # Register the datasubmission's DOI asynchronously (skipped in tests)
            if not current_app.config.get('TESTING', False):
                generate_doi_for_table.delay(submission.doi)
                print(f"Generated DOI {submission.doi}")
            else:
                print(f"Would generate DOI {submission.doi}")

        # finalise_datasubmission does not commit, so commit once for each publication
        db.session.commit()

        # Reindex the publication and its updated datasubmissions
        index_record_ids([publication_recid] + generated_record_ids)
        push_data_keywords(pub_ids=[publication_recid])
|
"""
Small event module
=======================
"""
import numpy as np
import logging
logger = logging.getLogger(__name__)
from ...utils.decorators import face_lookup
from ...geometry.sheet_geometry import SheetGeometry
from ...topology.sheet_topology import cell_division
from .actions import (
exchange,
remove,
merge_vertices,
detach_vertices,
increase,
decrease,
increase_linear_tension,
)
def reconnect(sheet, manager, **kwargs):
    """Performs reconnections (vertex merging / splitting) following Finegan et al. 2019

    kwargs overwrite their corresponding `sheet.settings` entries

    Keyword Arguments
    -----------------
    threshold_length : the threshold length at which vertex merging is performed
    p_4 : the probability per unit time to perform a detachement from a rank 4 vertex
    p_5p : the probability per unit time to perform a detachement from a rank 5 or more vertex

    See Also
    --------
    **The tricellular vertex-specific adhesion molecule Sidekick
    facilitates polarised cell intercalation during Drosophila axis
    extension** _Tara M Finegan, Nathan Hervieux, Alexander
    Nestor-Bergmann, Alexander G. Fletcher, Guy B Blanchard, Benedicte
    Sanson_ bioRxiv 704932; doi: https://doi.org/10.1101/704932
    """
    # kwargs override the sheet-level settings read by the actions below
    sheet.settings.update(kwargs)
    nv = sheet.Nv
    merge_vertices(sheet)
    if nv != sheet.Nv:
        # NOTE(review): the "+1" assumes one merge event collapsing
        # (nv - sheet.Nv + 1) vertices into a single vertex -- confirm
        # the count is right when several merges happen in one pass
        logger.info(f"Merged {nv - sheet.Nv+1} vertices")
    nv = sheet.Nv
    retval = detach_vertices(sheet)
    if retval:
        # a truthy return value signals the detachment failed
        logger.info("Failed to detach, skipping")
    if nv != sheet.Nv:
        logger.info(f"Detached {sheet.Nv - nv} vertices")
    # re-register so reconnection is attempted again at the next time step
    manager.append(reconnect, **kwargs)
default_division_spec = {
    "face_id": -1,
    "face": -1,
    "growth_rate": 0.1,
    "critical_vol": 2.0,
    "geom": SheetGeometry,
}


@face_lookup
def division(sheet, manager, **kwargs):
    """Cell division happens through cell growth up to a critical volume,
    followed by actual division of the face.

    Parameters
    ----------
    sheet : a `Sheet` object
    manager : an `EventManager` instance
    face_id : int,
      index of the mother face
    growth_rate : float, default 0.1
      rate of increase of the prefered volume
    critical_vol : float, default 2.
      volume at which the cells stops to grow and devides
    """
    # BUG FIX: the original aliased the module-level dict
    # (division_spec = default_division_spec), so every call mutated the
    # shared defaults -- notably "critical_vol" was multiplied by
    # prefered_vol again on each invocation.  Copy before updating.
    division_spec = default_division_spec.copy()
    division_spec.update(**kwargs)

    face = division_spec["face"]
    # Compute the threshold locally instead of writing it back into the
    # spec, so the spec re-queued below keeps the unscaled value.
    critical_vol = division_spec["critical_vol"] * sheet.specs["face"]["prefered_vol"]
    # debug print replaced with module logger
    logger.debug("face %s vol: %s, critical vol: %s",
                 face, sheet.face_df.loc[face, "vol"], critical_vol)
    if sheet.face_df.loc[face, "vol"] < critical_vol:
        increase(
            sheet, "face", face, division_spec["growth_rate"], "prefered_vol", True
        )
        # still growing: re-register the event for the next time step
        manager.append(division, **division_spec)
    else:
        daughter = cell_division(sheet, face, division_spec["geom"])
        # give the daughter a fresh unique id
        sheet.face_df.loc[daughter, "id"] = sheet.face_df.id.max() + 1
default_contraction_spec = {
    "face_id": -1,
    "face": -1,
    "contractile_increase": 1.0,
    "critical_area": 1e-2,
    "max_contractility": 10,
    "multiply": False,
    "contraction_column": "contractility",
    "unique": True,
}


@face_lookup
def contraction(sheet, manager, **kwargs):
    """Single step contraction event.

    Increases the face's contractility column unless the face is already
    below the critical area or above the maximum contractility.
    """
    # BUG FIX: copy the defaults -- the original aliased the module-level
    # dict, so kwargs from one event leaked into every later event.
    contraction_spec = default_contraction_spec.copy()
    contraction_spec.update(**kwargs)
    face = contraction_spec["face"]
    # Skip faces that are too small or already maximally contractile
    if (sheet.face_df.loc[face, "area"] < contraction_spec["critical_area"]) or (
        sheet.face_df.loc[face, contraction_spec["contraction_column"]]
        > contraction_spec["max_contractility"]
    ):
        return
    increase(
        sheet,
        "face",
        face,
        contraction_spec["contractile_increase"],
        contraction_spec["contraction_column"],
        contraction_spec["multiply"],
    )
default_type1_transition_spec = {
    "face_id": -1,
    "face": -1,
    "critical_length": 0.1,
    "geom": SheetGeometry,
}


@face_lookup
def type1_transition(sheet, manager, **kwargs):
    """Custom type 1 transition event that tests if
    the shorter edge of the face is smaller than
    the critical length.
    """
    # BUG FIX: copy the defaults -- the original aliased the module-level
    # dict, so kwargs from one event leaked into every later event.
    type1_transition_spec = default_type1_transition_spec.copy()
    type1_transition_spec.update(**kwargs)
    face = type1_transition_spec["face"]
    edges = sheet.edge_df[sheet.edge_df["face"] == face]
    if min(edges["length"]) < type1_transition_spec["critical_length"]:
        exchange(sheet, face, type1_transition_spec["geom"])
default_face_elimination_spec = {"face_id": -1, "face": -1, "geom": SheetGeometry}


@face_lookup
def face_elimination(sheet, manager, **kwargs):
    """Removes the face with id face_id from the sheet."""
    # BUG FIX: copy the defaults -- the original aliased the module-level
    # dict, so kwargs from one event leaked into every later event.
    face_elimination_spec = default_face_elimination_spec.copy()
    face_elimination_spec.update(**kwargs)
    remove(sheet, face_elimination_spec["face"], face_elimination_spec["geom"])
default_check_tri_face_spec = {"geom": SheetGeometry}


def check_tri_faces(sheet, manager, **kwargs):
    """Three neighbourghs cell elimination

    Add all cells with three neighbourghs in the manager
    to be eliminated at the next time step.

    Parameters
    ----------
    sheet : a :class:`tyssue.sheet` object
    manager : a :class:`tyssue.events.EventManager` object
    """
    # BUG FIX: copy the defaults -- the original aliased the module-level
    # dict, so kwargs from one event leaked into every later event.
    check_tri_faces_spec = default_check_tri_face_spec.copy()
    check_tri_faces_spec.update(**kwargs)

    # faces with fewer than 4 sides are scheduled for elimination
    tri_faces = sheet.face_df[(sheet.face_df["num_sides"] < 4)].id
    manager.extend(
        [
            (face_elimination, {"face_id": f, "geom": check_tri_faces_spec["geom"]})
            for f in tri_faces
        ]
    )
default_contraction_line_tension_spec = {
    "face_id": -1,
    "face": -1,
    "shrink_rate": 1.05,
    "contractile_increase": 1.0,
    "critical_area": 1e-2,
    "max_contractility": 10,
    "multiply": True,
    "contraction_column": "line_tension",
    "unique": True,
}


@face_lookup
def contraction_line_tension(sheet, manager, **kwargs):
    """Single step contraction event acting on the face's line tension.

    Shrinks the face's prefered_area and increases the linear tension of
    its edges, unless the face is below the critical area.
    """
    # BUG FIX: copy the defaults -- the original aliased the module-level
    # dict, so kwargs from one event leaked into every later event.
    contraction_spec = default_contraction_line_tension_spec.copy()
    contraction_spec.update(**kwargs)
    face = contraction_spec["face"]
    if sheet.face_df.loc[face, "area"] < contraction_spec["critical_area"]:
        return
    # reduce prefered_area
    decrease(
        sheet,
        "face",
        face,
        contraction_spec["shrink_rate"],
        col="prefered_area",
        divide=True,
        bound=contraction_spec["critical_area"] / 2,
    )
    increase_linear_tension(
        sheet,
        face,
        contraction_spec["contractile_increase"],
        multiply=contraction_spec["multiply"],
        isotropic=True,
        limit=100,
    )
|
from flask import jsonify, request, Response

from sql_classes import Acl, Role, UrlList, User, UserGroup
def _node_base_and_rest(path):
"""
Returns a tuple: (the substring of a path after the last nodeSeparator, the preceding path before it)
If 'base' includes its own baseSeparator - return only a string after it
So if a path is 'OU=Group,OU=Dept,OU=Company', the tuple result would be ('OU=Group,OU=Dept', 'Company')
"""
node_separator = ','
base_separator = '='
node_base = path[path.rfind(node_separator) + 1:]
if path.find(node_separator) != -1:
node_preceding = path[:len(path) - len(node_base) - 1]
else:
node_preceding = ''
return (node_preceding, node_base[node_base.find(base_separator) + 1:])
def _place_user_onto_tree(user, usertree, user_groups):
"""
Places a 'user' object on a 'usertree' object according to user's pathField string key
"""
curr_node = usertree
# Decompose 'OU=Group,OU=Dept,OU=Company' into ('OU=Group,OU=Dept', 'Company')
preceding, base = _node_base_and_rest(user['distinguishedName'])
full_node_path = ''
# Place all path groups onto a tree starting from the outermost
while base != '':
node_found = False
full_node_path = 'OU=' + base + (',' if full_node_path != '' else '') + full_node_path
# Search for corresponding base element on current hierarchy level
for obj in curr_node:
if obj.get('text') == None:
continue
if obj['text'] == base:
node_found = True
curr_node = obj['children']
break
# Create a new group node
if not node_found:
curr_node.append({
'id': 'usergroup_' + str(user_groups[full_node_path]),
'text': base,
'objectType': 'UserGroup',
'children': []
})
curr_node = curr_node[len(curr_node) - 1]['children']
preceding, base = _node_base_and_rest(preceding)
curr_node.append({
'id': 'user_' + str(user['id']),
'text': user['cn'],
'leaf': True,
'iconCls': 'x-fa fa-user' if user['status'] == 1 else 'x-fa fa-user-times',
'objectType': 'User'
})
def _sort_tree(subtree, sort_field):
"""
Sorts a subtree node by a sortField key of each element
"""
# Sort eval function, first by group property, then by text
subtree['children'] = sorted(
subtree['children'],
key=lambda obj: (1 if obj.get('children') == None else 0, obj[sort_field]))
for tree_elem in subtree['children']:
if tree_elem.get('children') != None:
_sort_tree(tree_elem, sort_field)
def _collapse_terminal_nodes(subtree):
"""
Collapses tree nodes which doesn't contain subgroups, just tree leaves
"""
subtree_has_group_nodes = False
for tree_elem in subtree['children']:
if tree_elem.get('children') != None:
subtree_has_group_nodes = True
_collapse_terminal_nodes(tree_elem)
subtree['expanded'] = subtree_has_group_nodes
def _expand_all_nodes(subtree):
"""
Expand all level nodes
"""
for tree_elem in subtree['children']:
if tree_elem.get('children') != None:
_expand_all_nodes(tree_elem)
subtree['expanded'] = True
def _get_user_tree(current_user_properties, Session):
    """
    Build user tree

    Returns a nested dict/list structure of UserGroup/User nodes rooted
    at a synthetic 'usergroup_0' node.  With the 'ViewUsers' permission
    all non-hidden users are included, otherwise only the requester.
    """
    current_user_permissions = current_user_properties['user_permissions']
    session = Session()
    # Get all groups
    query_result = session.query(UserGroup.id, UserGroup.distinguishedName).all()
    user_groups = {}
    for query_result_row in query_result:
        # Map group DN -> group id, consumed by _place_user_onto_tree
        user_groups[query_result_row.distinguishedName] = query_result_row.id
    # Get all users if ViewUsers permission present
    if next((item for item in current_user_permissions if item['permissionName'] == 'ViewUsers'), None) != None:
        query_result = session.query(
            User.id.label('user_id'), User.cn, User.status, UserGroup.id.label('usergroup_id'),
            UserGroup.distinguishedName).join(UserGroup).filter(User.hidden == 0).all()
    # Get just the requester otherwise
    else:
        query_result = session.query(
            User.id.label('user_id'), User.cn, User.status, UserGroup.id.label('usergroup_id'),
            UserGroup.distinguishedName).join(UserGroup).\
            filter(User.id == current_user_properties['user_object']['id'], User.hidden == 0).all()
    # Release the scoped session before building the tree
    Session.remove()
    # Future tree
    user_tree = []
    # Place each user on a tree
    for query_result_row in query_result:
        user_object = {
            'id': query_result_row.user_id,
            'distinguishedName': query_result_row.distinguishedName,
            'status': query_result_row.status,
            'cn': query_result_row.cn
        }
        _place_user_onto_tree(user_object, user_tree, user_groups)
    # Wrap everything in a synthetic root group node
    user_tree = {
        'id': 'usergroup_0',
        'objectType': 'UserGroup',
        'text': 'Пользователи',
        'children': user_tree
    }
    # Sort tree elements
    _sort_tree(user_tree, 'text')
    # Collapse/expand tree nodes
    if next((item for item in current_user_permissions if item['permissionName'] == 'ViewUsers'), None) != None:
        _collapse_terminal_nodes(user_tree)
    else:
        _expand_all_nodes(user_tree)
    return user_tree
def _get_url_lists(Session):
    """Build the URL-lists subtree from every UrlList row, sorted by name."""
    session = Session()
    rows = session.query(UrlList.id, UrlList.name, UrlList.whitelist).all()
    Session.remove()
    # One leaf node per URL list; the icon reflects white/black list kind
    children = [
        {
            'id': 'urllist_' + str(row.id),
            'text': row.name,
            'leaf': True,
            'iconCls': 'x-fa fa-unlock' if row.whitelist else 'x-fa fa-lock',
            'objectType': 'UrlList'
        }
        for row in rows
    ]
    url_lists = {
        'id': 'urllists',
        'objectType': 'UrlLists',
        'text': 'Списки URL',
        'iconCls': 'x-fa fa-cog',
        'children': children
    }
    _sort_tree(url_lists, 'text')
    return url_lists
def _get_acls(Session):
    """Build the access-control-lists subtree from every Acl row, sorted by name."""
    session = Session()
    rows = session.query(Acl.id, Acl.name).all()
    Session.remove()
    children = [
        {
            'id': 'acl_' + str(row.id),
            'text': row.name,
            'leaf': True,
            'iconCls': 'x-fa fa-filter',
            'objectType': 'AclContents'
        }
        for row in rows
    ]
    acls = {
        'id': 'acls',
        'objectType': 'Acls',
        'text': 'Списки доступа',
        'iconCls': 'x-fa fa-cog',
        'children': children
    }
    _sort_tree(acls, 'text')
    return acls
def _get_roles(Session):
    """Build the roles subtree from every Role row, sorted by name."""
    session = Session()
    rows = session.query(Role.id, Role.name).all()
    Session.remove()
    children = [
        {
            'id': 'role_' + str(row.id),
            'text': row.name,
            'leaf': True,
            'iconCls': 'x-fa fa-key',
            'objectType': 'Role'
        }
        for row in rows
    ]
    roles = {
        'id': 'roles',
        'objectType': 'Roles',
        'text': 'Роли',
        'iconCls': 'x-fa fa-cog',
        'children': children
    }
    _sort_tree(roles, 'text')
    return roles
def select_tree(current_user_properties, node_name, Session):
    """Build the tree JSON for *node_name* ('root', 'urllists', 'acls' or
    'roles'), honouring the requesting user's permissions.

    Returns a flask JSON response on success, a 403 Response when the
    node is requested without the required permission, and a 404
    Response for an unknown node name.

    BUG FIX: ``Response`` was used here without being imported, so every
    'Forbidden' branch raised NameError instead of returning 403 (fixed
    together with the flask import line); an unknown node_name previously
    hit UnboundLocalError on ``result``.
    """
    current_user_permissions = current_user_properties['user_permissions']

    def has_permission(name):
        # True when the user's permission list contains *name*
        return any(item['permissionName'] == name
                   for item in current_user_permissions)

    url_lists_node = None
    acls_node = None
    roles_node = None
    users_node = None

    if has_permission('ViewSettings'):
        if node_name in ['root', 'urllists']:
            url_lists_node = _get_url_lists(Session)
        if node_name in ['root', 'acls']:
            acls_node = _get_acls(Session)

    if has_permission('ViewPermissions'):
        if node_name in ['root', 'roles']:
            roles_node = _get_roles(Session)

    if node_name in ['root']:
        users_node = _get_user_tree(current_user_properties, Session)

    if node_name == 'root':
        # The root node aggregates whichever subtrees the user may see
        children_list = [node for node in
                         (url_lists_node, acls_node, roles_node, users_node)
                         if node is not None]
        result = {
            'success': True,
            'children': children_list
        }
    elif node_name == 'urllists':
        if not has_permission('ViewSettings'):
            return Response('Forbidden', 403)
        result = {
            'success': True,
            'children': url_lists_node['children']
        }
    elif node_name == 'acls':
        if not has_permission('ViewSettings'):
            return Response('Forbidden', 403)
        result = {
            'success': True,
            'children': acls_node['children']
        }
    elif node_name == 'roles':
        if not has_permission('ViewPermissions'):
            return Response('Forbidden', 403)
        result = {
            'success': True,
            'children': roles_node['children']
        }
    else:
        # Unknown node: previously crashed with UnboundLocalError
        return Response('Not found', 404)
    return jsonify(result)
|
"""
gateway tests - Users
Copyright 2009 Glencoe Software, Inc. All rights reserved.
Use is subject to license terms supplied in LICENSE.txt
"""
import unittest
import omero
import gatewaytest.library as lib
from omero.gateway.scripts import dbhelpers
class UserTest (lib.GTest):
    """Gateway login/permission tests run as the User/Author/Admin accounts."""

    def testUsers (self):
        """Exercise reconnection and switching between the test accounts."""
        self.loginAsUser()
        # Try reconnecting without disconnect
        self._has_connected = False
        self.doConnect()
        self.loginAsAuthor()
        self.loginAsAdmin()

    def testSaveAs (self):
        """saveAs() must store the annotation in the image owner's context."""
        for u in (self.AUTHOR, self.ADMIN):
            # Test image should be owned by author
            self.loginAsAuthor()
            image = self.getTestImage()
            ownername = image.getOwnerOmeName()
            # Now login as author or admin
            self.doLogin(u)
            self.gateway.SERVICE_OPTS.setOmeroGroup('-1')
            image = self.getTestImage()
            self.assertEqual(ownername, self.AUTHOR.name)
            # Create some object
            param = omero.sys.Parameters()
            param.map = {'ns': omero.rtypes.rstring('weblitz.UserTest.testSaveAs')}
            anns = self.gateway.getQueryService().findAllByQuery('from CommentAnnotation as a where a.ns=:ns', param)
            self.assertEqual(len(anns), 0)
            self.gateway.SERVICE_OPTS.setOmeroGroup()
            ann = omero.gateway.CommentAnnotationWrapper(conn=self.gateway)
            ann.setNs(param.map['ns'].val)
            ann.setValue('foo')
            ann.saveAs(image.getDetails())
            # Annotations are owned by author
            self.loginAsAuthor()
            try:
                anns = self.gateway.getQueryService().findAllByQuery('from CommentAnnotation as a where a.ns=:ns', param)
                self.assertEqual(len(anns), 1)
                self.assertEqual(omero.gateway.CommentAnnotationWrapper(self.gateway, anns[0]).getOwnerOmeName(), self.AUTHOR.name)
            finally:
                # Clean up so the next loop iteration starts from zero annotations
                self.gateway.getUpdateService().deleteObject(ann._obj)
            anns = self.gateway.getQueryService().findAllByQuery('from CommentAnnotation as a where a.ns=:ns', param)
            self.assertEqual(len(anns), 0)

    def testCrossGroupSave (self):
        """A user added to a group-writable group can edit its dataset."""
        self.loginAsUser()
        uid = self.gateway.getUserId()
        self.loginAsAdmin()
        self.gateway.SERVICE_OPTS.setOmeroGroup('-1')
        d = self.getTestDataset()
        did = d.getId()
        g = d.getDetails().getGroup()
        admin = self.gateway.getAdminService()
        admin.addGroups(omero.model.ExperimenterI(uid, False), [g._obj])
        self.gateway.SERVICE_OPTS.setOmeroGroup('-1')
        # make sure the group is groupwrite enabled
        perms = str(d.getDetails().getGroup().getDetails().permissions)
        admin.changePermissions(g._obj, omero.model.PermissionsI('rwrw--'))
        d = self.getTestDataset()
        g = d.getDetails().getGroup()
        self.assert_(g.getDetails().permissions.isGroupWrite())
        self.loginAsUser()
        # User is now a member of the group to which testDataset belongs, which has groupWrite==True
        # But the default group for User is diferent
        try:
            self.gateway.SERVICE_OPTS.setOmeroGroup('-1')
            d = self.getTestDataset()
            did = d.getId()
            n = d.getName()
            # Rename and rename back to prove cross-group write access
            d.setName(n+'_1')
            d.save()
            d = self.gateway.getObject('dataset', did)
            self.assertEqual(d.getName(), n+'_1')
            d.setName(n)
            d.save()
            d = self.gateway.getObject('dataset', did)
            self.assertEqual(d.getName(), n)
        finally:
            self.loginAsAdmin()
            admin = self.gateway.getAdminService()
            # Revert group permissions and remove user from group
            admin.changePermissions(g._obj, omero.model.PermissionsI(perms))
            admin.removeGroups(omero.model.ExperimenterI(uid, False), [g._obj])

    def testCrossGroupRead (self):
        """A non-member must not be able to read another group's project."""
        self.loginAsAuthor()
        u = self.gateway.getUpdateService()
        p = self.getTestProject()
        # The project's group must not be group-readable for this test
        self.assertEqual(str(p.getDetails().permissions)[4], '-')
        d = p.getDetails()
        g = d.getGroup()
        self.loginAsUser()
        self.gateway.SERVICE_OPTS.setOmeroGroup('-1')
        self.assert_(not g.getId() in self.gateway.getEventContext().memberOfGroups)
        self.assertEqual(self.gateway.getObject('project', p.getId()), None)

    def testGroupOverObjPermissions (self):
        """ Object accesss must be dependent only of group permissions """
        ns = 'omero.test.ns'
        # Author
        self.loginAsAuthor()
        # create group with rw----
        # create project and annotation in that group
        p = dbhelpers.ProjectEntry('testAnnotationPermissions', None, create_group='testAnnotationPermissions', group_perms='rw----')
        try:
            p = p.create(self.gateway)
        except dbhelpers.BadGroupPermissionsException:
            # Group already exists with different permissions: reset and retry
            self.loginAsAdmin()
            admin = self.gateway.getAdminService()
            admin.changePermissions(admin.lookupGroup('testAnnotationPermissions'), omero.model.PermissionsI('rw----'))
            self.loginAsAuthor()
            p = p.create(self.gateway)
        pid = p.getId()
        g = p.getDetails().getGroup()._obj
        try:
            # Admin
            # add User to group
            self.loginAsUser()
            uid = self.gateway.getUserId()
            self.loginAsAdmin()
            admin = self.gateway.getAdminService()
            admin.addGroups(omero.model.ExperimenterI(uid, False), [g])
            # User
            # try to read project and annotation, which fails
            self.loginAsUser()
            self.gateway.SERVICE_OPTS.setOmeroGroup('-1')
            self.assertEqual(self.gateway.getObject('project', pid), None)
            # Admin
            # Chmod project to rwrw--
            self.loginAsAdmin()
            admin = self.gateway.getAdminService()
            admin.changePermissions(g, omero.model.PermissionsI('rwrw--'))
            # Author
            # check project has proper permissions
            self.loginAsAuthor()
            self.gateway.SERVICE_OPTS.setOmeroGroup('-1')
            pa = self.gateway.getObject('project', pid)
            self.assertNotEqual(pa, None)
            # User
            # read project and annotation
            self.loginAsUser()
            self.gateway.SERVICE_OPTS.setOmeroGroup('-1')
            self.assertNotEqual(self.gateway.getObject('project', pid), None)
        finally:
            # Remove the test project regardless of the outcome
            self.loginAsAuthor()
            handle = self.gateway.deleteObjects('Project', [p.getId()], deleteAnns=True, deleteChildren=True)
            self.waitOnCmd(self.gateway.c, handle)
if __name__ == '__main__':
    # Allow running this test module directly
    unittest.main()
|
"""Standard input/out/err support.
API Stability: semi-stable
Future Plans: support for stderr, perhaps
Maintainer: U{Itamar Shtull-Trauring<mailto:twisted@itamarst.org>}
"""
import sys, os, select, errno
import abstract, fdesc, protocol
from main import CONNECTION_LOST
_stdio_in_use = 0
class StandardIOWriter(abstract.FileDescriptor):
connected = 1
ic = 0
def __init__(self):
abstract.FileDescriptor.__init__(self)
self.fileno = sys.__stdout__.fileno
fdesc.setNonBlocking(self.fileno())
def writeSomeData(self, data):
try:
return os.write(self.fileno(), data)
return rv
except IOError, io:
if io.args[0] == errno.EAGAIN:
return 0
elif io.args[0] == errno.EPERM:
return 0
return CONNECTION_LOST
except OSError, ose:
if ose.errno == errno.EPIPE:
return CONNECTION_LOST
if ose.errno == errno.EAGAIN:
return 0
raise
def connectionLost(self, reason):
abstract.FileDescriptor.connectionLost(self, reason)
os.close(self.fileno())
class StandardIO(abstract.FileDescriptor):
    """I can connect Standard IO to a twisted.protocol
    I act as a selectable for sys.stdin, and provide a write method that writes
    to stdout.
    """

    def __init__(self, protocol):
        """Create me with a protocol.

        This will fail if a StandardIO has already been instantiated.
        """
        abstract.FileDescriptor.__init__(self)
        global _stdio_in_use
        if _stdio_in_use:
            raise RuntimeError, "Standard IO already in use."
        _stdio_in_use = 1
        # Read side is stdin; the write side is delegated to a separate
        # StandardIOWriter wrapping stdout.
        self.fileno = sys.__stdin__.fileno
        fdesc.setNonBlocking(self.fileno())
        self.protocol = protocol
        self.startReading()
        self.writer = StandardIOWriter()
        self.protocol.makeConnection(self)

    def write(self, data):
        """Write some data to standard output.
        """
        self.writer.write(data)

    def doRead(self):
        """Some data's readable from standard input.
        """
        return fdesc.readFromFD(self.fileno(), self.protocol.dataReceived)

    def closeStdin(self):
        """Close standard input.
        """
        self.writer.loseConnection()

    def connectionLost(self, reason):
        """The connection was lost.
        """
        # NOTE(review): protocol.connectionLost is called without the
        # 'reason' argument -- confirm the protocol accepts that
        self.protocol.connectionLost()
|
import copy
import Queue
import os
import socket
import struct
import subprocess
import sys
import threading
import time
import unittest
import dns
import dns.message
import libnacl
import libnacl.utils
class DNSDistTest(unittest.TestCase):
    """
    Set up a dnsdist instance and responder threads.
    Queries sent to dnsdist are relayed to the responder threads,
    who reply with the response provided by the tests themselves
    on a queue. Responder threads also queue the queries received
    from dnsdist on a separate queue, allowing the tests to check
    that the queries sent from dnsdist were as expected.
    """
    # Ports/addresses used by dnsdist and the backend test responders
    _dnsDistPort = 5340
    _dnsDistListeningAddr = "127.0.0.1"
    _testServerPort = 5350
    # Responses to hand to the responders / queries they received
    _toResponderQueue = Queue.Queue()
    _fromResponderQueue = Queue.Queue()
    _queueTimeout = 1
    _dnsdistStartupDelay = 2.0
    _dnsdist = None  # subprocess.Popen handle, set by startDNSDist()
    _responsesCounter = {}  # per-responder-thread count of answered queries
    _shutUp = True  # redirect dnsdist stdout to /dev/null by default
    # Configuration template, %-interpolated with the _config_params attributes
    _config_template = """
    """
    _config_params = ['_testServerPort']
    _acl = ['127.0.0.1/32']
    _consolePort = 5199
    _consoleKey = None
    @classmethod
    def startResponders(cls):
        """Start the UDP and TCP responder threads (daemonized so they
        do not keep the process alive)."""
        print("Launching responders..")

        cls._UDPResponder = threading.Thread(name='UDP Responder', target=cls.UDPResponder, args=[cls._testServerPort])
        cls._UDPResponder.setDaemon(True)
        cls._UDPResponder.start()

        cls._TCPResponder = threading.Thread(name='TCP Responder', target=cls.TCPResponder, args=[cls._testServerPort])
        cls._TCPResponder.setDaemon(True)
        cls._TCPResponder.start()
    @classmethod
    def startDNSDist(cls, shutUp=True):
        """Write the dnsdist configuration file and launch the dnsdist
        process (binary taken from the DNSDISTBIN environment variable).

        Exits the test run with dnsdist's return code if the process
        dies during the startup delay.
        """
        print("Launching dnsdist..")
        conffile = 'dnsdist_test.conf'
        # Interpolate the class-level template with the declared parameters
        params = tuple([getattr(cls, param) for param in cls._config_params])
        print(params)
        with open(conffile, 'w') as conf:
            conf.write("-- Autogenerated by dnsdisttests.py\n")
            conf.write(cls._config_template % params)

        dnsdistcmd = [os.environ['DNSDISTBIN'], '-C', conffile,
                      '-l', '%s:%d' % (cls._dnsDistListeningAddr, cls._dnsDistPort) ]
        for acl in cls._acl:
            dnsdistcmd.extend(['--acl', acl])
        print(' '.join(dnsdistcmd))

        if shutUp:
            with open(os.devnull, 'w') as fdDevNull:
                cls._dnsdist = subprocess.Popen(dnsdistcmd, close_fds=True, stdout=fdDevNull)
        else:
            cls._dnsdist = subprocess.Popen(dnsdistcmd, close_fds=True)

        # Give dnsdist some time to bind its sockets
        if 'DNSDIST_FAST_TESTS' in os.environ:
            delay = 0.5
        else:
            delay = cls._dnsdistStartupDelay
        time.sleep(delay)

        if cls._dnsdist.poll() is not None:
            # dnsdist already exited: abort the whole test run
            cls._dnsdist.kill()
            sys.exit(cls._dnsdist.returncode)
    @classmethod
    def setUpSockets(cls):
        """Create the shared UDP client socket connected to dnsdist."""
        print("Setting up UDP socket..")
        cls._sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        cls._sock.settimeout(2.0)
        cls._sock.connect(("127.0.0.1", cls._dnsDistPort))
    @classmethod
    def setUpClass(cls):
        """Bring up responders, dnsdist and the client socket, in that order."""
        cls.startResponders()
        cls.startDNSDist(cls._shutUp)
        cls.setUpSockets()

        print("Launching tests..")
    @classmethod
    def tearDownClass(cls):
        """Terminate dnsdist, escalating to kill if it ignores SIGTERM."""
        if 'DNSDIST_FAST_TESTS' in os.environ:
            delay = 0.1
        else:
            delay = 1.0
        if cls._dnsdist:
            cls._dnsdist.terminate()
            if cls._dnsdist.poll() is None:
                # Grace period before forcing the process down
                time.sleep(delay)
                if cls._dnsdist.poll() is None:
                    cls._dnsdist.kill()
                cls._dnsdist.wait()
@classmethod
def _ResponderIncrementCounter(cls):
if threading.currentThread().name in cls._responsesCounter:
cls._responsesCounter[threading.currentThread().name] += 1
else:
cls._responsesCounter[threading.currentThread().name] = 1
    @classmethod
    def _getResponse(cls, request):
        """Return the DNS response to send back for *request*, or None.

        Non-healthcheck queries are counted and pushed onto
        _fromResponderQueue; their reply is taken from _toResponderQueue
        when one is waiting, otherwise an empty response is generated.
        """
        response = None
        if len(request.question) != 1:
            print("Skipping query with question count %d" % (len(request.question)))
            return None
        # Queries outside tests.powerdns.com. are assumed to be dnsdist
        # health checks: not counted, not queued
        healthcheck = not str(request.question[0].name).endswith('tests.powerdns.com.')
        if not healthcheck:
            cls._ResponderIncrementCounter()
            if not cls._toResponderQueue.empty():
                response = cls._toResponderQueue.get(True, cls._queueTimeout)
                if response:
                    # Copy so the caller's object is untouched, and echo
                    # the query id so the response matches the request
                    response = copy.copy(response)
                    response.id = request.id
            cls._fromResponderQueue.put(request, True, cls._queueTimeout)

        if not response:
            # unexpected query, or health check
            response = dns.message.make_response(request)

        return response
    @classmethod
    def UDPResponder(cls, port, ignoreTrailing=False):
        """Minimal UDP DNS responder loop bound to 127.0.0.1:*port*.

        Runs forever; responses come from _getResponse().
        """
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        # Allow rebinding the port across quick test restarts
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
        sock.bind(("127.0.0.1", port))
        while True:
            data, addr = sock.recvfrom(4096)
            request = dns.message.from_wire(data, ignore_trailing=ignoreTrailing)
            response = cls._getResponse(request)
            if not response:
                continue

            sock.settimeout(2.0)
            sock.sendto(response.to_wire(), addr)
            sock.settimeout(None)

        # NOTE(review): unreachable -- the loop above never exits
        sock.close()
    @classmethod
    def TCPResponder(cls, port, ignoreTrailing=False, multipleResponses=False):
        """Minimal TCP DNS responder loop bound to 127.0.0.1:*port*.

        Handles one query per connection; with multipleResponses=True it
        drains _toResponderQueue to send extra responses on the same
        connection (e.g. for XFR-style tests).
        """
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
        try:
            sock.bind(("127.0.0.1", port))
        except socket.error as e:
            print("Error binding in the TCP responder: %s" % str(e))
            sys.exit(1)

        sock.listen(100)
        while True:
            (conn, _) = sock.accept()
            conn.settimeout(2.0)
            # DNS-over-TCP frames are prefixed with a 2-byte big-endian length
            data = conn.recv(2)
            (datalen,) = struct.unpack("!H", data)
            data = conn.recv(datalen)
            request = dns.message.from_wire(data, ignore_trailing=ignoreTrailing)
            response = cls._getResponse(request)
            if not response:
                conn.close()
                continue

            wire = response.to_wire()
            conn.send(struct.pack("!H", len(wire)))
            conn.send(wire)

            while multipleResponses:
                if cls._toResponderQueue.empty():
                    break

                response = cls._toResponderQueue.get(True, cls._queueTimeout)
                if not response:
                    break

                response = copy.copy(response)
                response.id = request.id
                wire = response.to_wire()
                try:
                    conn.send(struct.pack("!H", len(wire)))
                    conn.send(wire)
                except socket.error as e:
                    # some of the tests are going to close
                    # the connection on us, just deal with it
                    break

            conn.close()

        # NOTE(review): unreachable -- the accept loop never exits
        sock.close()
    @classmethod
    def sendUDPQuery(cls, query, response, useQueue=True, timeout=2.0, rawQuery=False):
        """Send *query* to dnsdist over UDP; return (receivedQuery, message).

        When *useQueue* is set, *response* is queued for the backend and the
        query the backend actually received is fetched back. Either element of
        the returned tuple may be None (timeout / no data).
        """
        if useQueue:
            cls._toResponderQueue.put(response, True, timeout)
        if timeout:
            cls._sock.settimeout(timeout)
        try:
            if not rawQuery:
                query = query.to_wire()
            cls._sock.send(query)
            data = cls._sock.recv(4096)
        except socket.timeout:
            data = None
        finally:
            # always clear the timeout so later tests see a blocking socket
            if timeout:
                cls._sock.settimeout(None)
        receivedQuery = None
        message = None
        if useQueue and not cls._fromResponderQueue.empty():
            receivedQuery = cls._fromResponderQueue.get(True, timeout)
        if data:
            message = dns.message.from_wire(data)
        return (receivedQuery, message)
@classmethod
def openTCPConnection(cls, timeout=None):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if timeout:
sock.settimeout(timeout)
sock.connect(("127.0.0.1", cls._dnsDistPort))
return sock
@classmethod
def sendTCPQueryOverConnection(cls, sock, query, rawQuery=False):
if not rawQuery:
wire = query.to_wire()
else:
wire = query
sock.send(struct.pack("!H", len(wire)))
sock.send(wire)
    @classmethod
    def recvTCPResponseOverConnection(cls, sock):
        """Read one length-prefixed DNS message from *sock*.

        Returns the parsed message, or None when the connection yields no
        data (e.g. closed by the peer).
        """
        message = None
        data = sock.recv(2)
        if data:
            (datalen,) = struct.unpack("!H", data)
            data = sock.recv(datalen)
            if data:
                message = dns.message.from_wire(data)
        return message
    @classmethod
    def sendTCPQuery(cls, query, response, useQueue=True, timeout=2.0, rawQuery=False):
        """Send *query* over a fresh TCP connection; return (receivedQuery, message).

        Mirrors sendUDPQuery() over TCP: optionally queues *response* for the
        backend, then reads a single framed reply. Timeouts and network errors
        are printed and leave message as None.
        """
        message = None
        if useQueue:
            cls._toResponderQueue.put(response, True, timeout)
        sock = cls.openTCPConnection(timeout)
        try:
            cls.sendTCPQueryOverConnection(sock, query, rawQuery)
            message = cls.recvTCPResponseOverConnection(sock)
        except socket.timeout as e:
            print("Timeout: %s" % (str(e)))
        except socket.error as e:
            print("Network error: %s" % (str(e)))
        finally:
            sock.close()
        receivedQuery = None
        if useQueue and not cls._fromResponderQueue.empty():
            receivedQuery = cls._fromResponderQueue.get(True, timeout)
        return (receivedQuery, message)
    @classmethod
    def sendTCPQueryWithMultipleResponses(cls, query, responses, useQueue=True, timeout=2.0, rawQuery=False):
        """Send one TCP query and collect every framed response until EOF.

        *responses* are queued for the backend when *useQueue* is set.
        Returns (receivedQuery, messages) where messages is a list of parsed
        DNS messages read back from the connection.
        """
        if useQueue:
            for response in responses:
                cls._toResponderQueue.put(response, True, timeout)
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        if timeout:
            sock.settimeout(timeout)
        sock.connect(("127.0.0.1", cls._dnsDistPort))
        messages = []
        try:
            if not rawQuery:
                wire = query.to_wire()
            else:
                wire = query
            sock.send(struct.pack("!H", len(wire)))
            sock.send(wire)
            # keep reading length-prefixed frames until the peer closes
            while True:
                data = sock.recv(2)
                if not data:
                    break
                (datalen,) = struct.unpack("!H", data)
                data = sock.recv(datalen)
                messages.append(dns.message.from_wire(data))
        except socket.timeout as e:
            print("Timeout: %s" % (str(e)))
        except socket.error as e:
            print("Network error: %s" % (str(e)))
        finally:
            sock.close()
        receivedQuery = None
        if useQueue and not cls._fromResponderQueue.empty():
            receivedQuery = cls._fromResponderQueue.get(True, timeout)
        return (receivedQuery, messages)
def setUp(self):
# This function is called before every tests
# Clear the responses counters
for key in self._responsesCounter:
self._responsesCounter[key] = 0
# Make sure the queues are empty, in case
# a previous test failed
while not self._toResponderQueue.empty():
self._toResponderQueue.get(False)
while not self._fromResponderQueue.empty():
self._fromResponderQueue.get(False)
@classmethod
def clearToResponderQueue(cls):
while not cls._toResponderQueue.empty():
cls._toResponderQueue.get(False)
@classmethod
def clearFromResponderQueue(cls):
while not cls._fromResponderQueue.empty():
cls._fromResponderQueue.get(False)
    @classmethod
    def clearResponderQueues(cls):
        """Drain both responder queues."""
        cls.clearToResponderQueue()
        cls.clearFromResponderQueue()
    @staticmethod
    def generateConsoleKey():
        """Generate a fresh Salsa20 secretbox key for the console channel."""
        return libnacl.utils.salsa_key()
    @classmethod
    def _encryptConsole(cls, command, nonce):
        """Encrypt *command* with the console key; pass through when no key is set."""
        if cls._consoleKey is None:
            return command
        return libnacl.crypto_secretbox(command, nonce, cls._consoleKey)
    @classmethod
    def _decryptConsole(cls, command, nonce):
        """Decrypt *command* with the console key; pass through when no key is set."""
        if cls._consoleKey is None:
            return command
        return libnacl.crypto_secretbox_open(command, nonce, cls._consoleKey)
@classmethod
def sendConsoleCommand(cls, command, timeout=1.0):
ourNonce = libnacl.utils.rand_nonce()
theirNonce = None
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if timeout:
sock.settimeout(timeout)
sock.connect(("127.0.0.1", cls._consolePort))
sock.send(ourNonce)
theirNonce = sock.recv(len(ourNonce))
halfNonceSize = len(ourNonce) / 2
readingNonce = ourNonce[0:halfNonceSize] + theirNonce[halfNonceSize:]
writingNonce = theirNonce[0:halfNonceSize] + ourNonce[halfNonceSize:]
msg = cls._encryptConsole(command, writingNonce)
sock.send(struct.pack("!I", len(msg)))
sock.send(msg)
data = sock.recv(4)
(responseLen,) = struct.unpack("!I", data)
data = sock.recv(responseLen)
response = cls._decryptConsole(data, readingNonce)
return response
|
from opus_core.variables.variable import Variable
from variable_functions import my_attribute_label
from urbansim.length_constants import UrbanSimLength, UrbanSimLengthConstants
from numpy import array
class is_near_arterial(Variable):
    """Boolean indicating if this gridcell is near an arterial, as specified by the arterial
    threshold (a constant). Distance is assumed to be measured from the "border" of the gridcell."""
    distance_to_arterial = "distance_to_arterial"
    def dependencies(self):
        # Only the gridcell's distance-to-arterial attribute is required.
        return [my_attribute_label(self.distance_to_arterial)]
    def compute(self, dataset_pool):
        distances = self.get_dataset().get_attribute(self.distance_to_arterial)
        constants = dataset_pool.get_dataset('urbansim_constant')
        return get_is_near_arterial(distances, constants)
    def post_check(self, values, dataset_pool):
        # Result must be strictly boolean.
        self.do_check("x == False or x == True", values)
def get_is_near_arterial(distance, urbansim_constant):
    """Return a boolean array: True where *distance* is below the near-arterial threshold.

    The distances are wrapped in an UrbanSimLength using the same units as the
    gridcell width constant before comparing against the threshold constant.
    """
    units = urbansim_constant["gridcell_width"].units
    threshold = urbansim_constant["near_arterial_threshold_unit"]
    return UrbanSimLength(distance, units).less_than(threshold)
from opus_core.tests import opus_unittest
from opus_core.tests.utils.variable_tester import VariableTester
from numpy import array
class Tests(opus_unittest.OpusTestCase):
    """Unit test for the is_near_arterial variable."""
    def test_my_inputs( self ):
        # Assumes distance is measured from the gridcell border to the arterial.
        # Threshold is 100, so only distances strictly below 100 count as near.
        tester = VariableTester(
            __file__,
            package_order=['urbansim'],
            test_data={
                'gridcell':{
                    'grid_id': array([1,2,3,4,5,6]),
                    'distance_to_arterial': array([0.0, 50.0, 99.0, 100.0, 101.0, 200.0]),
                },
                'urbansim_constant':{
                    'cell_size': array([150]),
                    'near_arterial_threshold': array([100]),
                    'units': array(['meters']),
                }
            }
        )
        should_be = array( [True, True, True, False, False, False] )
        tester.test_is_equal_for_variable_defined_by_this_module(self, should_be)
# Run the unit tests when executed as a script.
if __name__=='__main__':
    opus_unittest.main()
|
import random
from collections import namedtuple
import dateparser
import pytest
from cfme import test_requirements
from cfme.containers.image import Image
from cfme.containers.provider import ContainersProvider
from cfme.containers.provider import ContainersTestItem
from cfme.utils.appliance.implementations.ui import navigate_to
from cfme.utils.wait import wait_for
# Marks applied to every test in this module: smartproxy server role, a
# configured provider fixture, tier 1, one ContainersProvider per function.
pytestmark = [
    pytest.mark.meta(server_roles='+smartproxy'),
    pytest.mark.usefixtures('setup_provider'),
    pytest.mark.tier(1),
    pytest.mark.provider([ContainersProvider], scope='function'),
    test_requirements.containers
]
# (table, attribute name, verifier callable): one Details-page value to check.
AttributeToVerify = namedtuple('AttributeToVerify', ['table', 'attr', 'verifier'])
# Attributes expected after a scan with OpenSCAP disabled.
TESTED_ATTRIBUTES__openscap_off = (
    AttributeToVerify('configuration', 'OpenSCAP Results', bool),
    AttributeToVerify('configuration', 'OpenSCAP HTML', lambda val: val == 'Available'),
    AttributeToVerify('configuration', 'Last scan', dateparser.parse)
)
# With OpenSCAP enabled, compliance status/history must be populated as well.
TESTED_ATTRIBUTES__openscap_on = TESTED_ATTRIBUTES__openscap_off + (
    AttributeToVerify('compliance', 'Status', lambda val: val.lower() != 'never verified'),
    AttributeToVerify('compliance', 'History', lambda val: val == 'Available')
)
# One parametrized test item per OpenSCAP mode.
TEST_ITEMS = (
    ContainersTestItem(Image, 'openscap_off', is_openscap=False,
                       tested_attr=TESTED_ATTRIBUTES__openscap_off),
    ContainersTestItem(Image, 'openscap_on', is_openscap=True,
                       tested_attr=TESTED_ATTRIBUTES__openscap_on)
)
# Number of images sampled by the random_image_instance fixture.
NUM_SELECTED_IMAGES = 1
@pytest.fixture(scope='function')
def delete_all_container_tasks(appliance):
    """Fixture: remove every task from the 'All Tasks' tab before the test runs."""
    all_tasks = appliance.collections.tasks.filter({'tab': 'AllTasks'})
    all_tasks.delete_all()
@pytest.fixture(scope='function')
def random_image_instance(appliance):
    """Fixture: return one randomly chosen container image.

    Only active (not archived) images from the Red Hat registry are eligible.
    """
    images = appliance.collections.container_images
    # add filter for select only active(not archived) images from redHat registry
    active_redhat = images.filter({'active': True, 'redhat_registry': True})
    return random.sample(active_redhat.all(), NUM_SELECTED_IMAGES).pop()
@pytest.mark.polarion('10030')
def test_manage_policies_navigation(random_image_instance):
    """Assign the OpenSCAP policy profile to a random image via the UI.

    Polarion:
        assignee: juwatts
        caseimportance: high
        casecomponent: Containers
        initialEstimate: 1/6h
    """
    random_image_instance.assign_policy_profiles('OpenSCAP profile')
@pytest.mark.polarion('10031')
def test_check_compliance(random_image_instance):
    """Assign the OpenSCAP profile, then trigger a compliance check.

    Polarion:
        assignee: juwatts
        caseimportance: high
        casecomponent: Containers
        initialEstimate: 1/6h
    """
    random_image_instance.assign_policy_profiles('OpenSCAP profile')
    random_image_instance.check_compliance()
def get_table_attr(instance, table_name, attr):
    """Read attribute *attr* from table *table_name* on the Details page.

    Returns None when the view exposes no such table.
    """
    details_view = navigate_to(instance, 'Details', force=True)
    table = getattr(details_view.entities, table_name, None)
    if not table:
        return None
    return table.read().get(attr)
@pytest.mark.parametrize(('test_item'), TEST_ITEMS)
def test_containers_smartstate_analysis(provider, test_item, soft_assert,
                                        delete_all_container_tasks,
                                        random_image_instance):
    """Run a SmartState analysis on an image and verify the Details tables.

    Polarion:
        assignee: juwatts
        caseimportance: high
        casecomponent: Containers
        initialEstimate: 1/6h
    """
    # (Un)assign the OpenSCAP profile according to the parametrized item.
    if test_item.is_openscap:
        random_image_instance.assign_policy_profiles('OpenSCAP profile')
    else:
        random_image_instance.unassign_policy_profiles('OpenSCAP profile')
    random_image_instance.perform_smartstate_analysis(wait_for_finish=True)
    view = navigate_to(random_image_instance, 'Details')
    for tbl, attr, verifier in test_item.tested_attr:
        table = getattr(view.entities, tbl)
        # lower-case keys for a case-insensitive attribute lookup
        table_data = {k.lower(): v for k, v in table.read().items()}
        if not soft_assert(attr.lower() in table_data,
                           f'{tbl} table has missing attribute \'{attr}\''):
            continue
        provider.refresh_provider_relationships()
        # the value may appear only after a refresh; poll for up to 2 minutes
        wait_for_retval = wait_for(lambda: get_table_attr(random_image_instance, tbl, attr),
                                   message='Trying to get attribute "{}" of table "{}"'.format(
                                       attr, tbl),
                                   delay=5, num_sec=120, silent_failure=True)
        if not wait_for_retval:
            soft_assert(False, 'Could not get attribute "{}" for "{}" table.'
                        .format(attr, tbl))
            continue
        value = wait_for_retval.out
        soft_assert(verifier(value),
                    f'{tbl}.{attr} attribute has unexpected value ({value})')
@pytest.mark.parametrize(('test_item'), TEST_ITEMS)
def test_containers_smartstate_analysis_api(provider, test_item, soft_assert,
                                            delete_all_container_tasks, random_image_instance):
    """
    Test initiating a SmartState Analysis scan via the CFME API through the ManageIQ API Client
    entity class.
    RFE: BZ 1486362
    Polarion:
        assignee: juwatts
        caseimportance: high
        casecomponent: Containers
        initialEstimate: 1/6h
    """
    if test_item.is_openscap:
        random_image_instance.assign_policy_profiles('OpenSCAP profile')
    else:
        random_image_instance.unassign_policy_profiles('OpenSCAP profile')
    # remember the previous scan timestamp so a new scan can be detected
    original_scan = random_image_instance.last_scan_attempt_on
    random_image_instance.scan()
    task = provider.appliance.collections.tasks.instantiate(
        name=f"Container Image Analysis: '{random_image_instance.name}'", tab='AllTasks')
    task.wait_for_finished()
    # fixed typo in the failure message ("Anaysis" -> "Analysis")
    soft_assert(original_scan != random_image_instance.last_scan_attempt_on,
                'SmartState Analysis scan has failed')
|
"""
.. currentmodule:: __init__.py
.. moduleauthor:: Pat Daburu <pat@daburu.net>
Provide a brief description of the module.
"""
|
from stopeight import analyzer
version=analyzer.version
from stopeight.util.editor.data import ScribbleData
def legal_segments(data):
    """Return the legal segments of *data* as a ScribbleData array view.

    Wraps the input in a stopeight Vectors matrix, runs the analyzer's
    legal_segments pass, and views the resulting array as ScribbleData.
    """
    from stopeight.matrix import Vectors
    # alias the analyzer function so it does not shadow this wrapper's name
    from stopeight.analyzer import legal_segments as _legal_segments
    return _legal_segments(Vectors(data)).__array__().view(ScribbleData)
legal_segments.__annotations__ = {'data':ScribbleData,'return':ScribbleData}
|
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.utils.translation import ugettext as _
class Application(models.Model):
    """An OAuth client application registered with this provider."""
    # client_id doubles as the primary key
    client_id = models.CharField(_('Client ID'), max_length=40, blank=False, primary_key=True)
    client_secret = models.CharField(_('Client secret'), max_length=40, blank=False)
    name = models.CharField(_('Application Name'), max_length=40, blank=False)
    home_url = models.CharField(_('URL'), max_length=255, blank=False)
    redirect_uri = models.CharField(_('Redirect URI'), max_length=255, blank=True)
    def __unicode__(self):
        return unicode(self.client_id)
class Code(models.Model):
    """A short-lived OAuth authorization code issued to a client for a user."""
    client = models.ForeignKey(Application)
    user = models.ForeignKey(User)
    scope = models.CharField(_('Scope'), max_length=255, blank=True)
    code = models.CharField(_('Code'), max_length=255, blank=False)
    # timestamps are stored as strings rather than DateTimeFields
    creation_timestamp = models.CharField(_('Creation timestamp'), max_length=40, blank=False)
    expires_in = models.CharField(_('Expires in'), max_length=40, blank=True)
    class Meta:
        # a given client cannot be issued the same code value twice
        unique_together = ('client', 'code')
    def __unicode__(self):
        return unicode(self.code)
class Token(models.Model):
    """An OAuth access token (with optional refresh token) for a client/user pair."""
    # the token string itself is the primary key
    token = models.CharField(_('Token'), max_length=40, blank=False, primary_key=True)
    client = models.ForeignKey(Application)
    user = models.ForeignKey(User)
    scope = models.CharField(_('Scope'), max_length=255, blank=True)
    token_type = models.CharField(_('Token type'), max_length=10, blank=False)
    refresh_token = models.CharField(_('Refresh token'), max_length=40, blank=True)
    # timestamps are stored as strings rather than DateTimeFields
    creation_timestamp = models.CharField(_('Creation timestamp'), max_length=40, blank=False)
    expires_in = models.CharField(_('Expires in'), max_length=40, blank=True)
    def __unicode__(self):
        return unicode(self.token)
@receiver(post_save, sender=Application)
def invalidate_tokens_on_change(sender, instance, created, raw, **kwargs):
    """Signal handler: when an existing Application is edited (not created),
    force all of its tokens to expire by zeroing their creation timestamps."""
    if created is not False:
        return
    instance.token_set.all().update(creation_timestamp='0')
|
import math
import os
import sys
import xml.etree.ElementTree as ETree
import tabulate
from mdutils.mdutils import MdUtils
# Default settings: fill unused register holes; no enum types for 1-bit fields.
DEFAULT_INI = {'global': {'unusedholes': 'yes',
                          'onebitenum': 'no'}}
def sortRegisterAndFillHoles(regName,
                             fieldNameList,
                             bitOffsetList,
                             bitWidthList,
                             fieldDescList,
                             enumTypeList,
                             unusedHoles=True):
    """Sort a register's fields by bit offset and optionally fill the gaps.

    All per-field lists are sorted together by ascending bit offset; offsets
    and widths are coerced to int. When *unusedHoles* is True, every gap
    between fields (up to the end of the highest field) is filled with a
    synthetic 'unusedN' field of matching width.

    Returns (regName, names, offsets, widths, descriptions, enums), all lists.
    """
    offsets = [int(x) for x in bitOffsetList]
    widths = [int(x) for x in bitWidthList]
    # sort all columns together, lowest offset first (stable sort)
    rows = sorted(zip(offsets, fieldNameList, widths, fieldDescList, enumTypeList),
                  key=lambda row: row[0])
    offsets = [row[0] for row in rows]
    names = [row[1] for row in rows]
    widths = [row[2] for row in rows]
    descriptions = [row[3] for row in rows]
    enums = [row[4] for row in rows]
    if unusedHoles:
        holeCount = 0
        cursor = 0  # next expected bit position
        index = 0
        registerWidth = offsets[-1] + widths[-1]
        while registerWidth > cursor:
            if cursor != offsets[index]:
                # insert a synthetic field covering the gap before this field
                gap = offsets[index] - cursor
                offsets.insert(index, cursor)
                names.insert(index, 'unused' + str(holeCount))
                widths.insert(index, gap)
                descriptions.insert(index, 'unused')
                enums.insert(index, '')
                holeCount += 1
            cursor = offsets[index] + widths[index]
            index += 1
    return regName, names, offsets, widths, descriptions, enums
class documentClass():
    """Root container for one generated document, grouping memory maps."""

    def __init__(self, name):
        """Create an empty document called *name*."""
        self.name = name
        self.memoryMapList = list()

    def addMemoryMap(self, memoryMap):
        """Attach one memory map to this document."""
        self.memoryMapList.append(memoryMap)
class memoryMapClass():
    """One memory map, grouping a set of address blocks."""

    def __init__(self, name):
        """Create an empty memory map called *name*."""
        self.name = name
        self.addressBlockList = list()

    def addAddressBlock(self, addressBlock):
        """Attach one address block to this memory map."""
        self.addressBlockList.append(addressBlock)
class addressBlockClass():
    """Base class for one address block: its name, bus widths and registers.

    Subclasses implement returnAsString() for a concrete output format and
    set *suffix* to the generated file's extension.
    """

    def __init__(self, name, addrWidth, dataWidth):
        self.name = name
        self.addrWidth = addrWidth
        self.dataWidth = dataWidth
        self.registerList = []
        self.suffix = ""

    def addRegister(self, reg):
        """Append a single registerClass instance to this block."""
        assert isinstance(reg, registerClass)
        self.registerList.append(reg)

    def setRegisterList(self, registerList):
        """Replace the entire register list."""
        self.registerList = registerList

    def returnAsString(self):
        """Render this block; concrete subclasses must override."""
        # fixed typo in the original message ("virutal" -> "virtual")
        raise NotImplementedError("method returnAsString() is virtual and must be overridden.")
class registerClass():
    """Plain data holder describing one register and its parallel field lists."""

    def __init__(self, name, address, resetValue, size, access, desc, fieldNameList,
                 bitOffsetList, bitWidthList, fieldDescList, enumTypeList):
        """Store the register description; all per-field lists run in parallel."""
        assert isinstance(enumTypeList, list), 'enumTypeList is not a list'
        self.name = name
        self.address = address
        self.resetValue = resetValue
        self.size = size
        self.access = access
        self.desc = desc
        self.fieldNameList = fieldNameList
        self.bitOffsetList = bitOffsetList
        self.bitWidthList = bitWidthList
        self.fieldDescList = fieldDescList
        self.enumTypeList = enumTypeList
class enumTypeClassRegistry():
    """Registry that detects duplicate enum type definitions across registers.

    (Arguably should be a singleton; kept as a plain class.)
    """

    def __init__(self):
        self.listOfEnums = []

    def enumAllReadyExist(self, enum):
        """Register *enum*, flagging it when an equivalent enum was seen before.

        On a match, *enum* gets allReadyExist=True and enumName set to the
        earlier enum's name. The enum is always appended and returned.
        """
        for known in self.listOfEnums:
            if known.compare(enum):
                enum.allReadyExist = True
                enum.enumName = known.name
                break
        self.listOfEnums.append(enum)
        return enum
class enumTypeClass():
    """One enumerated type: keys/values/descriptions sorted together by value.

    allReadyExist/enumName are filled in by enumTypeClassRegistry when an
    equivalent enum was registered earlier.
    """
    def __init__(self, name, bitWidth, keyList, valueList, descrList):
        self.name = name
        self.bitWidth = bitWidth
        # sort all three lists together by enum value
        matrix = list(zip(valueList, keyList, descrList))
        matrix.sort(key=lambda x: x[0])
        valueList, keyList, descrList = list(zip(*matrix))
        self.keyList = list(keyList)
        self.valueList = list(valueList)
        self.allReadyExist = False
        self.enumName = None
        # NOTE(review): descrList stays a tuple while key/value lists are lists
        self.descrList = descrList
    def compare(self, other):
        """Return True when *other* is considered the same enum type."""
        result = True
        result = self.bitWidth == other.bitWidth and result
        result = self.compareLists(self.keyList, other.keyList) and result
        return result
    def compareLists(self, list1, list2):
        # NOTE(review): returns True when ANY element of list1 occurs in list2,
        # so enums sharing a single key compare as equal -- confirm intended.
        for val in list1:
            if val in list2:
                return True
        return False
class rstAddressBlock(addressBlockClass):
    """Generates a ReStructuredText file from a IP-XACT register description"""
    def __init__(self, name, addrWidth, dataWidth):
        self.name = name
        self.addrWidth = addrWidth
        self.dataWidth = dataWidth
        self.registerList = []
        self.suffix = ".rst"
    def returnEnumValueString(self, enumTypeObj):
        """Return 'key=value, ...' for an enum type, or '' for non-enums."""
        if isinstance(enumTypeObj, enumTypeClass):
            l = []
            for i in range(len(enumTypeObj.keyList)):
                l.append(enumTypeObj.keyList[i] + '=' + enumTypeObj.valueList[i])
            s = ", ".join(l)
        else:
            s = ''
        return s
    def returnAsString(self):
        """Render the whole address block as one RST document string."""
        r = ""
        regNameList = [reg.name for reg in self.registerList]
        regAddressList = [reg.address for reg in self.registerList]
        regDescrList = [reg.desc for reg in self.registerList]
        r += self.returnRstTitle()
        r += self.returnRstSubTitle()
        # summary table; the trailing "_" makes each name an RST link target ref
        summary_table = []
        for i in range(len(regNameList)):
            summary_table.append(["%#04x" % regAddressList[i], str(regNameList[i]) + "_", str(regDescrList[i])])
        r += tabulate.tabulate(summary_table,
                               headers=['Address', 'Register Name', 'Description'],
                               tablefmt="grid")
        r += "\n"
        r += "\n"
        for reg in self.registerList:
            r += self.returnRstRegDesc(reg.name, reg.address, reg.size, reg.resetValue, reg.desc, reg.access)
            reg_table = []
            # fields are emitted MSB-first
            for fieldIndex in reversed(list(range(len(reg.fieldNameList)))):
                bits = "[" + str(reg.bitOffsetList[fieldIndex] + reg.bitWidthList[fieldIndex] - 1) + \
                       ":" + str(reg.bitOffsetList[fieldIndex]) + "]"
                _line = [bits,
                         reg.fieldNameList[fieldIndex]]
                if reg.resetValue:
                    # slice this field's reset value out of the register reset value
                    temp = (int(reg.resetValue, 0) >> reg.bitOffsetList[fieldIndex])
                    mask = (2 ** reg.bitWidthList[fieldIndex]) - 1
                    temp &= mask
                    temp = "{value:#0{width}x}".format(value=temp,
                                                      width=math.ceil(reg.bitWidthList[fieldIndex] / 4) + 2)
                    _line.append(temp)
                _line.append(reg.fieldDescList[fieldIndex])
                reg_table.append(_line)
            _headers = ['Bits', 'Field name']
            if reg.resetValue:
                _headers.append('Reset')
            _headers.append('Description')
            r += tabulate.tabulate(reg_table,
                                   headers=_headers,
                                   tablefmt="grid")
            r += "\n"
            r += "\n"
            # enumerations
            for enum in reg.enumTypeList:
                if enum:
                    # header
                    r += enum.name + "\n"
                    r += ',' * len(enum.name) + "\n"
                    r += "\n"
                    # table
                    enum_table = []
                    for i in range(len(enum.keyList)):
                        _value = "{value:#0{width}x}".format(value=int(enum.valueList[i], 0),
                                                            width=math.ceil(int(enum.bitWidth, 0) / 4) + 2)
                        _line = [enum.keyList[i],
                                 _value,
                                 enum.descrList[i]]
                        enum_table.append(_line)
                    r += tabulate.tabulate(enum_table,
                                           headers=['Name', 'Value', 'Description'],
                                           tablefmt="grid")
                    r += "\n\n"
        return r
    def returnRstTitle(self):
        """Return the top-level RST document title."""
        r = ''
        r += "====================\n"
        r += "Register description\n"
        r += "====================\n\n"
        return r
    def returnRstSubTitle(self):
        """Return the 'Registers' section heading."""
        r = ''
        r += "Registers\n"
        r += "---------\n\n"
        return r
    def returnRstRegDesc(self, name, address, size, resetValue, desc, access):
        """Return the RST field-list block describing one register."""
        r = ""
        r += name + "\n"
        r += len(name) * '-' + "\n"
        r += "\n"
        r += ":Name: " + str(name) + "\n"
        r += ":Address: " + hex(address) + "\n"
        if resetValue:
            # display the resetvalue in hex notation in the full length of the register
            r += ":Reset Value: {value:#0{size:d}x}\n".format(value=int(resetValue, 0), size=size // 4 + 2)
        r += ":Access: " + access + "\n"
        r += ":Description: " + desc + "\n"
        r += "\n"
        return r
class mdAddressBlock(addressBlockClass):
    """Generates a Markdown file from a IP-XACT register description"""
    def __init__(self, name, addrWidth, dataWidth):
        self.name = name
        self.addrWidth = addrWidth
        self.dataWidth = dataWidth
        self.registerList = []
        self.suffix = ".md"
        # all output is accumulated in this MdUtils buffer
        self.mdFile = MdUtils(file_name="none",
                              title="")
    def returnEnumValueString(self, enumTypeObj):
        """Return 'key=value, ...' for an enum type, or '' for non-enums."""
        if isinstance(enumTypeObj, enumTypeClass):
            l = []
            for i in range(len(enumTypeObj.keyList)):
                l.append(enumTypeObj.keyList[i] + '=' + enumTypeObj.valueList[i])
            s = ", ".join(l)
        else:
            s = ''
        return s
    def returnAsString(self):
        """Render the whole address block as one Markdown document string."""
        regNameList = [reg.name for reg in self.registerList]
        regAddressList = [reg.address for reg in self.registerList]
        regDescrList = [reg.desc for reg in self.registerList]
        self.mdFile.new_header(level=1, title="Register description")
        self.mdFile.new_header(level=2, title="Registers")
        # summary
        header = ['Address', 'Register Name', 'Description']
        rows = []
        for i in range(len(regNameList)):
            rows.extend(["{:#04x}".format(regAddressList[i]),
                         f"[{regNameList[i]}](#{regNameList[i]})",
                         str(regDescrList[i])])
        self.mdFile.new_table(columns=len(header),
                              rows=len(regNameList) + 1,  # header + data
                              text=header + rows,
                              text_align='left')
        # all registers
        for reg in self.registerList:
            headers = ['Bits', 'Field name']
            if reg.resetValue:
                headers.append('Reset')
            headers.append('Description')
            self.returnMdRegDesc(reg.name, reg.address, reg.size, reg.resetValue, reg.desc, reg.access)
            reg_table = []
            # fields are emitted MSB-first
            for fieldIndex in reversed(list(range(len(reg.fieldNameList)))):
                bits = "[" + str(reg.bitOffsetList[fieldIndex] + reg.bitWidthList[fieldIndex] - 1) + \
                       ":" + str(reg.bitOffsetList[fieldIndex]) + "]"
                reg_table.append(bits)
                reg_table.append(reg.fieldNameList[fieldIndex])
                if reg.resetValue:
                    # slice this field's reset value out of the register reset value
                    temp = (int(reg.resetValue, 0) >> reg.bitOffsetList[fieldIndex])
                    mask = (2 ** reg.bitWidthList[fieldIndex]) - 1
                    temp &= mask
                    temp = "{value:#0{width}x}".format(value=temp,
                                                      width=math.ceil(reg.bitWidthList[fieldIndex] / 4) + 2)
                    reg_table.append(temp)
                reg_table.append(reg.fieldDescList[fieldIndex])
            self.mdFile.new_table(columns=len(headers),
                                  rows=len(reg.fieldNameList) + 1,
                                  text=headers + reg_table,
                                  text_align='left')
            # enumerations
            for enum in reg.enumTypeList:
                if enum:
                    self.mdFile.new_header(level=4,
                                           title=enum.name)
                    enum_table = []
                    for i in range(len(enum.keyList)):
                        _value = "{value:#0{width}x}".format(value=int(enum.valueList[i], 0),
                                                            width=math.ceil(int(enum.bitWidth, 0) / 4) + 2)
                        enum_table.append(enum.keyList[i])
                        enum_table.append(_value)
                        enum_table.append(enum.descrList[i])
                    headers = ['Name', 'Value', 'Description']
                    self.mdFile.new_table(columns=len(headers),
                                          rows=len(enum.keyList) + 1,
                                          text=headers + enum_table,
                                          text_align='left')
        return self.mdFile.file_data_text
    def returnMdRegDesc(self, name, address, size, resetValue, desc, access):
        """Append the Markdown header block describing one register."""
        self.mdFile.new_header(level=3, title=name)
        self.mdFile.new_line("**Name** " + str(name))
        self.mdFile.new_line("**Address** " + hex(address))
        if resetValue:
            # display the resetvalue in hex notation in the full length of the register
            self.mdFile.new_line(
                "**Reset Value** {value:#0{size:d}x}".format(value=int(resetValue, 0), size=size // 4 + 2))
        self.mdFile.new_line("**Access** " + access)
        self.mdFile.new_line("**Description** " + desc)
class vhdlAddressBlock(addressBlockClass):
"""Generates a vhdl file from a IP-XACT register description"""
    def __init__(self, name, addrWidth, dataWidth):
        """VHDL package generator for one address block; output suffix _vhd_pkg.vhd."""
        self.name = name
        self.addrWidth = addrWidth
        self.dataWidth = dataWidth
        self.registerList = []
        self.suffix = "_vhd_pkg.vhd"
def returnAsString(self):
r = ''
r += self.returnPkgHeaderString()
r += "\n\n"
r += self.returnPkgBodyString()
return r
    def returnPkgHeaderString(self):
        """Return the package declaration: constants, types and subprogram prototypes."""
        r = ''
        r += "--\n"
        r += "-- Automatically generated\n"
        r += "-- with the command '%s'\n" % (' '.join(sys.argv))
        r += "--\n"
        r += "-- Do not manually edit!\n"
        r += "--\n"
        r += "-- VHDL 93\n"
        r += "--\n"
        r += "\n"
        r += "library ieee;\n"
        r += "use ieee.std_logic_1164.all;\n"
        r += "use ieee.numeric_std.all;\n"
        r += "\n"
        r += "package " + self.name + "_vhd_pkg is\n"
        r += "\n"
        r += " constant addr_width : natural := " + str(self.addrWidth) + ";\n"
        r += " constant data_width : natural := " + str(self.dataWidth) + ";\n"
        r += "\n\n"
        r += self.returnRegFieldEnumTypeStrings(True)
        # one address constant per register
        for reg in self.registerList:
            r += " constant {name}_addr : natural := {address} ; -- {address:#0{width}x}\n".format(name=reg.name,
                                                                                                   address=reg.address,
                                                                                                   width=math.ceil(
                                                                                                       self.addrWidth / 4) + 2)  # +2 for the '0x'
        r += "\n"
        # one reset-value constant per register that defines one
        for reg in self.registerList:
            if reg.resetValue:
                r += " constant {name}_reset_value : std_ulogic_vector(data_width-1 downto 0) := std_ulogic_vector(to_unsigned({value:d}, data_width)); -- {value:#0{width}x}\n".format(
                    name=reg.name,
                    value=int(reg.resetValue, 0),
                    width=math.ceil((self.dataWidth / 4)) + 2)
        r += "\n\n"
        for reg in self.registerList:
            r += self.returnRegRecordTypeString(reg)
        r += self.returnRegistersInRecordTypeString()
        r += self.returnRegistersOutRecordTypeString()
        r += self.returnRegistersReadFunction()
        r += self.returnRegistersWriteFunction()
        r += self.returnRegistersResetFunction()
        r += "end;\n"
        return r
    def returnRegFieldEnumTypeStrings(self, prototype):
        """Return enum type declarations plus their to/from std_ulogic_vector helpers.

        With prototype=True only type declarations and function prototypes are
        emitted (package header); with prototype=False the full function
        bodies are emitted (package body). Enums flagged allReadyExist are
        skipped to avoid duplicate declarations.
        """
        r = ''
        for reg in self.registerList:
            for enum in reg.enumTypeList:
                if isinstance(enum, enumTypeClass) and not enum.allReadyExist:
                    r += " -- {}\n".format(enum.name)  # group the enums in the package
                    if prototype:
                        t = " type " + enum.name + "_enum is ("
                        indent = t.find('(') + 1
                        r += t
                        for ki in range(len(enum.keyList)):
                            if ki != 0:  # no indentation for the first element
                                r += " " * indent
                            r += enum.keyList[ki]
                            if ki != len(enum.keyList) - 1:  # no ',' for the last element
                                r += ","
                            else:  # last element
                                r += ");"
                            if enum.descrList[ki]:
                                r += " -- " + enum.descrList[ki]
                            if ki != len(enum.keyList) - 1:  # no new line for the last element
                                r += "\n"
                        r += "\n"
                    r += " function " + enum.name + \
                         "_enum_to_sulv(v: " + enum.name + "_enum ) return std_ulogic_vector"
                    if prototype:
                        r += ";\n"
                    else:
                        r += " is\n"
                        r += " variable r : std_ulogic_vector(" + str(enum.bitWidth) + "-1 downto 0);\n"
                        r += " begin\n"
                        r += " case v is\n"
                        for i in range(len(enum.keyList)):
                            r += ' when {key} => r:="{value_int:0{bitwidth}b}"; -- {value}\n'.format(
                                key=enum.keyList[i],
                                value=enum.valueList[i],
                                value_int=int(enum.valueList[i]),
                                bitwidth=int(enum.bitWidth))
                        r += " end case;\n"
                        r += " return r;\n"
                        r += " end function;\n\n"
                    r += " function sulv_to_" + enum.name + \
                         "_enum(v: std_ulogic_vector(" + str(enum.bitWidth) + "-1 downto 0)) return " + \
                         enum.name + "_enum"
                    if prototype:
                        r += ";\n"
                    else:
                        r += " is\n"
                        r += " variable r : " + enum.name + "_enum;\n"
                        r += " begin\n"
                        r += " case v is\n"
                        for i in range(len(enum.keyList)):
                            r += ' when "{value_int:0{bitwidth}b}" => r:={key};\n'.format(key=enum.keyList[i],
                                                                                         value_int=int(
                                                                                             enum.valueList[
                                                                                                 i]),
                                                                                         bitwidth=int(
                                                                                             enum.bitWidth))
                        # unmapped patterns fall back to the first key
                        r += ' when others => r:=' + enum.keyList[0] + '; -- error\n'
                        r += " end case;\n"
                        r += " return r;\n"
                        r += " end function;\n\n"
                    if prototype:
                        r += "\n"
                    if prototype:
                        r += "\n"
        return r
    def returnRegRecordTypeString(self, reg):
        """Return the VHDL record type declaration for one register's fields."""
        r = ''
        r += " type " + reg.name + "_record_type is record\n"
        for i in reversed(list(range(len(reg.fieldNameList)))):
            bits = "[" + str(reg.bitOffsetList[i] + reg.bitWidthList[i] - 1) + ":" + str(reg.bitOffsetList[i]) + "]"
            bit = "[" + str(reg.bitOffsetList[i]) + "]"
            if isinstance(reg.enumTypeList[i], enumTypeClass):
                if not reg.enumTypeList[i].allReadyExist:
                    r += " " + reg.fieldNameList[i] + " : " + \
                         reg.enumTypeList[i].name + "_enum; -- " + bits + "\n"
                else:
                    # reuse the first equivalent enum's type name
                    r += " " + reg.fieldNameList[i] + " : " + \
                         reg.enumTypeList[i].enumName + "_enum; -- " + bits + "\n"
            else:
                if reg.bitWidthList[i] == 1:  # single bit
                    r += " " + reg.fieldNameList[i] + " : std_ulogic; -- " + bit + "\n"
                else:  # vector
                    r += " " + reg.fieldNameList[i] + " : std_ulogic_vector(" + str(reg.bitWidthList[i] - 1) + \
                         " downto 0); -- " + bits + "\n"
        r += " end record;\n\n"
        return r
    def returnRegistersInRecordTypeString(self):
        """Return the record type grouping all read-only registers (inputs)."""
        r = ""
        r += " type " + self.name + "_in_record_type is record\n"
        for reg in self.registerList:
            if reg.access == "read-only":
                r += " {name} : {name}_record_type; -- addr {addr:#0{width}x}\n".format(name=reg.name,
                                                                                        addr=reg.address,
                                                                                        width=math.ceil(
                                                                                            self.addrWidth / 4) + 2)  # +2 for the '0x'
        r += " end record;\n\n"
        return r
    def returnRegistersOutRecordTypeString(self):
        """Return the record type grouping all writable registers (outputs)."""
        r = ""
        r += " type " + self.name + "_out_record_type is record\n"
        for reg in self.registerList:
            if reg.access != "read-only":
                r += " {name} : {name}_record_type; -- addr {addr:#0{width}x}\n".format(name=reg.name,
                                                                                        addr=reg.address,
                                                                                        width=math.ceil(
                                                                                            self.addrWidth / 4) + 2)  # +2 for the '0x'
        r += " end record;\n\n"
        return r
def returnRegistersReadFunction(self):
r = " function read_" + self.name + "(registers_i : " + self.name + "_in_record_type;\n"
indent = r.find('(') + 1
r += " " * indent + "registers_o : " + self.name + "_out_record_type;\n"
r += " " * indent + "address : std_ulogic_vector(addr_width-1 downto 0)\n"
r += " " * indent + ") return std_ulogic_vector;\n\n"
return r
    def returnRegistersWriteFunction(self):
        """Return the prototype of the write_<block> function (declaration part)."""
        r = " function write_" + self.name + "(value : std_ulogic_vector(data_width-1 downto 0);\n"
        # align continuation lines with the opening parenthesis
        indent = r.find('(') + 1
        r += " " * indent + "address : std_ulogic_vector(addr_width-1 downto 0);\n"
        r += " " * indent + "registers_o : " + self.name + "_out_record_type\n"
        r += " " * indent + ") return " + self.name + "_out_record_type;\n\n"
        return r
    def returnRegistersResetFunction(self):
        """Return prototypes of both reset_<block> overloads (declaration part)."""
        r = " function reset_" + self.name + " return " + self.name + "_out_record_type;\n"
        r += " function reset_" + self.name + "(address: std_ulogic_vector(addr_width-1 downto 0);\n"
        # align continuation lines with the second overload's opening parenthesis
        indent = r.splitlines()[-1].find('(') + 1
        r += " " * indent + "registers_o : " + self.name + "_out_record_type\n"
        r += " " * indent + ") return " + self.name + "_out_record_type;\n\n"
        return r
    def returnRecToSulvFunctionString(self, reg):
        """Return the VHDL function converting a <reg>_record_type value into
        a flat std_ulogic_vector of data_width bits."""
        r = ""
        r += " function " + reg.name + \
            "_record_type_to_sulv(v : " + reg.name + "_record_type) return std_ulogic_vector is\n"
        r += " variable r : std_ulogic_vector(data_width-1 downto 0);\n"
        r += " begin\n"
        r += " r := (others => '0');\n"
        # Emit fields from the highest index down so the generated assignments
        # appear in descending bit order.
        for i in reversed(list(range(len(reg.fieldNameList)))):
            bits = str(reg.bitOffsetList[i] + reg.bitWidthList[i] - 1) + " downto " + str(reg.bitOffsetList[i])
            bit = str(reg.bitOffsetList[i])
            if isinstance(reg.enumTypeList[i], enumTypeClass):
                # Enum fields go through their <enum>_enum_to_sulv helper;
                # when an identical enum was already registered, reuse its name.
                if not reg.enumTypeList[i].allReadyExist:
                    r += " r(" + bits + ") := " + \
                        reg.enumTypeList[i].name + "_enum_to_sulv(v." + reg.fieldNameList[i] + ");\n"
                else:
                    r += " r(" + bits + ") := " + \
                        reg.enumTypeList[i].enumName + "_enum_to_sulv(v." + reg.fieldNameList[i] + ");\n"
            else:
                if reg.bitWidthList[i] == 1:  # single bit
                    r += " r(" + bit + ") := v." + reg.fieldNameList[i] + ";\n"
                else:  # vector
                    r += " r(" + bits + ") := v." + reg.fieldNameList[i] + ";\n"
        r += " return r;\n"
        r += " end function;\n\n"
        return r
    def returnSulvToRecFunctionString(self, reg):
        """Return the VHDL function converting a std_ulogic_vector back into
        the <reg>_record_type (inverse of <reg>_record_type_to_sulv)."""
        r = ""
        r += " function sulv_to_" + reg.name + \
            "_record_type(v : std_ulogic_vector) return " + reg.name + "_record_type is\n"
        r += " variable r : " + reg.name + "_record_type;\n"
        r += " begin\n"
        # Emit fields from the highest index down, mirroring the encoder.
        for i in reversed(list(range(len(reg.fieldNameList)))):
            bits = str(reg.bitOffsetList[i] + reg.bitWidthList[i] - 1) + " downto " + str(reg.bitOffsetList[i])
            bit = str(reg.bitOffsetList[i])
            if isinstance(reg.enumTypeList[i], enumTypeClass):
                # Enum fields decode through their sulv_to_<enum>_enum helper;
                # when an identical enum was already registered, reuse its name.
                if not reg.enumTypeList[i].allReadyExist:
                    r += " r." + reg.fieldNameList[i] + " := sulv_to_" + \
                        reg.enumTypeList[i].name + "_enum(v(" + bits + "));\n"
                else:
                    r += " r." + reg.fieldNameList[i] + " := sulv_to_" + \
                        reg.enumTypeList[i].enumName + "_enum(v(" + bits + "));\n"
            else:
                if reg.bitWidthList[i] == 1:  # single bit
                    r += " r." + reg.fieldNameList[i] + " := v(" + bit + ");\n"
                else:
                    r += " r." + reg.fieldNameList[i] + " := v(" + bits + ");\n"
        r += " return r;\n"
        r += " end function;\n\n"
        return r
def returnReadFunctionString(self):
r = ""
t = " function read_" + self.name + "(registers_i : " + self.name + "_in_record_type;\n"
indent = t.find('(') + 1
r += t
r += " " * indent + "registers_o : " + self.name + "_out_record_type;\n"
r += " " * indent + "address : std_ulogic_vector(addr_width-1 downto 0)\n"
r += " " * indent + ") return std_ulogic_vector is\n"
r += " variable r : std_ulogic_vector(data_width-1 downto 0);\n"
r += " begin\n"
r += " case to_integer(unsigned(address)) is\n"
for reg in self.registerList:
if reg.access == "read-only":
r += " when " + reg.name + "_addr => r:= " + reg.name + \
"_record_type_to_sulv(registers_i." + reg.name + ");\n"
else:
r += " when " + reg.name + "_addr => r:= " + reg.name + \
"_record_type_to_sulv(registers_o." + reg.name + ");\n"
r += " when others => r := (others => '0');\n"
r += " end case;\n"
r += " return r;\n"
r += " end function;\n\n"
return r
def returnWriteFunctionString(self):
r = ""
t = " function write_" + self.name + "(value : std_ulogic_vector(data_width-1 downto 0);\n"
r += t
indent = t.find('(') + 1
r += " " * indent + "address : std_ulogic_vector(addr_width-1 downto 0);\n"
r += " " * indent + "registers_o : " + self.name + "_out_record_type\n"
r += " " * indent + ") return " + self.name + "_out_record_type is\n"
r += " variable r : " + self.name + "_out_record_type;\n"
r += " begin\n"
r += " r := registers_o;\n"
r += " case to_integer(unsigned(address)) is\n"
for reg in self.registerList:
if reg.access != "read-only":
r += " when " + reg.name + "_addr => r." + reg.name + \
" := sulv_to_" + reg.name + "_record_type(value);\n"
r += " when others => null;\n"
r += " end case;\n"
r += " return r;\n"
r += " end function;\n\n"
return r
    def returnResetFunctionString(self):
        """Return the VHDL bodies of the two reset_<block> overloads: a nullary
        one resetting every register, and a per-address one."""
        r = ""
        r += " function reset_" + self.name + " return " + self.name + "_out_record_type is\n"
        r += " variable r : " + self.name + "_out_record_type;\n"
        r += " begin\n"
        for reg in self.registerList:
            # Only writable registers with a defined reset value are assigned.
            if reg.resetValue:
                if reg.access != "read-only":
                    r += " r." + reg.name + " := sulv_to_" + \
                        reg.name + "_record_type(" + reg.name + "_reset_value);\n"
        r += " return r;\n"
        r += " end function;\n"
        r += "\n"
        r += " function reset_" + self.name + "(address: std_ulogic_vector(addr_width-1 downto 0);\n"
        # Align continuation lines with the '(' in the line just appended.
        indent = r.splitlines()[-1].find('(') + 1
        r += " " * indent + "registers_o : " + self.name + "_out_record_type\n"
        r += " " * indent + ") return " + self.name + "_out_record_type is\n"
        r += " variable r : " + self.name + "_out_record_type;\n"
        r += " begin\n"
        r += " r := registers_o;\n"
        r += " case to_integer(unsigned(address)) is\n"
        for reg in self.registerList:
            if reg.resetValue:
                if reg.access != "read-only":
                    r += " when " + reg.name + "_addr => r." + reg.name + \
                        " := sulv_to_" + reg.name + "_record_type(" + reg.name + "_reset_value);\n"
        r += " when others => null;\n"
        r += " end case;\n"
        r += " return r;\n"
        r += " end function;\n\n"
        return r
def returnPkgBodyString(self):
r = ""
r += "package body " + self.name + "_vhd_pkg is\n\n"
r += self.returnRegFieldEnumTypeStrings(False)
for reg in self.registerList:
r += self.returnRecToSulvFunctionString(reg)
r += self.returnSulvToRecFunctionString(reg)
r += self.returnReadFunctionString()
r += self.returnWriteFunctionString()
r += self.returnResetFunctionString()
r += "end package body;\n"
return r
class systemVerilogAddressBlock(addressBlockClass):
    """Generates a SystemVerilog package (constants, packed structs and
    access functions) for one IP-XACT address block."""

    def __init__(self, name, addrWidth, dataWidth):
        self.name = name            # block name, prefixes generated identifiers
        self.addrWidth = addrWidth  # address width in bits
        self.dataWidth = dataWidth  # data width in bits
        self.registerList = []
        self.suffix = "_sv_pkg.sv"  # generated file name suffix

    def returnIncludeString(self):
        """Return `define lines with the block's address and data widths."""
        r = "\n"
        r += "`define " + self.name + "_addr_width " + str(self.addrWidth) + "\n"
        r += "`define " + self.name + "_data_width " + str(self.dataWidth) + "\n"
        return r

    def returnSizeString(self):
        """Return const int declarations of addr_width / data_width."""
        r = "\n"
        r += "const int addr_width = " + str(self.addrWidth) + ";\n"
        r += "const int data_width = " + str(self.dataWidth) + ";\n"
        return r

    def returnAddressesString(self):
        """Return one 'const int <reg>_addr' declaration per register."""
        r = "\n"
        for reg in self.registerList:
            r += "const int " + reg.name + "_addr = " + str(reg.address) + ";\n"
        r += "\n"
        return r

    def returnAddressListString(self):
        """Return simulation-only arrays listing register addresses, names and
        which registers lack a reset value."""
        r = "\n"
        # NOTE(review): the "\n" assigned above is immediately overwritten --
        # the next line was probably meant to use '+='; confirm intended output.
        r = "//synopsys translate_off\n"
        r += "const int " + self.name + "_regAddresses [" + str(len(self.registerList)) + "] = '{"
        l = []
        for reg in self.registerList:
            l.append("\n " + reg.name + "_addr")
        r += ",".join(l)
        r += "};\n"
        r += "\n"
        r += "const string " + self.name + "_regNames [" + str(len(self.registerList)) + "] = '{"
        l = []
        for reg in self.registerList:
            l.append('\n "' + reg.name + '"')
        r += ",".join(l)
        r += "};\n"
        # 1'b1 marks registers that have no reset value ("unreseted").
        r += "const reg " + self.name + "_regUnResetedAddresses [" + str(len(self.registerList)) + "] = '{"
        l = []
        for reg in self.registerList:
            if reg.resetValue:
                l.append("\n 1'b0")
            else:
                l.append("\n 1'b1")
        r += ",".join(l)
        r += "};\n"
        r += "\n"
        r += "//synopsys translate_on\n\n"
        return r

    def enumeratedType(self, prepend, fieldName, valueNames, values):
        """Return a typedef enum for one field's enumerated values.

        NOTE(review): 'prepend' is unused; per the comment below it was
        presumably intended to namespace the generated enum name -- confirm.
        """
        r = "\n"
        members = []
        # dont want to create to simple names in the global names space.
        # should preppend with name from ipxact file
        for index in range(len(valueNames)):
            name = valueNames[index]
            value = values[index]
            members.append(name + "=" + value)
        r += "typedef enum { " + ",".join(members) + "} enum_" + fieldName + ";\n"
        return r

    def returnResetValuesString(self):
        """Return '<reg>_reset_value' constants for registers that define one."""
        r = ""
        for reg in self.registerList:
            if reg.resetValue:
                # resetValue text is parsed with base auto-detection (e.g. "0x10").
                r += "const " + reg.name + "_struct_type " + reg.name + \
                    "_reset_value = " + str(int(reg.resetValue, 0)) + ";\n"
        r += "\n"
        return r

    def returnStructString(self):
        """Return one packed struct typedef per register, fields high-to-low."""
        r = "\n"
        for reg in self.registerList:
            r += "\ntypedef struct packed {\n"
            for i in reversed(list(range(len(reg.fieldNameList)))):
                bits = "bits [" + str(reg.bitOffsetList[i] + reg.bitWidthList[i] - 1) + \
                    ":" + str(reg.bitOffsetList[i]) + "]"
                r += " bit [" + str(reg.bitWidthList[i] - 1) + ":0] " + \
                    str(reg.fieldNameList[i]) + ";//" + bits + "\n"
            r += "} " + reg.name + "_struct_type;\n\n"
        return r

    def returnRegistersStructString(self):
        """Return the struct bundling every register of the block."""
        r = "typedef struct packed {\n"
        for reg in self.registerList:
            r += " " + reg.name + "_struct_type " + reg.name + ";\n"
        r += "} " + self.name + "_struct_type;\n\n"
        return r

    def returnReadFunctionString(self):
        """Return the read_<block> function (address-decoded readback)."""
        r = "function bit [31:0] read_" + self.name + "(" + self.name + "_struct_type registers,int address);\n"
        r += " bit [31:0] r;\n"
        r += " case(address)\n"
        for reg in self.registerList:
            r += " " + reg.name + "_addr: r[$bits(registers." + reg.name + ")-1:0] = registers." + reg.name + ";\n"
        r += " default: r =0;\n"
        r += " endcase\n"
        r += " return r;\n"
        r += "endfunction\n\n"
        return r

    def returnWriteFunctionString(self):
        """Return the write_<block> function (address-decoded register update)."""
        t = "function " + self.name + "_struct_type write_" + self.name + "(bit [31:0] data, int address,\n"
        r = t
        # Continuation line aligned one column past the opening '('.
        indent = r.find('(') + 1
        r += " " * indent + self.name + "_struct_type registers);\n"
        r += " " + self.name + "_struct_type r;\n"
        r += " r = registers;\n"
        r += " case(address)\n"
        for reg in self.registerList:
            r += " " + reg.name + "_addr: r." + reg.name + " = data[$bits(registers." + reg.name + ")-1:0];\n"
        r += " endcase // case address\n"
        r += " return r;\n"
        r += "endfunction\n\n"
        return r

    def returnResetFunctionString(self):
        """Return the reset_<block> function applying all defined reset values."""
        r = "function " + self.name + "_struct_type reset_" + self.name + "();\n"
        r += " " + self.name + "_struct_type r;\n"
        for reg in self.registerList:
            if reg.resetValue:
                r += " r." + reg.name + "=" + reg.name + "_reset_value;\n"
        r += " return r;\n"
        r += "endfunction\n"
        r += "\n"
        return r

    def returnAsString(self):
        """Assemble the complete generated SystemVerilog package text."""
        r = ''
        r += "// Automatically generated\n"
        r += "// with the command '%s'\n" % (' '.join(sys.argv))
        r += "//\n"
        r += "// Do not manually edit!\n"
        r += "//\n"
        r += "package " + self.name + "_sv_pkg;\n\n"
        r += self.returnSizeString()
        r += self.returnAddressesString()
        r += self.returnAddressListString()
        r += self.returnStructString()
        r += self.returnResetValuesString()
        r += self.returnRegistersStructString()
        r += self.returnReadFunctionString()
        r += self.returnWriteFunctionString()
        r += self.returnResetFunctionString()
        r += "endpackage //" + self.name + "_sv_pkg\n"
        return r
class ipxactParser():
    """Parses an IP-XACT (SPIRIT 1.5) XML file into the in-memory
    document/memoryMap/addressBlock/register model used by the generators."""

    def __init__(self, srcFile, config):
        self.srcFile = srcFile
        self.config = config  # ConfigParser-style object with a [global] section
        self.enumTypeClassRegistry = enumTypeClassRegistry()  # dedups identical enums

    def returnDocument(self):
        """Parse srcFile and return the populated documentClass tree."""
        spirit_ns = 'http://www.spiritconsortium.org/XMLSchema/SPIRIT/1.5'
        tree = ETree.parse(self.srcFile)
        ETree.register_namespace('spirit', spirit_ns)
        namespace = tree.getroot().tag[1:].split("}")[0]
        spiritString = '{' + spirit_ns + '}'
        docName = tree.find(spiritString + "name").text
        d = documentClass(docName)
        memoryMaps = tree.find(spiritString + "memoryMaps")
        memoryMapList = memoryMaps.findall(spiritString + "memoryMap") if memoryMaps is not None else []
        for memoryMap in memoryMapList:
            memoryMapName = memoryMap.find(spiritString + "name").text
            addressBlockList = memoryMap.findall(spiritString + "addressBlock")
            m = memoryMapClass(memoryMapName)
            for addressBlock in addressBlockList:
                addressBlockName = addressBlock.find(spiritString + "name").text
                registerList = addressBlock.findall(spiritString + "register")
                baseAddress = int(addressBlock.find(spiritString + "baseAddress").text, 0)
                nbrOfAddresses = int(addressBlock.find(spiritString + "range").text, 0)  # TODO, this is wrong
                # Bits needed to address the block's highest location.
                addrWidth = int(math.ceil((math.log(baseAddress + nbrOfAddresses, 2))))
                dataWidth = int(addressBlock.find(spiritString + "width").text, 0)
                a = addressBlockClass(addressBlockName, addrWidth, dataWidth)
                for registerElem in registerList:
                    regName = registerElem.find(spiritString + "name").text
                    # Reset value is optional in the schema.
                    reset = registerElem.find(spiritString + "reset")
                    if reset is not None:
                        resetValue = reset.find(spiritString + "value").text
                    else:
                        resetValue = None
                    size = int(registerElem.find(spiritString + "size").text, 0)
                    access = registerElem.find(spiritString + "access").text
                    if registerElem.find(spiritString + "description") != None:
                        desc = registerElem.find(spiritString + "description").text
                    else:
                        desc = ""
                    regAddress = baseAddress + int(registerElem.find(spiritString + "addressOffset").text, 0)
                    r = self.returnRegister(spiritString, registerElem, regAddress,
                                            resetValue, size, access, desc, dataWidth)
                    a.addRegister(r)
                m.addAddressBlock(a)
            d.addMemoryMap(m)
        return d

    def returnRegister(self, spiritString, registerElem, regAddress, resetValue, size, access, regDesc, dataWidth):
        """Build a registerClass from one <register> element, resolving each
        field's optional enumerated values through the enum registry."""
        regName = registerElem.find(spiritString + "name").text
        fieldList = registerElem.findall(spiritString + "field")
        # NOTE(review): offsets/widths are kept here as the raw XML text
        # (strings); presumably normalized by sortRegisterAndFillHoles -- confirm.
        fieldNameList = [item.find(spiritString + "name").text for item in fieldList]
        bitOffsetList = [item.find(spiritString + "bitOffset").text for item in fieldList]
        bitWidthList = [item.find(spiritString + "bitWidth").text for item in fieldList]
        fieldDescList = [item.find(spiritString + "description").text for item in fieldList]
        enumTypeList = []
        for index in range(len(fieldList)):
            fieldElem = fieldList[index]
            bitWidth = bitWidthList[index]
            fieldName = fieldNameList[index]
            enumeratedValuesElem = fieldElem.find(spiritString + "enumeratedValues")
            if enumeratedValuesElem is not None:
                enumeratedValueList = enumeratedValuesElem.findall(spiritString + "enumeratedValue")
                valuesNameList = [item.find(spiritString + "name").text for item in enumeratedValueList]
                descrList = [item.find(spiritString + "description").text if item.find(
                    spiritString + "description") is not None else "" for item in enumeratedValueList]
                valuesList = [item.find(spiritString + "value").text for item in enumeratedValueList]
                if len(valuesNameList) > 0:
                    if int(bitWidth) > 1:  # if the field of a enum is longer than 1 bit, always use enums
                        enum = enumTypeClass(fieldName, bitWidth, valuesNameList, valuesList, descrList)
                        # Registry returns the already-registered equivalent if one exists.
                        enum = self.enumTypeClassRegistry.enumAllReadyExist(enum)
                        enumTypeList.append(enum)
                    else:  # bit field of 1 bit
                        if self.config['global'].getboolean('onebitenum'):  # do create one bit enums
                            enum = enumTypeClass(fieldName, bitWidth, valuesNameList, valuesList, descrList)
                            enum = self.enumTypeClassRegistry.enumAllReadyExist(enum)
                            enumTypeList.append(enum)
                        else:  # dont create enums of booleans because this only decreases readability
                            enumTypeList.append(None)
                else:
                    enumTypeList.append(None)
            else:
                enumTypeList.append(None)
        if len(fieldNameList) == 0:
            # A register without explicit fields becomes one full-width field.
            fieldNameList.append(regName)
            bitOffsetList.append(0)
            bitWidthList.append(dataWidth)
            fieldDescList.append('')
            enumTypeList.append(None)
        (regName, fieldNameList, bitOffsetList, bitWidthList, fieldDescList, enumTypeList) = sortRegisterAndFillHoles(
            regName, fieldNameList, bitOffsetList, bitWidthList, fieldDescList, enumTypeList,
            self.config['global'].getboolean('unusedholes'))
        reg = registerClass(regName, regAddress, resetValue, size, access, regDesc, fieldNameList,
                            bitOffsetList, bitWidthList, fieldDescList, enumTypeList)
        return reg
class ipxact2otherGenerator():
    """Drives a generator class over every address block of a parsed document
    and writes one generated file per address block."""

    def __init__(self, destDir, namingScheme="addressBlockName"):
        self.destDir = destDir
        # "addressBlockName" -> file named after the block alone; anything else
        # -> document_map_block naming.
        self.namingScheme = namingScheme

    def write(self, fileName, string):
        """Write *string* to *fileName* under destDir, creating directories.

        Uses makedirs(exist_ok=True), which avoids both the original
        exists()/makedirs() race and the crash when the destination has no
        directory component.
        """
        _dest = os.path.join(self.destDir, fileName)
        print("writing file " + _dest)
        _destDir = os.path.dirname(_dest)
        if _destDir:
            os.makedirs(_destDir, exist_ok=True)
        with open(_dest, "w") as f:
            f.write(string)

    def generate(self, generatorClass, document):
        """Instantiate *generatorClass* for each address block in *document*
        and write its rendered output (plus an include file for SystemVerilog)."""
        self.document = document
        docName = document.name
        for memoryMap in document.memoryMapList:
            mapName = memoryMap.name
            for addressBlock in memoryMap.addressBlockList:
                blockName = addressBlock.name
                block = generatorClass(addressBlock.name,
                                       addressBlock.addrWidth,
                                       addressBlock.dataWidth)
                block.setRegisterList(addressBlock.registerList)
                s = block.returnAsString()
                if self.namingScheme == "addressBlockName":
                    fileName = blockName + block.suffix
                else:
                    fileName = docName + '_' + mapName + '_' + blockName + block.suffix
                self.write(fileName, s)
                if generatorClass == systemVerilogAddressBlock:
                    # suffix "_sv_pkg.sv" + "h" -> an "_sv_pkg.svh" include file
                    includeFileName = fileName + "h"
                    includeString = block.returnIncludeString()
                    self.write(includeFileName, includeString)
|
from scriptLattes import *
from geradorDePaginasWeb import *
import re
class OutroTipoDeProducaoBibliografica:
    """One "other bibliographic production" entry parsed from a Lattes CV.

    Entries belonging to different members are merged via compararCom(),
    which also accumulates the member ids used to build the collaboration graph.
    """
    item = None       # raw datum (the original description string)
    idMembro = None   # set of member ids sharing this production
    relevante = None
    autores = None
    titulo = None
    ano = None
    natureza = None   # production type
    chave = None

    def __init__(self, idMembro, partesDoItem='', relevante=''):
        self.idMembro = set([])
        self.idMembro.add(idMembro)
        if not partesDoItem=='':
            # partesDoItem[0]: number (NOT USED)
            # partesDoItem[1]: description of the item (RAW DATUM)
            self.relevante = relevante
            self.item = partesDoItem[1]
            # Split the item into its constituent parts
            partes = self.item.partition(" . ")
            self.autores = partes[0].strip()
            partes = partes[2]
            # A trailing "(...)." holds the production type (natureza)
            aux = re.findall(u' \((.*?)\)\.$', partes)
            if len(aux)>0:
                self.natureza = aux[-1]
                partes = partes.rpartition(" (")
                partes = partes[0]
            else:
                self.natureza = ''
            # Last 4-digit year (19xx/20xx) remaining in the text, if any
            aux = re.findall(u' ((?:19|20)\d\d)\\b', partes)
            if len(aux)>0:
                self.ano = aux[-1] #.strip().rstrip(".").rstrip(",")
                partes = partes.rpartition(" ")
                partes = partes[0]
            else:
                self.ano = ''
            self.titulo = partes.strip().rstrip(".").rstrip(",")
            self.chave = self.autores # comparison key between objects
        else:
            self.relevante = ''
            self.autores = ''
            self.titulo = ''
            self.ano = ''
            self.natureza = ''

    def compararCom(self, objeto):
        """Merge *objeto* into self when it looks like the same production
        (disjoint member sets, similar titles); return self, or None if distinct."""
        if self.idMembro.isdisjoint(objeto.idMembro) and compararCadeias(self.titulo, objeto.titulo):
            # The member ids are merged.
            # This part is important for building the collaboration GRAPH.
            self.idMembro.update(objeto.idMembro)
            # Keep the longest (most informative) variant of each field.
            if len(self.autores)<len(objeto.autores):
                self.autores = objeto.autores
            if len(self.titulo)<len(objeto.titulo):
                self.titulo = objeto.titulo
            if len(self.natureza)<len(objeto.natureza):
                self.natureza = objeto.natureza
            return self
        else: # not similar
            return None

    def html(self, listaDeMembros):
        """Return the HTML rendering of this production for the report page."""
        s = self.autores + '. <b>' + self.titulo + '</b>. '
        s+= str(self.ano) + '. ' if str(self.ano).isdigit() else '. '
        s+= self.natureza if not self.natureza=='' else ''
        s+= menuHTMLdeBuscaPB(self.titulo)
        return s

    # ------------------------------------------------------------------------ #
    def __str__(self):
        # NOTE(review): Python 2 style -- concatenates str with .encode() output;
        # would need rework under Python 3.
        s = "\n[OUTRO TIPO DE PRODUCAO BIBLIOGRAFICA] \n"
        s += "+ID-MEMBRO : " + str(self.idMembro) + "\n"
        s += "+RELEVANTE : " + str(self.relevante) + "\n"
        s += "+AUTORES : " + self.autores.encode('utf8','replace') + "\n"
        s += "+TITULO : " + self.titulo.encode('utf8','replace') + "\n"
        s += "+ANO : " + str(self.ano) + "\n"
        s += "+NATUREZA : " + self.natureza.encode('utf8','replace') + "\n"
        s += "+item : " + self.item.encode('utf8','replace') + "\n"
        return s
|
"""This test module contains tests for the migration system."""
import os
import subprocess
import unittest
# Repository root: two levels above this test module's directory.
REPO_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
class TestAlembic(unittest.TestCase):
    """This test class contains tests pertaining to alembic."""

    def test_alembic_history(self):
        """Enforce a linear alembic history.

        This test runs the `alembic history | grep ' (head), '` command,
        and ensure it returns only one line.
        """
        proc1 = subprocess.Popen(
            ["alembic", "history"], cwd=REPO_PATH, stdout=subprocess.PIPE
        )
        proc2 = subprocess.Popen(
            ["grep", " (head), "], stdin=proc1.stdout, stdout=subprocess.PIPE
        )
        # Close our copy of proc1's stdout so proc1 receives SIGPIPE if grep
        # exits early; this is the standard shell-pipeline idiom from the
        # subprocess documentation and prevents a potential deadlock.
        proc1.stdout.close()
        stdout = proc2.communicate()[0]
        lines = stdout.strip().split(b"\n")
        self.assertEqual(len(lines), 1)
        proc1.wait()
|
import pybedtools
import os
# Directory of this test module.
testdir = os.path.dirname(__file__)
# Scratch tempdir for the tests (created in setup, removed in teardown).
test_tempdir = os.path.join(os.path.abspath(testdir), 'tmp')
# Presumably a non-writable location for negative tests -- unused here; confirm.
unwriteable = os.path.join(os.path.abspath(testdir), 'unwriteable')
def setup():
    """Create the scratch tempdir and point pybedtools at it."""
    # os.makedirs(..., exist_ok=True) replaces the shell-dependent
    # os.system('mkdir -p ...'): portable, no subshell, errors raise.
    os.makedirs(test_tempdir, exist_ok=True)
    pybedtools.set_tempdir(test_tempdir)
def teardown():
    """Remove the scratch tempdir and clean up pybedtools temp files."""
    import shutil  # local import keeps the module's top-level imports untouched
    # shutil.rmtree replaces the shell-dependent os.system('rm -r ...');
    # ignore_errors covers the original "only if it exists" check.
    shutil.rmtree(test_tempdir, ignore_errors=True)
    pybedtools.cleanup()
|
__all__ = ["LinkBox"]
import logging
_LOG = logging.getLogger(".widgets.linkbox")
from gi.repository import GObject
from gi.repository import Gtk
class LinkBox(Gtk.HBox):
    """Horizontal box packing a link widget and an optional button beside it."""

    def __init__(self, link, button):
        GObject.GObject.__init__(self)
        self.set_spacing(6)
        self.pack_start(link, False, True, 0)
        if button:
            # The button is optional; skip packing when None/falsy.
            self.pack_start(button, False, True, 0)
        self.show()
|
"""Unit tests for the ranking engine."""
__revision__ = "$Id$"
from invenio.importutils import lazy_import
from invenio.testutils import make_test_suite, run_test_suite, InvenioTestCase
bibrank_tag_based_indexer = lazy_import('invenio.bibrank_tag_based_indexer')
split_ranges = lazy_import('invenio.bibrank:split_ranges')
class TestListSetOperations(InvenioTestCase):
    """Test list set operations."""

    def test_union_dicts(self):
        """bibrank tag based indexer - union dicts"""
        # Key 3 appears in both inputs (with the same value) and must appear once.
        self.assertEqual({1: 5, 2: 6, 3: 9, 4: 10, 10: 1}, bibrank_tag_based_indexer.union_dicts({1: 5, 2: 6, 3: 9}, {3:9, 4:10, 10: 1}))

    def test_split_ranges(self):
        """bibrank tag based indexer - split ranges"""
        # "a-b,c-d" becomes a list of [start, end] integer pairs.
        self.assertEqual([[0, 500], [600, 1000]], split_ranges("0-500,600-1000"))
# Aggregate the test cases into the suite the Invenio test runner expects.
TEST_SUITE = make_test_suite(TestListSetOperations,)

if __name__ == "__main__":
    run_test_suite(TEST_SUITE)
|
def spaceship_building(cans, weeks=52):
    """Accumulate weekly can contributions and print the running total.

    Args:
        cans: number of cans added each week.
        weeks: number of weeks to run (default 52, one year -- matching the
            original hard-coded range(1, 53)).

    Returns:
        The final total number of cans collected.
    """
    total_cans = 0
    for week in range(1, weeks + 1):
        total_cans += cans
        print('Week %s = %s cans' % (week, total_cans))
    return total_cans


spaceship_building(2)
spaceship_building(13)
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Alter Email.recipient to an indexed EmailField with max_length 254."""

    dependencies = [
        ('emailer', '0007_auto_20150509_1922'),
    ]

    operations = [
        migrations.AlterField(
            model_name='email',
            name='recipient',
            # 254 is the RFC-compliant maximum address length used by Django's
            # EmailField default; db_index speeds recipient lookups.
            field=models.EmailField(db_index=True, max_length=254),
        ),
    ]
|
import MySQLdb
class DatabaseHandler:
    """Truncates an ezScrum MySQL database back to a pristine state while
    preserving a small set of Mantis bookkeeping tables.

    NOTE(review): Python 2 only (print statements, builtin cmp).
    """

    def __init__(self):
        pass

    def is_delete(self, tableName):
        """Return 1 if *tableName* may be truncated, 0 if it is reserved."""
        reservedTableNameList = ["mantis_user_table", "mantis_tokens_table", "mantis_config_table"]
        isDeleteFlag = 1
        for name in reservedTableNameList:
            # cmp() returns 0 when the two strings are identical.
            isIdentical = cmp(tableName, name)
            if isIdentical == 0:
                isDeleteFlag = 0
                break
        return isDeleteFlag

    def Clean_Database(self, hostUrl, account, password, databaseName):
        """Truncate every non-reserved table and re-insert the default admin.

        Table names are interpolated into SQL; they come from the database's
        own catalog, so this assumes a trusted schema -- verify if that changes.
        """
        print 'clean database1'
        db = MySQLdb.connect(host=hostUrl, user=account, passwd=password, db=databaseName)
        cursor = db.cursor()
        cursor.execute("Show Tables from " + databaseName)
        result = cursor.fetchall()
        for record in result:
            tableName = record[0]
            isDelete = self.is_delete(tableName)
            if isDelete == 0:
                print "Reserve " + tableName
            else :
                print "TRUNCATE TABLE `" + tableName + "`"
                cursor.execute("TRUNCATE TABLE `" + tableName + "`")
        print 'Add admin'
        # The hash appears to be md5('admin') -- default admin credentials.
        cursor.execute("INSERT INTO `account` VALUES (1, 'admin', 'admin', 'example@ezScrum.tw', '21232f297a57a5a743894a0e4a801fc3', 1, 1379910191599, 1379910191599)")
        cursor.execute("INSERT INTO `system` VALUES (1, 1)")
        db.commit()
|
from __future__ import print_function
import time
import bugzilla
URL = "partner-bugzilla.redhat.com"
bzapi = bugzilla.Bugzilla(URL)

# Query all CLOSED python-bugzilla bugs in Fedora and time the round trip.
query = bzapi.build_query(
    product="Fedora",
    component="python-bugzilla")
query["status"] = "CLOSED"
t1 = time.time()
bugs = bzapi.query(query)
t2 = time.time()
print("Found %d bugs with our query" % len(bugs))
print("Query processing time: %s" % (t2 - t1))

# Same query, but only request id+summary fields: the server sends far less
# data, so the call should be measurably faster.
query = bzapi.build_query(
    product="Fedora",
    component="python-bugzilla",
    include_fields=["id", "summary"])
t1 = time.time()
bugs = bzapi.query(query)
t2 = time.time()
print("Quicker query processing time: %s" % (t2 - t1))

# Convert a bugzilla web-UI search URL into an API query dict.
query = bzapi.url_to_query("https://partner-bugzilla.redhat.com/"
    "buglist.cgi?classification=Fedora&component=python-bugzilla&"
    "f1=creation_ts&o1=lessthaneq&order=Importance&product=Fedora&"
    "query_format=advanced&v1=2010-01-01")
query["include_fields"] = ["id", "summary"]
bugs = bzapi.query(query)
print("The URL query returned 22 bugs... "
    "I know that without even checking because it shouldn't change!... "
    "(count is %d)" % len(bugs))
|
from ....const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
from .. import HasGrampsId
class HasIdOf(HasGrampsId):
    """Rule that checks for a person with a specific GRAMPS ID"""
    # Localized strings shown in the rule-editor UI; the matching logic
    # itself is inherited unchanged from HasGrampsId.
    name = _('Person with <Id>')
    description = _("Matches person with a specified Gramps ID")
|
import string
import socket
import base64
import sys
class message:
    """One piratebox protocol message.

    Wire format: ``piratebox;<type>;01;<sender>;<base64-payload>``.
    """

    def __init__(self, name="generate"):
        # "generate" means: use this host's name as the sender id.
        if name == "generate":
            self.name = socket.gethostname()
        else:
            self.name = name
        self.type = "gc"   # general chat by default; subclasses override
        self.decoded = ""  # full serialized message line

    def set(self, content=" "):
        """Serialize *content* into self.decoded with a base64 payload.

        Accepts str or bytes; text is UTF-8 encoded before base64 so this
        works on Python 3 as well as Python 2 (where bytes is str, keeping
        the original behavior).
        """
        if isinstance(content, bytes):
            raw = content
        else:
            raw = content.encode("utf-8")
        base64content = base64.b64encode(raw).decode("ascii")
        self.decoded = "piratebox;" + self.type + ";01;" + self.name + ";" + base64content

    def get(self):
        """Return the decoded payload, or None if the magic prefix is missing."""
        # str.split() works on Python 2 and 3; the original used the
        # string.split() module function, which was removed in Python 3.
        message_parts = self.decoded.split(";")
        if message_parts[0] != "piratebox":
            return None
        b64_content_part = message_parts[4]
        content = base64.b64decode(b64_content_part)
        return content

    def get_sendername(self):
        """Return the sender id carried by this message."""
        return self.name

    def get_message(self):
        """Return the full serialized message line."""
        return self.decoded

    def set_message(self, decoded):
        """Install an already-serialized message line."""
        self.decoded = decoded
class shoutbox_message(message):
    """A message of type "sb" (shoutbox); same wire format as message."""

    def __init__(self, name="generate" ):
        message.__init__( self , name)
        self.type="sb"
|
import sys
import re
import codecs
from collections import Counter
# Input markdown file comes from the command line; it is rewritten in place
# further below with its [TOC] placeholder expanded.
filename = sys.argv[1]

with codecs.open(filename, encoding='utf-8') as f:
    text = f.read()

# Collect level-2 and level-3 headings (lines starting with "## " or "### ").
m = re.findall(r'^#{2,3} .*$', text, re.MULTILINE)
def title(s):
    """Return the heading text with its leading '#' markers stripped."""
    hashes = re.compile(r'#+ ')
    return hashes.sub('', s)
def fragment(s):
    """Return the GitHub-style anchor fragment for a heading line.

    GitHub slugs keep letters, digits and hyphens; the original character
    class ``[^a-z-]`` dropped digits, producing broken anchors for headings
    like "Step 2" ("#step-" instead of "#step-2").
    """
    text = re.sub(r'#+ ', '', s)          # strip the heading markers
    slug = text.replace(' ', '-').lower() # spaces become hyphens
    return '#' + re.sub(r'[^a-z0-9-]', '', slug)
def depth(s):
    """Return the heading level: the number of leading '#' characters."""
    count = 0
    for ch in s:
        if ch != '#':
            break
        count += 1
    return count
# Duplicate-heading counter: GitHub disambiguates repeated headings by
# appending -1, -2, ... to the anchor.
c = Counter()
toc = []

for header in m:
    t = title(header)
    f = fragment(header)
    d = depth(header)
    # Indent by (depth - 2) tabs so level-2 headings sit at the left margin.
    if c[f] > 0:
        toc.append('{}- [{}]({}-{})'.format('\t'*(d-2), t, f, c[f]))
    else:
        toc.append('{}- [{}]({})'.format('\t'*(d-2), t, f))
    c[f] += 1

# Rewrite the file in place, replacing the [TOC] marker with the list.
with codecs.open(filename, 'w', encoding='utf-8') as f:
    f.write(text.replace('[TOC]', '\n'.join(toc)))
|
"""
pygments.plugin
~~~~~~~~~~~~~~~
Pygments setuptools plugin interface. The methods defined
here also work if setuptools isn't installed but they just
return nothing.
lexer plugins::
[pygments.lexers]
yourlexer = yourmodule:YourLexer
formatter plugins::
[pygments.formatters]
yourformatter = yourformatter:YourFormatter
/.ext = yourformatter:YourFormatter
As you can see, you can define extensions for the formatter
with a leading slash.
syntax plugins::
[pygments.styles]
yourstyle = yourstyle:YourStyle
filter plugin::
[pygments.filter]
yourfilter = yourfilter:YourFilter
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import unicode_literals
try:
import pkg_resources
except ImportError:
pkg_resources = None
# setuptools entry-point group names scanned for pygments plugins.
LEXER_ENTRY_POINT = 'pygments.lexers'
FORMATTER_ENTRY_POINT = 'pygments.formatters'
STYLE_ENTRY_POINT = 'pygments.styles'
FILTER_ENTRY_POINT = 'pygments.filters'
def find_plugin_lexers():
    """Yield the lexer classes registered through the entry point."""
    # Without setuptools, plugin discovery silently yields nothing.
    if pkg_resources is not None:
        for ep in pkg_resources.iter_entry_points(LEXER_ENTRY_POINT):
            yield ep.load()
def find_plugin_formatters():
    """Yield (name, formatter class) pairs registered through the entry point."""
    # Without setuptools, plugin discovery silently yields nothing.
    if pkg_resources is not None:
        for ep in pkg_resources.iter_entry_points(FORMATTER_ENTRY_POINT):
            yield ep.name, ep.load()
def find_plugin_styles():
    """Yield (name, style class) pairs registered through the entry point."""
    # Without setuptools, plugin discovery silently yields nothing.
    if pkg_resources is not None:
        for ep in pkg_resources.iter_entry_points(STYLE_ENTRY_POINT):
            yield ep.name, ep.load()
def find_plugin_filters():
    """Yield (name, filter class) pairs registered through the entry point."""
    # Without setuptools, plugin discovery silently yields nothing.
    if pkg_resources is not None:
        for ep in pkg_resources.iter_entry_points(FILTER_ENTRY_POINT):
            yield ep.name, ep.load()
|
import logging
from borgmatic.borg.flags import make_flags, make_flags_from_arguments
from borgmatic.execute import execute_command
logger = logging.getLogger(__name__)

# Glob matching archive names that end in a digit; Borg checkpoint archives
# end in ".checkpoint", so using this with --glob-archives skips them.
BORG_EXCLUDE_CHECKPOINTS_GLOB = '*[0123456789]'
def resolve_archive_name(repository, archive, storage_config, local_path='borg', remote_path=None):
    '''
    Given a local or remote repository path, an archive name, a storage config dict, a local Borg
    path, and a remote Borg path, simply return the archive name. But if the archive name is
    "latest", then instead introspect the repository for the latest successful (non-checkpoint)
    archive, and return its name.
    Raise ValueError if "latest" is given but there are no archives in the repository.
    '''
    if archive != "latest":
        return archive
    lock_wait = storage_config.get('lock_wait', None)
    # Ask Borg for just the most recent archive; the glob excludes checkpoint
    # archives (their names end in ".checkpoint", not a digit).
    full_command = (
        (local_path, 'list')
        + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
        + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
        + make_flags('remote-path', remote_path)
        + make_flags('lock-wait', lock_wait)
        + make_flags('glob-archives', BORG_EXCLUDE_CHECKPOINTS_GLOB)
        + make_flags('last', 1)
        + ('--short', repository)
    )
    output = execute_command(full_command, output_log_level=None, borg_local_path=local_path)
    try:
        # With --short the output is one archive name per line; the newest
        # (and, with --last 1, only) archive is the final line.
        latest_archive = output.strip().splitlines()[-1]
    except IndexError:
        raise ValueError('No archives found in the repository')
    logger.debug('{}: Latest archive is {}'.format(repository, latest_archive))
    return latest_archive
def list_archives(repository, storage_config, list_arguments, local_path='borg', remote_path=None):
    '''
    Given a local or remote repository path, a storage config dict, and the arguments to the list
    action, display the output of listing Borg archives in the repository or return JSON output. Or,
    if an archive name is given, listing the files in that archive.
    '''
    lock_wait = storage_config.get('lock_wait', None)
    if list_arguments.successful:
        # "Successful" means non-checkpoint: restrict the glob accordingly.
        list_arguments.glob_archives = BORG_EXCLUDE_CHECKPOINTS_GLOB
    full_command = (
        (local_path, 'list')
        # Verbosity flags are suppressed under --json so the JSON stays parseable.
        + (
            ('--info',)
            if logger.getEffectiveLevel() == logging.INFO and not list_arguments.json
            else ()
        )
        + (
            ('--debug', '--show-rc')
            if logger.isEnabledFor(logging.DEBUG) and not list_arguments.json
            else ()
        )
        + make_flags('remote-path', remote_path)
        + make_flags('lock-wait', lock_wait)
        # Remaining list action arguments become flags, except the positional
        # ones handled explicitly below.
        + make_flags_from_arguments(
            list_arguments, excludes=('repository', 'archive', 'paths', 'successful')
        )
        # "repo::archive" lists files in one archive; bare repo lists archives.
        + (
            '::'.join((repository, list_arguments.archive))
            if list_arguments.archive
            else repository,
        )
        + (tuple(list_arguments.paths) if list_arguments.paths else ())
    )
    return execute_command(
        full_command,
        # JSON output is captured (log level None) instead of logged.
        output_log_level=None if list_arguments.json else logging.WARNING,
        borg_local_path=local_path,
    )
|
import sys
import time
import optparse
from email.Charset import Charset
from mailman import MailList
from mailman import Utils
from mailman.app.requests import handle_request
from mailman.configuration import config
from mailman.core.i18n import _
from mailman.email.message import UserNotification
from mailman.initialize import initialize
from mailman.interfaces.requests import IListRequests, RequestType
from mailman.version import MAILMAN_VERSION
import signal
# Restore default SIGCHLD handling (presumably overriding a handler inherited
# from the calling environment -- confirm against the mailman runner setup).
signal.signal(signal.SIGCHLD, signal.SIG_DFL)

NL = u'\n'  # unicode joiner for the report body
now = time.time()  # single timestamp reused for all age comparisons this run
def parseargs():
    """Parse command-line options; print help and exit(1) on stray arguments.

    Returns (opts, args, parser); args is always empty on success.
    """
    parser = optparse.OptionParser(version=MAILMAN_VERSION,
                                   usage=_("""\
%prog [options]
Check for pending admin requests and mail the list owners if necessary."""))
    parser.add_option('-C', '--config',
                      help=_('Alternative configuration file to use'))
    opts, args = parser.parse_args()
    if args:
        # This script accepts no positional arguments.
        parser.print_help()
        print(_('Unexpected arguments'), file=sys.stderr)
        sys.exit(1)
    return opts, args, parser
def pending_requests(mlist):
    """Return a byte string describing the list's pending subscription and
    held-message requests, encoded in the list's preferred charset.

    NOTE(review): Python 2 only -- relies on the builtin `unicode`.
    """
    # Must return a byte string
    lcset = mlist.preferred_language.charset
    pending = []
    first = True
    requestsdb = IListRequests(mlist)
    for request in requestsdb.of_type(RequestType.subscription):
        if first:
            pending.append(_('Pending subscriptions:'))
            first = False
        key, data = requestsdb.get_request(request.id)
        when = data['when']
        addr = data['addr']
        fullname = data['fullname']
        passwd = data['passwd']
        digest = data['digest']
        lang = data['lang']
        if fullname:
            if isinstance(fullname, unicode):
                fullname = fullname.encode(lcset, 'replace')
            fullname = ' (%s)' % fullname
        pending.append(' %s%s %s' % (addr, fullname, time.ctime(when)))
    first = True
    for request in requestsdb.of_type(RequestType.held_message):
        if first:
            pending.append(_('\nPending posts:'))
            first = False
        key, data = requestsdb.get_request(request.id)
        when = data['when']
        sender = data['sender']
        subject = data['subject']
        reason = data['reason']
        text = data['text']
        msgdata = data['msgdata']
        subject = Utils.oneline(subject, lcset)
        date = time.ctime(when)
        reason = _(reason)
        # The $-substitutions below are resolved by the i18n _() machinery
        # from the local variables set above.
        pending.append(_("""\
From: $sender on $date
Subject: $subject
Cause: $reason"""))
        pending.append('')
    # Coerce all items in pending to a Unicode so we can join them
    upending = []
    charset = mlist.preferred_language.charset
    for s in pending:
        if isinstance(s, unicode):
            upending.append(s)
        else:
            upending.append(unicode(s, charset, 'replace'))
    # Make sure that the text we return from here can be encoded to a byte
    # string in the charset of the list's language. This could fail if for
    # example, the request was pended while the list's language was French,
    # but then it was changed to English before checkdbs ran.
    text = NL.join(upending)
    charset = Charset(mlist.preferred_language.charset)
    incodec = charset.input_codec or 'ascii'
    outcodec = charset.output_codec or 'ascii'
    if isinstance(text, unicode):
        return text.encode(outcodec, 'replace')
    # Be sure this is a byte string encodeable in the list's charset
    utext = unicode(text, incodec, 'replace')
    return utext.encode(outcodec, 'replace')
def auto_discard(mlist):
    """Discard held messages older than the list's expiration window.

    Uses mlist.max_days_to_hold (0/None disables expiry) and returns the
    number of messages discarded.  NOTE: 'now' is a module-level timestamp
    defined elsewhere in this file.
    """
    discarded = 0
    expire = config.days(mlist.max_days_to_hold)
    requestsdb = IListRequests(mlist)
    held = list(requestsdb.of_type(RequestType.held_message))
    if expire and held:
        for request in held:
            key, data = requestsdb.get_request(request.id)
            if now - data['date'] > expire:
                handle_request(mlist, request.id, config.DISCARD)
                discarded += 1
        # Persist the list only when we actually walked the held messages.
        mlist.Save()
    return discarded
def midnight(date=None):
    """Return the local epoch time of midnight on *date*.

    *date* is a (year, month, day) tuple; when omitted, today's local date
    is used.
    """
    if date is None:
        date = time.localtime()[:3]
    # Zero out hour/minute/second/weekday/yearday; dst flag -1 tells
    # mktime to work out daylight-saving time itself.
    timetuple = date + (0, 0, 0, 0, 0, -1)
    return time.mktime(timetuple)
def main():
    """For every mailing list: expire old held messages, evict stale
    autoresponse records, and mail the owner a summary of pending
    moderator requests."""
    opts, args, parser = parseargs()
    initialize(opts.config)
    for name in config.list_manager.names:
        # The list must be locked in order to open the requests database
        mlist = MailList.MailList(name)
        try:
            count = IListRequests(mlist).count
            # While we're at it, let's evict yesterday's autoresponse data
            midnight_today = midnight()
            evictions = []
            for sender in mlist.hold_and_cmd_autoresponses.keys():
                date, respcount = mlist.hold_and_cmd_autoresponses[sender]
                if midnight(date) < midnight_today:
                    evictions.append(sender)
            if evictions:
                for sender in evictions:
                    del mlist.hold_and_cmd_autoresponses[sender]
                # This is the only place we've changed the list's database
                mlist.Save()
            if count:
                # Set the default language to the list's preferred language.
                _.default = mlist.preferred_language
                realname = mlist.real_name
                # Expire old held messages first so the reported count
                # reflects only what still needs moderator attention.
                discarded = auto_discard(mlist)
                if discarded:
                    count = count - discarded
                    text = _('Notice: $discarded old request(s) '
                             'automatically expired.\n\n')
                else:
                    text = ''
                if count:
                    text += Utils.maketext(
                        'checkdbs.txt',
                        {'count' : count,
                         'mail_host': mlist.mail_host,
                         'adminDB' : mlist.GetScriptURL('admindb',
                                                        absolute=1),
                         'real_name': realname,
                         }, mlist=mlist)
                    text += '\n' + pending_requests(mlist)
                    subject = _('$count $realname moderator '
                                'request(s) waiting')
                else:
                    subject = _('$realname moderator request check result')
                # Notify the list owner(s); 'tomoderators' routes the
                # notice to the moderator addresses as well.
                msg = UserNotification(mlist.GetOwnerEmail(),
                                       mlist.GetBouncesEmail(),
                                       subject, text,
                                       mlist.preferred_language)
                msg.send(mlist, **{'tomoderators': True})
        finally:
            # Always release the list lock, even on error.
            mlist.Unlock()
if __name__ == '__main__':
    main()
|
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: adds the optional ``network`` foreign key
    (pointing at ``ganeti.Network``) to ``InstanceApplication``."""

    def forwards(self, orm):
        # Adding field 'InstanceApplication.network'
        db.add_column('apply_instanceapplication', 'network', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['ganeti.Network'], null=True, blank=True), keep_default=False)

    def backwards(self, orm):
        # Deleting field 'InstanceApplication.network'
        # South stores FKs with an ``_id`` column suffix, hence 'network_id'.
        db.delete_column('apply_instanceapplication', 'network_id')

    # Frozen ORM snapshot generated by South; do not edit by hand.
    models = {
        'apply.instanceapplication': {
            'Meta': {'object_name': 'InstanceApplication'},
            'admin_contact_email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
            'admin_contact_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'admin_contact_phone': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
            'applicant': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
            'backend_message': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'cluster': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['ganeti.Cluster']", 'null': 'True', 'blank': 'True'}),
            'comments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'cookie': ('django.db.models.fields.CharField', [], {'default': "'AYkWSa4Fr2'", 'max_length': '255'}),
            'disk_size': ('django.db.models.fields.IntegerField', [], {}),
            'filed': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'hostname': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'hosts_mail_server': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'job_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'last_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'memory': ('django.db.models.fields.IntegerField', [], {}),
            'network': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['ganeti.Network']", 'null': 'True', 'blank': 'True'}),
            'operating_system': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'organization': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['apply.Organization']"}),
            'status': ('django.db.models.fields.IntegerField', [], {}),
            'vcpus': ('django.db.models.fields.IntegerField', [], {})
        },
        'apply.organization': {
            'Meta': {'object_name': 'Organization'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'phone': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'users': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
            'website': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
        },
        'apply.sshpublickey': {
            'Meta': {'object_name': 'SshPublicKey'},
            'comment': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'fingerprint': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'key': ('django.db.models.fields.TextField', [], {}),
            'key_type': ('django.db.models.fields.CharField', [], {'max_length': '12'}),
            'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        },
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'ganeti.cluster': {
            'Meta': {'object_name': 'Cluster'},
            'description': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
            'fast_create': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'hostname': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
            'port': ('django.db.models.fields.PositiveIntegerField', [], {'default': '5080'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'})
        },
        'ganeti.network': {
            'Meta': {'object_name': 'Network'},
            'cluster': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['ganeti.Cluster']"}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'link': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'mode': ('django.db.models.fields.CharField', [], {'max_length': '64'})
        }
    }

    complete_apps = ['apply']
|
"""
Test shows
"""
from __future__ import print_function, unicode_literals
import os
import sys
import unittest
sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../lib')))
sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../..')))
import sickbeard
import six
from sickbeard.common import Quality
from sickbeard.tv import TVShow
from sickchill.helper.exceptions import MultipleShowObjectsException
from sickchill.show.Show import Show
class ShowTests(unittest.TestCase):
    """
    Test shows
    """
    def test_find(self):
        """
        Test find tv shows by indexer_id
        """
        sickbeard.QUALITY_DEFAULT = Quality.FULLHDTV
        sickbeard.showList = []
        show123 = TestTVShow(0, 123)
        show456 = TestTVShow(0, 456)
        show789 = TestTVShow(0, 789)
        shows = [show123, show456, show789]
        # Duplicated list: looking up an id present twice must raise.
        shows_duplicate = shows + shows
        # Key: (pass the shows list?, indexer_id) -> expected show or None.
        # Non-int ids ('123', 12.3, None, '') must never match.
        test_cases = {
            (False, None): None,
            (False, ''): None,
            (False, '123'): None,
            (False, 123): None,
            (False, 12.3): None,
            (True, None): None,
            (True, ''): None,
            (True, '123'): None,
            (True, 123): show123,
            (True, 12.3): None,
            (True, 456): show456,
            (True, 789): show789,
        }
        # With unicode_literals in effect these string ids are unicode.
        unicode_test_cases = {
            (False, ''): None,
            (False, '123'): None,
            (True, ''): None,
            (True, '123'): None,
        }
        for tests in test_cases, unicode_test_cases:
            for ((use_shows, indexer_id), result) in six.iteritems(tests):
                if use_shows:
                    self.assertEqual(Show.find(shows, indexer_id), result)
                else:
                    self.assertEqual(Show.find(None, indexer_id), result)
        with self.assertRaises(MultipleShowObjectsException):
            Show.find(shows_duplicate, 456)
    def test_validate_indexer_id(self):
        """
        Tests if the indexer_id is valid and if so if it returns the right show
        """
        sickbeard.QUALITY_DEFAULT = Quality.FULLHDTV
        sickbeard.showList = []
        show123 = TestTVShow(0, 123)
        show456 = TestTVShow(0, 456)
        show789 = TestTVShow(0, 789)
        sickbeard.showList = [
            show123,
            show456,
            show789,
        ]
        # _validate_indexer_id returns (error message, show) pairs;
        # indexer_id_list[i] corresponds to results_list[i].
        invalid_show_id = ('Invalid show ID', None)
        indexer_id_list = [
            None, '', '', '123', '123', '456', '456', '789', '789', 123, 456, 789, ['123', '456'], ['123', '456'],
            [123, 456]
        ]
        results_list = [
            invalid_show_id, invalid_show_id, invalid_show_id, (None, show123), (None, show123), (None, show456),
            (None, show456), (None, show789), (None, show789), (None, show123), (None, show456), (None, show789),
            invalid_show_id, invalid_show_id, invalid_show_id
        ]
        self.assertEqual(
            len(indexer_id_list), len(results_list),
            'Number of parameters ({0:d}) and results ({1:d}) does not match'.format(len(indexer_id_list), len(results_list))
        )
        for (index, indexer_id) in enumerate(indexer_id_list):
            self.assertEqual(Show._validate_indexer_id(indexer_id), results_list[index])  # pylint: disable=protected-access
class TestTVShow(TVShow):
    """
    A test `TVShow` object that does not need DB access.
    """
    def __init__(self, indexer, indexer_id):
        # Plain delegation; loadFromDB below is a no-op, so constructing
        # this object never touches the database.
        super(TestTVShow, self).__init__(indexer, indexer_id)
    def loadFromDB(self):
        """
        Override TVShow.loadFromDB to avoid DB access during testing
        """
        pass
if __name__ == '__main__':
    # Run the ShowTests suite with verbose output when executed directly.
    print('=====> Testing {0}'.format(__file__))
    loader = unittest.TestLoader()
    suite = loader.loadTestsFromTestCase(ShowTests)
    runner = unittest.TextTestRunner(verbosity=2)
    runner.run(suite)
|
# Hungarian translation strings for this web2py application.
# Keys are the original (English) message strings; values hold the
# Hungarian translations as UTF-8 byte escapes. Entries that map to
# themselves are still untranslated.
{
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN',
'%Y-%m-%d': '%Y.%m.%d.',
'%Y-%m-%d %H:%M:%S': '%Y.%m.%d. %H:%M:%S',
'%s rows deleted': '%s sorok t\xc3\xb6rl\xc5\x91dtek',
'%s rows updated': '%s sorok friss\xc3\xadt\xc5\x91dtek',
'Available databases and tables': 'El\xc3\xa9rhet\xc5\x91 adatb\xc3\xa1zisok \xc3\xa9s t\xc3\xa1bl\xc3\xa1k',
'Cannot be empty': 'Nem lehet \xc3\xbcres',
'Check to delete': 'T\xc3\xb6rl\xc3\xa9shez v\xc3\xa1laszd ki',
'Client IP': 'Client IP',
'Controller': 'Controller',
'Copyright': 'Copyright',
'Current request': 'Jelenlegi lek\xc3\xa9rdez\xc3\xa9s',
'Current response': 'Jelenlegi v\xc3\xa1lasz',
'Current session': 'Jelenlegi folyamat',
'DB Model': 'DB Model',
'Database': 'Adatb\xc3\xa1zis',
'Delete:': 'T\xc3\xb6r\xc3\xb6l:',
'Description': 'Description',
'E-mail': 'E-mail',
'Edit': 'Szerkeszt',
'Edit This App': 'Alkalmaz\xc3\xa1st szerkeszt',
'Edit current record': 'Aktu\xc3\xa1lis bejegyz\xc3\xa9s szerkeszt\xc3\xa9se',
'First name': 'First name',
'Group ID': 'Group ID',
'Hello World': 'Hello Vil\xc3\xa1g',
'Import/Export': 'Import/Export',
'Index': 'Index',
'Internal State': 'Internal State',
'Invalid Query': 'Hib\xc3\xa1s lek\xc3\xa9rdez\xc3\xa9s',
'Invalid email': 'Invalid email',
'Last name': 'Last name',
'Layout': 'Szerkezet',
'Main Menu': 'F\xc5\x91men\xc3\xbc',
'Menu Model': 'Men\xc3\xbc model',
'Name': 'Name',
'New Record': '\xc3\x9aj bejegyz\xc3\xa9s',
'No databases in this application': 'Nincs adatb\xc3\xa1zis ebben az alkalmaz\xc3\xa1sban',
'Origin': 'Origin',
'Password': 'Password',
'Powered by': 'Powered by',
'Query:': 'Lek\xc3\xa9rdez\xc3\xa9s:',
'Record ID': 'Record ID',
'Registration key': 'Registration key',
'Reset Password key': 'Reset Password key',
'Role': 'Role',
'Rows in table': 'Sorok a t\xc3\xa1bl\xc3\xa1ban',
'Rows selected': 'Kiv\xc3\xa1lasztott sorok',
'Stylesheet': 'Stylesheet',
'Sure you want to delete this object?': 'Biztos t\xc3\xb6rli ezt az objektumot?',
'Table name': 'Table name',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.',
'Timestamp': 'Timestamp',
'Update:': 'Friss\xc3\xadt:',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.',
'User ID': 'User ID',
'View': 'N\xc3\xa9zet',
'Welcome to web2py': 'Isten hozott a web2py-ban',
'appadmin is disabled because insecure channel': 'az appadmin a biztons\xc3\xa1gtalan csatorna miatt letiltva',
'cache': 'gyors\xc3\xadt\xc3\xb3t\xc3\xa1r',
'change password': 'jelsz\xc3\xb3 megv\xc3\xa1ltoztat\xc3\xa1sa',
'click here for online examples': 'online p\xc3\xa9ld\xc3\xa1k\xc3\xa9rt kattints ide',
'click here for the administrative interface': 'az adminisztr\xc3\xa1ci\xc3\xb3s fel\xc3\xbclet\xc3\xa9rt kattints ide',
'customize me!': 'v\xc3\xa1ltoztass meg!',
'data uploaded': 'adat felt\xc3\xb6ltve',
'database': 'adatb\xc3\xa1zis',
'database %s select': 'adatb\xc3\xa1zis %s kiv\xc3\xa1laszt\xc3\xa1s',
'db': 'db',
'design': 'design',
'done!': 'k\xc3\xa9sz!',
'edit profile': 'profil szerkeszt\xc3\xa9se',
'export as csv file': 'export\xc3\xa1l csv f\xc3\xa1jlba',
'insert new': '\xc3\xbaj beilleszt\xc3\xa9se',
'insert new %s': '\xc3\xbaj beilleszt\xc3\xa9se %s',
'invalid request': 'hib\xc3\xa1s k\xc3\xa9r\xc3\xa9s',
'login': 'bel\xc3\xa9p',
'logout': 'kil\xc3\xa9p',
'lost password': 'elveszett jelsz\xc3\xb3',
'new record inserted': '\xc3\xbaj bejegyz\xc3\xa9s felv\xc3\xa9ve',
'next 100 rows': 'k\xc3\xb6vetkez\xc5\x91 100 sor',
'or import from csv file': 'vagy bet\xc3\xb6lt\xc3\xa9s csv f\xc3\xa1jlb\xc3\xb3l',
'previous 100 rows': 'el\xc5\x91z\xc5\x91 100 sor',
'record': 'bejegyz\xc3\xa9s',
'record does not exist': 'bejegyz\xc3\xa9s nem l\xc3\xa9tezik',
'record id': 'bejegyz\xc3\xa9s id',
'register': 'regisztr\xc3\xa1ci\xc3\xb3',
'selected': 'kiv\xc3\xa1lasztott',
'state': '\xc3\xa1llapot',
'table': 't\xc3\xa1bla',
'unable to parse csv file': 'nem lehet a csv f\xc3\xa1jlt beolvasni',
}
|
"""Convert HTML page to Word 97 document
This script is used during the build process of "Dive Into Python"
(http://diveintopython.org/) to create the downloadable Word 97 version
of the book (http://diveintopython.org/diveintopython.doc)
Looks for 2 arguments on the command line. The first argument is the input (HTML)
file; the second argument is the output (.doc) file.
Only runs on Windows. Requires Microsoft Word 2000.
Safe to run on the same file(s) more than once. The output file will be
silently overwritten if it already exists.
The script has been modified by xiaq (xiaqqaix@gmail.com) to fit Simplified Chinese version of Microsoft Word.
"""
__author__ = "Mark Pilgrim (mark@diveintopython.org)"
__version__ = "$Revision: 1.2 $"
__date__ = "$Date: 2004/05/05 21:57:19 $"
__copyright__ = "Copyright (c) 2001 Mark Pilgrim"
__license__ = "Python"
import sys, os
from win32com.client import gencache, constants
def makeRealWordDoc(infile, outfile):
    """Open *infile* in Word via COM, add a table of contents plus
    header/footer, and save the result as a Word document at *outfile*.

    Requires Microsoft Word (win32com); Word is always quit, and the
    document closed without saving in-place, even on error.
    """
    word = gencache.EnsureDispatch("Word.Application")
    try:
        worddoc = word.Documents.Open(FileName=infile)
        try:
            # Insert a two-level, hyperlinked TOC at the current selection.
            worddoc.TablesOfContents.Add(Range=word.ActiveWindow.Selection.Range, \
                                         RightAlignPageNumbers=1, \
                                         UseHeadingStyles=1, \
                                         UpperHeadingLevel=1, \
                                         LowerHeadingLevel=2, \
                                         IncludePageNumbers=1, \
                                         AddedStyles='', \
                                         UseHyperlinks=1, \
                                         HidePageNumbersInWeb=1)
            worddoc.TablesOfContents(1).TabLeader = constants.wdTabLeaderDots
            worddoc.TablesOfContents.Format = constants.wdIndexIndent
            # Write the page header text.
            word.ActiveWindow.ActivePane.View.SeekView = constants.wdSeekCurrentPageHeader
            word.Selection.TypeText(Text="Dive Into Python\t\thttp://diveintopython.org/")
            # Insert the page-number AutoText into the footer.
            # NOTE(review): the entry name below looks like a GBK-encoded
            # Chinese AutoText name displayed as Latin-1 -- it must match
            # the Simplified Chinese Word installation; confirm before
            # changing it.
            word.ActiveWindow.ActivePane.View.SeekView = constants.wdSeekCurrentPageFooter
            word.NormalTemplate.AutoTextEntries("- Ò³Âë -").Insert(Where=word.ActiveWindow.Selection.Range)
            word.ActiveWindow.View.Type = constants.wdPrintView
            # Refresh the TOC now that pagination is final.
            worddoc.TablesOfContents(1).Update()
            worddoc.SaveAs(FileName=outfile, \
                           FileFormat=constants.wdFormatDocument)
        finally:
            # Close without saving changes to the opened HTML source.
            worddoc.Close(0)
            del worddoc
    finally:
        word.Quit()
        del word
if __name__ == "__main__":
    # Fail with a usage message instead of an IndexError when the two
    # required arguments (input HTML file, output .doc file) are missing.
    if len(sys.argv) != 3:
        sys.exit("Usage: %s infile.html outfile.doc" % sys.argv[0])
    # Resolve both paths relative to the current working directory.
    infile = os.path.normpath(os.path.join(os.getcwd(), sys.argv[1]))
    outfile = os.path.normpath(os.path.join(os.getcwd(), sys.argv[2]))
    makeRealWordDoc(infile, outfile)
|
"""
Simple roster implementation. Can be used though for different tasks like
mass-renaming of contacts.
"""
from protocol import JID, Iq, Presence, Node, NodeProcessed, NS_MUC_USER, NS_ROSTER
from plugin import PlugIn
import logging
log = logging.getLogger('nbxmpp.roster_nb')
class NonBlockingRoster(PlugIn):
    """
    Defines a plenty of methods that will allow you to manage roster. Also
    automatically track presences from remote JIDs taking into account that
    every JID can have multiple resources connected. Does not currently support
    'error' presences. You can also use mapping interface for access to the
    internal representation of contacts in roster
    """
    # NOTE: this module is Python 2 code (dict.has_key is used throughout).
    def __init__(self, version=None):
        """
        Init internal variables
        """
        PlugIn.__init__(self)
        # Roster version (RFC 6121 'ver' attribute); None means unversioned.
        self.version = version
        # Internal roster: bare JID -> {name, ask, subscription, groups,
        # resources}; each resource maps to {show, status, priority, timestamp}.
        self._data = {}
        # None = never requested, 0 = request in flight, 1 = roster received.
        self._set=None
        self._exported_methods=[self.getRoster]
        self.received_from_server = False
    def Request(self, force=0):
        """
        Request roster from server if it were not yet requested (or if the
        'force' argument is set)
        """
        if self._set is None:
            self._set = 0
        elif not force:
            return
        iq = Iq('get', NS_ROSTER)
        if self.version is not None:
            iq.setTagAttr('query', 'ver', self.version)
        id_ = self._owner.getAnID()
        iq.setID(id_)
        self._owner.send(iq)
        log.info('Roster requested from server')
        # Returns the stanza id of the request (None when skipped).
        return id_
    def RosterIqHandler(self, dis, stanza):
        """
        Subscription tracker. Used internally for setting items state in internal
        roster representation
        """
        # Ignore roster pushes that do not come from our own bare JID
        # (or from the server itself, which sends no 'from').
        sender = stanza.getAttr('from')
        if not sender is None and not sender.bareMatch(
                self._owner.User + '@' + self._owner.Server):
            return
        query = stanza.getTag('query')
        if query:
            self.received_from_server = True
            self.version = stanza.getTagAttr('query', 'ver')
            if self.version is None:
                self.version = ''
            for item in query.getTags('item'):
                jid=item.getAttr('jid')
                if item.getAttr('subscription')=='remove':
                    if self._data.has_key(jid): del self._data[jid]
                # Looks like we have a workaround
                # raise NodeProcessed # a MUST
                log.info('Setting roster item %s...' % jid)
                if not self._data.has_key(jid): self._data[jid]={}
                self._data[jid]['name']=item.getAttr('name')
                self._data[jid]['ask']=item.getAttr('ask')
                self._data[jid]['subscription']=item.getAttr('subscription')
                self._data[jid]['groups']=[]
                if not self._data[jid].has_key('resources'): self._data[jid]['resources']={}
                for group in item.getTags('group'):
                    if group.getData() not in self._data[jid]['groups']:
                        self._data[jid]['groups'].append(group.getData())
        # Always keep a placeholder entry for our own bare JID.
        self._data[self._owner.User+'@'+self._owner.Server]={'resources': {}, 'name': None, 'ask': None, 'subscription': None, 'groups': None,}
        self._set=1
        # Looks like we have a workaround
        # raise NodeProcessed # a MUST. Otherwise you'll get back an <iq type='error'/>
    def PresenceHandler(self, dis, pres):
        """
        Presence tracker. Used internally for setting items' resources state in
        internal roster representation
        """
        # MUC (groupchat) presences are not roster presences -- skip them.
        if pres.getTag('x', namespace=NS_MUC_USER):
            return
        jid=pres.getFrom()
        if not jid:
            # If no from attribute, it's from server
            jid=self._owner.Server
        jid=JID(jid)
        if not self._data.has_key(jid.getStripped()): self._data[jid.getStripped()]={'name':None,'ask':None,'subscription':'none','groups':['Not in roster'],'resources':{}}
        if type(self._data[jid.getStripped()]['resources'])!=type(dict()):
            self._data[jid.getStripped()]['resources']={}
        item=self._data[jid.getStripped()]
        typ=pres.getType()
        if not typ:
            # Available presence: record/update the resource's state.
            log.info('Setting roster item %s for resource %s...'%(jid.getStripped(), jid.getResource()))
            item['resources'][jid.getResource()]=res={'show':None,'status':None,'priority':'0','timestamp':None}
            if pres.getTag('show'): res['show']=pres.getShow()
            if pres.getTag('status'): res['status']=pres.getStatus()
            if pres.getTag('priority'): res['priority']=pres.getPriority()
            if not pres.getTimestamp(): pres.setTimestamp()
            res['timestamp']=pres.getTimestamp()
        elif typ=='unavailable' and item['resources'].has_key(jid.getResource()): del item['resources'][jid.getResource()]
        # Need to handle type='error' also
    def _getItemData(self, jid, dataname):
        """
        Return specific jid's representation in internal format. Used internally
        """
        # Strip any resource part before the lookup.
        jid = jid[:(jid+'/').find('/')]
        return self._data[jid][dataname]
    def _getResourceData(self, jid, dataname):
        """
        Return specific jid's resource representation in internal format. Used
        internally
        """
        if jid.find('/') + 1:
            # Full JID: look up that exact resource.
            jid, resource = jid.split('/', 1)
            if self._data[jid]['resources'].has_key(resource):
                return self._data[jid]['resources'][resource][dataname]
        elif self._data[jid]['resources'].keys():
            # Bare JID: pick the highest-priority connected resource.
            # -129 is below the minimum XMPP priority of -128.
            lastpri = -129
            for r in self._data[jid]['resources'].keys():
                if int(self._data[jid]['resources'][r]['priority']) > lastpri:
                    resource, lastpri=r, int(self._data[jid]['resources'][r]['priority'])
            return self._data[jid]['resources'][resource][dataname]
    def delItem(self, jid):
        """
        Delete contact 'jid' from roster
        """
        self._owner.send(Iq('set', NS_ROSTER, payload=[Node('item', {'jid': jid, 'subscription': 'remove'})]))
    def getAsk(self, jid):
        """
        Return 'ask' value of contact 'jid'
        """
        return self._getItemData(jid, 'ask')
    def getGroups(self, jid):
        """
        Return groups list that contact 'jid' belongs to
        """
        return self._getItemData(jid, 'groups')
    def getName(self, jid):
        """
        Return name of contact 'jid'
        """
        return self._getItemData(jid, 'name')
    def getPriority(self, jid):
        """
        Return priority of contact 'jid'. 'jid' should be a full (not bare) JID
        """
        return self._getResourceData(jid, 'priority')
    def getRawRoster(self):
        """
        Return roster representation in internal format
        """
        return self._data
    def getRawItem(self, jid):
        """
        Return roster item 'jid' representation in internal format
        """
        return self._data[jid[:(jid+'/').find('/')]]
    def getShow(self, jid):
        """
        Return 'show' value of contact 'jid'. 'jid' should be a full (not bare)
        JID
        """
        return self._getResourceData(jid, 'show')
    def getStatus(self, jid):
        """
        Return 'status' value of contact 'jid'. 'jid' should be a full (not bare)
        JID
        """
        return self._getResourceData(jid, 'status')
    def getSubscription(self, jid):
        """
        Return 'subscription' value of contact 'jid'
        """
        return self._getItemData(jid, 'subscription')
    def getResources(self, jid):
        """
        Return list of connected resources of contact 'jid'
        """
        return self._data[jid[:(jid+'/').find('/')]]['resources'].keys()
    def setItem(self, jid, name=None, groups=[]):
        """
        Rename contact 'jid' and sets the groups list that it now belongs to
        """
        # NOTE(review): 'groups=[]' is a mutable default argument; it is only
        # iterated here, never mutated, so it is harmless -- but consider
        # 'groups=None' for safety.
        iq = Iq('set', NS_ROSTER)
        query = iq.getTag('query')
        attrs = {'jid': jid}
        if name:
            attrs['name'] = name
        item = query.setTag('item', attrs)
        for group in groups:
            item.addChild(node=Node('group', payload=[group]))
        self._owner.send(iq)
    def setItemMulti(self, items):
        """
        Rename multiple contacts and sets their group lists
        """
        # Each entry of 'items' is a dict with 'jid', 'name' and 'groups' keys;
        # all updates are sent in a single roster-set stanza.
        iq = Iq('set', NS_ROSTER)
        query = iq.getTag('query')
        for i in items:
            attrs = {'jid': i['jid']}
            if i['name']:
                attrs['name'] = i['name']
            item = query.setTag('item', attrs)
            for group in i['groups']:
                item.addChild(node=Node('group', payload=[group]))
        self._owner.send(iq)
    def getItems(self):
        """
        Return list of all [bare] JIDs that the roster is currently tracks
        """
        return self._data.keys()
    def keys(self):
        """
        Same as getItems. Provided for the sake of dictionary interface
        """
        return self._data.keys()
    def __getitem__(self, item):
        """
        Get the contact in the internal format. Raises KeyError if JID 'item' is
        not in roster
        """
        return self._data[item]
    def getItem(self, item):
        """
        Get the contact in the internal format (or None if JID 'item' is not in
        roster)
        """
        if self._data.has_key(item):
            return self._data[item]
    def Subscribe(self, jid):
        """
        Send subscription request to JID 'jid'
        """
        self._owner.send(Presence(jid, 'subscribe'))
    def Unsubscribe(self, jid):
        """
        Ask for removing our subscription for JID 'jid'
        """
        self._owner.send(Presence(jid, 'unsubscribe'))
    def Authorize(self, jid):
        """
        Authorize JID 'jid'. Works only if these JID requested auth previously
        """
        self._owner.send(Presence(jid, 'subscribed'))
    def Unauthorize(self, jid):
        """
        Unauthorise JID 'jid'. Use for declining authorisation request or for
        removing existing authorization
        """
        self._owner.send(Presence(jid, 'unsubscribed'))
    def getRaw(self):
        """
        Return the internal data representation of the roster
        """
        return self._data
    def setRaw(self, data):
        """
        Replace the internal data representation of the roster
        """
        self._data = data
        self._data[self._owner.User + '@' + self._owner.Server] = {
            'resources': {},
            'name': None,
            'ask': None,
            'subscription': None,
            'groups': None
        }
        self._set = 1
    def plugin(self, owner, request=1):
        """
        Register presence and subscription trackers in the owner's dispatcher.
        Also request roster from server if the 'request' argument is set. Used
        internally
        """
        # NOTE(review): the 'owner' parameter is unused here; the handlers are
        # registered on self._owner (set by PlugIn) -- confirm intentional.
        self._owner.RegisterHandler('iq', self.RosterIqHandler, 'result', NS_ROSTER, makefirst = 1)
        self._owner.RegisterHandler('iq', self.RosterIqHandler, 'set', NS_ROSTER)
        self._owner.RegisterHandler('presence', self.PresenceHandler)
        if request:
            return self.Request()
    def _on_roster_set(self, data):
        # Receive callback: feed incoming data to the dispatcher until the
        # roster result has been processed, then fire the on_ready callback.
        if data:
            self._owner.Dispatcher.ProcessNonBlocking(data)
        if not self._set:
            return
        if not hasattr(self, '_owner') or not self._owner:
            # Connection has been closed by receiving a <stream:error> for ex,
            return
        self._owner.onreceive(None)
        if self.on_ready:
            self.on_ready(self)
            self.on_ready = None
        return True
    def getRoster(self, on_ready=None, force=False):
        """
        Request roster from server if necessary and returns self
        """
        # Returns self only when the roster is already available (or 'force'
        # is set); otherwise None, and 'on_ready' is called once it arrives.
        return_self = True
        if not self._set:
            self.on_ready = on_ready
            self._owner.onreceive(self._on_roster_set)
            return_self = False
        elif on_ready:
            on_ready(self)
            return_self = False
        if return_self or force:
            return self
        return None
|
from django.contrib import admin
from django.utils.translation import ugettext as _
from .models import AbuseReport, SearchTermRecord
admin.site.register(AbuseReport)


class SearchTermAdmin(admin.ModelAdmin):
    """Admin options for recorded search terms."""

    list_display = ('__unicode__', 'ip_address', 'get_user_full_name', )
    search_fields = ('term', )

    def get_user_full_name(self, obj):
        """Full name of the searching user, or a localized "(None)"
        placeholder when the record is anonymous."""
        user = obj.user
        if user is not None:
            return user.get_full_name()
        return "(%s)" % _(u"None")
    get_user_full_name.short_description = "user"


admin.site.register(SearchTermRecord, SearchTermAdmin)
|
"""
================================================================================
Logscaled Histogram
================================================================================
| Calculates a logarithmically spaced histogram for a data map.
| Written By: Matthew Stadelman
| Date Written: 2016/03/07
| Last Modifed: 2016/10/20
"""
import math

import scipy as sp

from .histogram import Histogram
class HistogramLogscale(Histogram):
    r"""
    Performs a histogram where the bin limits are logarithmically spaced
    based on the supplied scale factor. If there are negative values then
    the first bin contains everything below 0, the next bin will contain
    everything between 0 and 1.
    kwargs include:
        scale_fact - numeric value to generate axis scale for bins. A
        scale fact of 10 creates bins: 0-1, 1-10, 10-100, etc.
    """
    def __init__(self, field, **kwargs):
        r"""Store the data field, merge kwargs and set the action keys."""
        super().__init__(field)
        self.args.update(kwargs)
        self.output_key = 'hist_logscale'
        self.action = 'histogram_logscale'

    @classmethod
    def _add_subparser(cls, subparsers, parent):
        r"""
        Adds a specific action based sub-parser to the supplied arg_parser
        instance.
        """
        parser = subparsers.add_parser(cls.__name__,
                                       aliases=['histlog'],
                                       parents=[parent],
                                       help=cls.__doc__)
        #
        parser.add_argument('scale_fact', type=float, nargs='?', default=10.0,
                            help='base to generate logscale from')
        parser.set_defaults(func=cls)

    def define_bins(self, **kwargs):
        r"""
        This defines the bins for a logscaled histogram.

        Bin edges are successive powers of ``scale_fact`` (1, sf, sf**2, ...)
        up to the maximum data value, plus catch-all bins for values in
        [0, 1) and for negative values when present.
        """
        self.data_vector.sort()
        sf = self.args['scale_fact']
        # Number of powers of sf needed to cover the largest value.
        # math.log replaces the former sp.logn/sp.logspace calls: those
        # NumPy-alias functions were removed from SciPy's top-level
        # namespace in SciPy 1.0, so the old code fails on modern SciPy.
        num_bins = int(math.log(self.data_vector[-1], sf) + 1)
        #
        # generating initial bin limits from 1 - sf**num_bins
        limits = [float(sf)**exp for exp in range(num_bins + 1)]
        low = limits[:-1]
        high = limits[1:]
        #
        # Adding "catch all" bins for anything between 0 - 1 and less than 0
        if self.data_vector[0] < 1.0:
            low.insert(0, 0.0)
            high.insert(0, 1.0)
        if self.data_vector[0] < 0.0:
            low.insert(0, self.data_vector[0])
            high.insert(0, 0.0)
        #
        self.bins = list(zip(low, high))
|
from ..daltools.util.full import init
# NOTE(review): reference data for a three-atom molecule -- the nuclear
# charges suggest water (O, H, H); values are presumably in atomic units.
# TODO confirm against the calculation that generated them.
Z = [8., 1., 1.]
# Coordinates of the charge center.
Rc = init([0.00000000, 0.00000000, 0.48860959])
# Total dipole moment vector.
Dtot = [0, 0, -0.76539388]
# Per-atom dipole contributions, one row per atom.
Daa = init([
[ 0.00000000, 0.00000000, -0.28357300],
[ 0.15342658, 0.00000000, 0.12734703],
[-0.15342658, 0.00000000, 0.12734703],
])
# Total and nuclear quadrupole moments (6 independent components each).
QUc = init([-7.31176220, 0., 0., -5.43243232, 0., -6.36258665])
QUN = init([4.38968295, 0., 0., 0., 0., 1.75400326])
# Per-atom quadrupole contributions, one row per atom.
QUaa = init([
[-3.29253618, 0.00000000, 0.00000000, -4.54316657, 0.00000000, -4.00465380],
[-0.13213704, 0.00000000, 0.24980518, -0.44463288, 0.00000000, -0.26059139],
[-0.13213704, 0.00000000,-0.24980518, -0.44463288, 0.00000000, -0.26059139]
])
# Auxiliary 3x3 matrices used as reference values; their exact semantics
# are not evident from this file -- TODO confirm against the generating code.
Fab = init([
[-0.11E-03, 0.55E-04, 0.55E-04],
[ 0.55E-04, -0.55E-04, 0.16E-30],
[ 0.55E-04, 0.16E-30, -0.55E-04]
])
Lab = init([
[0.11E-03, 0.28E-03, 0.28E-03],
[0.28E-03, 0.17E-03, 0.22E-03],
[0.28E-03, 0.22E-03, 0.17E-03]
])
# Six rows of 3-vectors; semantics not evident from this file.
la = init([
[0.0392366,-27.2474016 , 27.2081650],
[0.0358964, 27.2214515 ,-27.2573479],
[0.01211180, -0.04775576, 0.03564396],
[0.01210615, -0.00594030, -0.00616584],
[10.69975088, -5.34987556, -5.34987532],
[-10.6565582, 5.3282791 , 5.3282791]
])
# Packed symmetric 3x3 blocks (6 components each), presumably per atom or
# atom pair (O, H1-O, H1, H2-O, H2-H1, H2); assembled into Aab below.
O = [
0.76145382,
-0.00001648, 1.75278523,
-0.00007538, 0.00035773, 1.39756345
]
H1O = [
3.11619527,
0.00019911, 1.25132346,
2.11363325, 0.00111442, 2.12790474
]
H1 = [
0.57935224,
0.00018083, 0.43312326,
0.11495546, 0.00004222, 0.45770123
]
H2O = [
3.11568759,
0.00019821, 1.25132443,
-2.11327482, -0.00142746, 2.12790473
]
H2H1 = [
0.04078206,
-0.00008380, -0.01712262,
-0.00000098, 0.00000084, -0.00200285
]
H2 = [
0.57930522,
0.00018221, 0.43312149,
-0.11493635, -0.00016407, 0.45770123
]
Aab = init([O, H1O, H1, H2O, H2H1, H2])
# Per-atom polarizabilities in packed form (6 components per row).
Aa = init([
[ 3.87739525, 0.00018217, 3.00410918, 0.00010384, 0.00020122, 3.52546819 ],
[ 2.15784091, 0.00023848, 1.05022368, 1.17177159, 0.00059985, 1.52065218 ],
[ 2.15754005, 0.00023941, 1.05022240, -1.17157425, -0.00087738, 1.52065217 ]
])
# Finite-field step used for the perturbed data below.
ff = 0.001
# NOTE(review): rMP appears to hold atomic multipole components per column,
# with 7 rows per group (one unperturbed calculation plus what look like
# +/- field perturbations, matching the step ff) -- TODO confirm the exact
# layout against the generating code.
rMP = init([
[
[-8.70343886, 0.00000000, 0.00000000, -0.39827574, -3.68114747, 0.00000000, 0.00000000, -4.58632761, 0.00000000, -4.24741556],
[-8.70343235, 0.00076124, 0.00000000, -0.39827535, -3.68114147, 0.00000000, 0.00193493, -4.58631888, 0.00000000, -4.24741290],
[-8.70343291,-0.00076166, 0.00000000, -0.39827505, -3.68114128, 0.00000000, -0.00193603, -4.58631789, 0.00000000, -4.24741229],
[-8.70343685,-0.00000006, 0.00175241, -0.39827457, -3.68114516, 0.00000000, 0.00000161, -4.58632717, 0.00053363, -4.24741642],
[-8.70343685, 0.00000000, -0.00175316, -0.39827456, -3.68114514, 0.00000000, 0.00000000, -4.58632711, -0.00053592, -4.24741639],
[-8.70166502, 0.00000000, 0.00000144, -0.39688042, -3.67884999, 0.00000000, 0.00000000, -4.58395384, 0.00000080, -4.24349307],
[-8.70520554, 0.00000000, 0.00000000, -0.39967554, -3.68344246, 0.00000000, 0.00000000, -4.58868836, 0.00000000, -4.25134640],
],
[
[ 0.00000000, 0.10023328, 0.00000000, 0.11470275, 0.53710687, 0.00000000, 0.43066796, 0.04316104, 0.00000000, 0.36285790],
[ 0.00150789, 0.10111974, 0.00000000, 0.11541803, 0.53753360, 0.00000000, 0.43120945, 0.04333774, 0.00000000, 0.36314215],
[-0.00150230, 0.09934695, 0.00000000, 0.11398581, 0.53667861, 0.00000000, 0.43012612, 0.04298361, 0.00000000, 0.36257249],
[ 0.00000331, 0.10023328, 0.00125017, 0.11470067, 0.53710812, -0.00006107, 0.43066944, 0.04316020, 0.00015952, 0.36285848],
[ 0.00000100, 0.10023249, -0.00125247, 0.11470042, 0.53710716, 0.00006135, 0.43066837, 0.04316018, -0.00015966, 0.36285788],
[ 0.00088692, 0.10059268, -0.00000064, 0.11590322, 0.53754715, -0.00000006, 0.43071206, 0.04334198, -0.00000015, 0.36330053],
[-0.00088334, 0.09987383, 0.00000000, 0.11350091, 0.53666602, 0.00000000, 0.43062352, 0.04297910, 0.00000000, 0.36241326],
],
[
[-0.64828057, 0.10330994, 0.00000000, 0.07188960, -0.47568174, 0.00000000, -0.03144252, -0.46920879, 0.00000000, -0.50818752],
[-0.64978846, 0.10389186, 0.00000000, 0.07204462, -0.47729337, 0.00000000, -0.03154159, -0.47074619, 0.00000000, -0.50963693],
[-0.64677827, 0.10273316, 0.00000000, 0.07173584, -0.47408263, 0.00000000, -0.03134407, -0.46768337, 0.00000000, -0.50674873],
[-0.64828388, 0.10331167, 0.00043314, 0.07189029, -0.47568875, -0.00023642, -0.03144270, -0.46921635, -0.00021728, -0.50819386],
[-0.64828157, 0.10331095, -0.00043311, 0.07188988, -0.47568608, 0.00023641, -0.03144256, -0.46921346, 0.00021729, -0.50819095],
[-0.64916749, 0.10338629, -0.00000024, 0.07234862, -0.47634698, 0.00000013, -0.03159569, -0.47003679, 0.00000011, -0.50936853],
[-0.64739723, 0.10323524, 0.00000000, 0.07143322, -0.47502412, 0.00000000, -0.03129003, -0.46838912, 0.00000000, -0.50701656],
],
[
[ 0.00000000,-0.10023328, 0.00000000, 0.11470275, 0.53710687, 0.00000000, -0.43066796, 0.04316104, 0.00000000, 0.36285790],
[-0.00150139,-0.09934749, 0.00000000, 0.11398482, 0.53667874, 0.00000000, -0.43012670, 0.04298387, 0.00000000, 0.36257240],
[ 0.00150826,-0.10112008, 0.00000000, 0.11541676, 0.53753350, 0.00000000, -0.43120982, 0.04333795, 0.00000000, 0.36314186],
[-0.00000130,-0.10023170, 0.00125018, 0.11470018, 0.53710620, 0.00006107, -0.43066732, 0.04316017, 0.00015952, 0.36285728],
[ 0.00000101,-0.10023249, -0.00125247, 0.11470042, 0.53710716, -0.00006135, -0.43066838, 0.04316018, -0.00015966, 0.36285788],
[ 0.00088692,-0.10059268, -0.00000064, 0.11590322, 0.53754715, 0.00000006, -0.43071206, 0.04334198, -0.00000015, 0.36330053],
[-0.00088334,-0.09987383, 0.00000000, 0.11350091, 0.53666602, 0.00000000, -0.43062352, 0.04297910, 0.00000000, 0.36241326],
],
[
[ 0.00000000, 0.00000000, 0.00000000, -0.00378789, 0.00148694, 0.00000000, 0.00000000, 0.00599079, 0.00000000, 0.01223822],
[ 0.00000000, 0.00004089, 0.00000000, -0.00378786, 0.00148338, 0.00000000, -0.00004858, 0.00599281, 0.00000000, 0.01224094],
[ 0.00000000,-0.00004067, 0.00000000, -0.00378785, 0.00148341, 0.00000000, 0.00004861, 0.00599277, 0.00000000, 0.01224093],
[ 0.00000000,-0.00000033, -0.00001707, -0.00378763, 0.00149017, 0.00000000, 0.00000001, 0.00599114, -0.00001229, 0.01223979],
[ 0.00000000, 0.00000000, 0.00001717, -0.00378763, 0.00149019, 0.00000000, 0.00000000, 0.00599114, 0.00001242, 0.01223980],
[ 0.00000000, 0.00000000, 0.00000000, -0.00378978, 0.00141897, 0.00000000, 0.00000000, 0.00590445, 0.00000002, 0.01210376],
[ 0.00000000, 0.00000000, 0.00000000, -0.00378577, 0.00155694, 0.00000000, 0.00000000, 0.00607799, 0.00000000, 0.01237393],
],
[
[-0.64828057,-0.10330994, 0.00000000, 0.07188960, -0.47568174, 0.00000000, 0.03144252, -0.46920879, 0.00000000, -0.50818752],
[-0.64677918,-0.10273369, 0.00000000, 0.07173576, -0.47408411, 0.00000000, 0.03134408, -0.46768486, 0.00000000, -0.50674986],
[-0.64978883,-0.10389230, 0.00000000, 0.07204446, -0.47729439, 0.00000000, 0.03154159, -0.47074717, 0.00000000, -0.50963754],
[-0.64827927,-0.10331022, 0.00043313, 0.07188947, -0.47568340, 0.00023642, 0.03144242, -0.46921057, -0.00021727, -0.50818804],
[-0.64828158,-0.10331095, -0.00043311, 0.07188988, -0.47568609, -0.00023641, 0.03144256, -0.46921348, 0.00021729, -0.50819097],
[-0.64916749,-0.10338629, -0.00000024, 0.07234862, -0.47634698, -0.00000013, 0.03159569, -0.47003679, 0.00000011, -0.50936853],
[-0.64739723,-0.10323524, 0.00000000, 0.07143322, -0.47502412, 0.00000000, 0.03129003, -0.46838912, 0.00000000, -0.50701656]
]
])
# Molecular polarizability tensors (3x3); Am is presumably static and Amw
# frequency-dependent -- TODO confirm.
Am = init([
[8.186766009140, 0., 0.],
[0., 5.102747935447, 0.],
[0., 0., 6.565131856389]
])
Amw = init([
[11.98694996213, 0., 0.],
[0., 4.403583657738, 0.],
[0., 0., 2.835142058626]
])
# Atomic coordinates, one row per atom.
R = [
[ 0.00000, 0.00000, 0.69801],
[-1.48150, 0.00000, -0.34901],
[ 1.48150, 0.00000, -0.34901]
]
# Total electronic charge (matches sum(Z) = 10 nuclei charge).
Qtot = -10.0
# Views extracted from the rMP array: charges, dipoles, quadrupoles and
# their finite-field derivatives (column indices select components).
Q = rMP[0, 0, (0, 2, 5)]
D = rMP[1:4, 0, :]
QU = rMP[4:, 0, :]
dQa = rMP[0, :, (0,2,5)]
dQab = rMP[0, :, (1, 3, 4)]
PAn0 = """AU
3 -1 0 1
1 0.000 0.000 0.698
1 -1.481 0.000 -0.349
1 1.481 0.000 -0.349
"""
PA00 = """AU
3 0 0 1
1 0.000 0.000 0.698 -0.703
1 -1.481 0.000 -0.349 0.352
1 1.481 0.000 -0.349 0.352
"""
PA10 = """AU
3 1 0 1
1 0.000 0.000 0.698 -0.703 -0.000 0.000 -0.284
1 -1.481 0.000 -0.349 0.352 0.153 0.000 0.127
1 1.481 0.000 -0.349 0.352 -0.153 0.000 0.127
"""
PA20 = """AU
3 2 0 1
1 0.000 0.000 0.698 -0.703 -0.000 0.000 -0.284 -3.293 0.000 -0.000 -4.543 -0.000 -4.005
1 -1.481 0.000 -0.349 0.352 0.153 0.000 0.127 -0.132 0.000 0.250 -0.445 0.000 -0.261
1 1.481 0.000 -0.349 0.352 -0.153 0.000 0.127 -0.132 -0.000 -0.250 -0.445 0.000 -0.261
"""
PA21 = """AU
3 2 1 1
1 0.000 0.000 0.698 -0.703 -0.000 0.000 -0.284 -3.293 0.000 -0.000 -4.543 -0.000 -4.005 3.466
1 -1.481 0.000 -0.349 0.352 0.153 0.000 0.127 -0.132 0.000 0.250 -0.445 0.000 -0.261 1.576
1 1.481 0.000 -0.349 0.352 -0.153 0.000 0.127 -0.132 -0.000 -0.250 -0.445 0.000 -0.261 1.576
"""
PA22 = """AU
3 2 2 1
1 0.000 0.000 0.698 -0.703 -0.000 0.000 -0.284 -3.293 0.000 -0.000 -4.543 -0.000 -4.005 3.875 -0.000 -0.000 3.000 -0.000 3.524
1 -1.481 0.000 -0.349 0.352 0.153 0.000 0.127 -0.132 0.000 0.250 -0.445 0.000 -0.261 2.156 -0.000 1.106 1.051 -0.000 1.520
1 1.481 0.000 -0.349 0.352 -0.153 0.000 0.127 -0.132 -0.000 -0.250 -0.445 0.000 -0.261 2.156 -0.000 -1.106 1.051 -0.000 1.520
"""
OUTPUT_n0_1 = """\
---------------
Atomic domain 1
---------------
Domain center: 0.00000 0.00000 0.69801
"""
OUTPUT_00_1 = OUTPUT_n0_1 + """\
Nuclear charge: 8.00000
Electronic charge: -8.70344
Total charge: -0.70344
"""
OUTPUT_10_1 = OUTPUT_00_1 + """\
Electronic dipole -0.00000 0.00000 -0.28357
"""
OUTPUT_20_1 = OUTPUT_10_1 + """\
Electronic quadrupole -3.29254 0.00000 -0.00000 -4.54317 0.00000 -4.00466
"""
OUTPUT_01_1 = OUTPUT_00_1 + """\
Isotropic polarizablity (w=0) 3.46639
"""
OUTPUT_02_1 = OUTPUT_00_1 + """\
Electronic polarizability (w=0) 3.87468 -0.00000 3.00027 -0.00000 -0.00000 3.52422
"""
|
import inctest
error = 0
try:
a = inctest.A()
except:
print "didn't find A"
print "therefore, I didn't include 'testdir/subdir1/hello.i'"
error = 1
pass
try:
b = inctest.B()
except:
print "didn't find B"
print "therefore, I didn't include 'testdir/subdir2/hello.i'"
error = 1
pass
if error == 1:
raise RuntimeError
if inctest.importtest1(5) != 15:
print "import test 1 failed"
raise RuntimeError
if inctest.importtest2("black") != "white":
print "import test 2 failed"
raise RuntimeError
|
from __future__ import absolute_import, print_function, division
from mitmproxy import exceptions
import pprint
def _get_name(itm):
return getattr(itm, "name", itm.__class__.__name__)
class Addons(object):
    """Keeps the ordered chain of addons attached to a master object and
    dispatches lifecycle/option events to them."""

    def __init__(self, master):
        self.chain = []
        self.master = master
        # Re-configure all addons whenever an option changes.
        master.options.changed.connect(self.options_update)

    def options_update(self, options, updated):
        """Propagate an options change to every addon in the chain."""
        for addon in self.chain:
            with self.master.handlecontext():
                addon.configure(options, updated)

    def add(self, options, *addons):
        """Append *addons* to the chain, starting and configuring each."""
        if not addons:
            raise ValueError("No addons specified.")
        self.chain.extend(addons)
        for addon in addons:
            self.invoke_with_context(addon, "start")
            self.invoke_with_context(
                addon,
                "configure",
                self.master.options,
                self.master.options.keys(),
            )

    def remove(self, addon):
        """Drop *addon* from the chain and fire its "done" handler."""
        self.chain = [existing for existing in self.chain
                      if existing is not addon]
        self.invoke_with_context(addon, "done")

    def done(self):
        """Fire the "done" handler of every addon in the chain."""
        for addon in self.chain:
            self.invoke_with_context(addon, "done")

    def has_addon(self, name):
        """
        Is an addon with this name registered?
        """
        for addon in self.chain:
            if _get_name(addon) == name:
                return True

    def __len__(self):
        return len(self.chain)

    def __str__(self):
        return pprint.pformat([str(addon) for addon in self.chain])

    def invoke_with_context(self, addon, name, *args, **kwargs):
        """Like :meth:`invoke`, but inside the master's handler context."""
        with self.master.handlecontext():
            self.invoke(addon, name, *args, **kwargs)

    def invoke(self, addon, name, *args, **kwargs):
        """Call the *name* handler on *addon*, if it defines one."""
        func = getattr(addon, name, None)
        if not func:
            return
        if not callable(func):
            raise exceptions.AddonError(
                "Addon handler %s not callable" % name
            )
        func(*args, **kwargs)

    def __call__(self, name, *args, **kwargs):
        for addon in self.chain:
            self.invoke(addon, name, *args, **kwargs)
|
import os # os.system for clearing screen and simple gam calls
import subprocess # subprocess.Popen is to capture gam output (needed for user info in particular)
import MySQLdb # MySQLdb is to get data from relevant tables
import csv # CSV is used to read output of drive commands that supply data in CSV form
import bip_config # declare installation specific variables
# Installation-specific settings loaded from bip_config at import time.
varMySQLHost = bip_config.mysqlconfig['host']          # MySQL server host
varMySQLUser = bip_config.mysqlconfig['user']          # MySQL user name
varMySQLPassword = bip_config.mysqlconfig['password']  # MySQL password
varMySQLDB = bip_config.mysqlconfig['db']              # database name
varCommandGam = bip_config.gamconfig['fullpath']       # full path to the gam binary
|
r"""
********************************************
**espressopp.integrator.LangevinThermostat**
********************************************
.. function:: espressopp.integrator.LangevinThermostat(system)
:param system:
:type system:
"""
from espressopp.esutil import cxxinit
from espressopp import pmi
from espressopp.integrator.Extension import *
from _espressopp import integrator_LangevinThermostat
class LangevinThermostatLocal(ExtensionLocal, integrator_LangevinThermostat):
    """Worker-side wrapper around the C++ Langevin thermostat extension."""
    def __init__(self, system):
        # Initialize the C++ object only when PMI is inactive or this MPI
        # rank belongs to the active PMI worker group.
        if not (pmi._PMIComm and pmi._PMIComm.isActive()) or pmi._MPIcomm.rank in pmi._PMIComm.getMPIcpugroup():
            cxxinit(self, integrator_LangevinThermostat, system)
    #def enableAdress(self):
    #    if pmi.workerIsActive():
    #        self.cxxclass.enableAdress(self);
# Controller-side proxy class: forwards property access ('gamma',
# 'temperature', 'adress') to the worker-local instances via PMI.
if pmi.isController :
    class LangevinThermostat(Extension):
        __metaclass__ = pmi.Proxy
        pmiproxydefs = dict(
            cls = 'espressopp.integrator.LangevinThermostatLocal',
            pmiproperty = [ 'gamma', 'temperature', 'adress' ]
        )
|
import argparse
import os
import subprocess
import tempfile
# Ubuntu series whose autopkgtest results are published.
ACTIVE_DISTROS = ("xenial", "artful", "bionic")
def main():
    """Fetch, format and publish the autopkgtest results for one day."""
    parser = argparse.ArgumentParser()
    parser.add_argument("day", help="The day of the results, with format yyyymmdd")
    args = parser.parse_args()

    install_autopkgtest_results_formatter()
    # Work inside a throwaway checkout under $HOME.
    home = os.environ.get("HOME")
    with tempfile.TemporaryDirectory(dir=home) as temp_dir:
        clone_results_repo(temp_dir)
        format_results(temp_dir, ACTIVE_DISTROS, args.day)
        commit_and_push(temp_dir, args.day)
def install_autopkgtest_results_formatter():
    """Install the formatter snap from the edge channel (needs sudo)."""
    command = ["sudo", "snap", "install", "autopkgtest-results-formatter", "--edge"]
    subprocess.check_call(command)
def clone_results_repo(dest_dir):
    """Clone the published results repository into *dest_dir*."""
    repo_url = "https://github.com/elopio/autopkgtest-results.git"
    subprocess.check_call(["git", "clone", repo_url, dest_dir])
def format_results(dest_dir, distros, day):
    """Run the formatter over *distros* for *day*, writing into *dest_dir*."""
    command = [
        "/snap/bin/autopkgtest-results-formatter",
        "--destination",
        dest_dir,
        "--distros",
    ]
    command.extend(distros)
    command.extend(["--day", day])
    subprocess.check_call(command)
def commit_and_push(repo_dir, day):
    """Commit everything in *repo_dir* and push it using the CI token."""
    # Identify the CI bot for the commit.
    subprocess.check_call(
        ["git", "config", "--global", "user.email", "u1test+m-o@canonical.com"]
    )
    subprocess.check_call(["git", "config", "--global", "user.name", "snappy-m-o"])

    git = ["git", "-C", repo_dir]
    subprocess.check_call(git + ["add", "--all"])
    subprocess.check_call(
        git + ["commit", "--message", "Add the results for {}".format(day)]
    )
    # NOTE(review): the token is embedded in the remote URL and therefore
    # visible in the process list -- presumably acceptable for this CI job.
    remote = "https://{GH_TOKEN}@github.com/elopio/autopkgtest-results.git".format(
        GH_TOKEN=os.environ.get("GH_TOKEN_PPA_AUTOPKGTEST_RESULTS")
    )
    subprocess.check_call(git + ["push", remote])
# Script entry point.
if __name__ == "__main__":
    main()
|
import gtk
class ExtensionFeatures:
    # Feature identifiers an extension may advertise via ``get_features``.
    SYSTEM_WIDE = 0
class MountManagerExtension:
    """Base class for all mount manager extensions.

    A single mount manager instance is created on program startup; it calls
    the methods defined here automatically, so subclasses override the ones
    they need.
    """

    # features extension supports
    features = ()

    def __init__(self, parent, window):
        self._parent = parent
        self._window = window
        self._application = parent._application

        # user interface scaffolding shared by every extension
        self._container = gtk.VBox(False, 5)
        self._controls = gtk.HBox(False, 5)
        divider = gtk.HSeparator()

        # pack controls at the bottom of the container
        self._container.pack_end(divider, False, False, 0)
        self._container.pack_end(self._controls, False, False, 0)

    def can_handle(self, uri):
        """Returns boolean denoting if specified URI can be handled by this extension"""
        return False

    def get_container(self):
        """Return container widget"""
        return self._container

    def get_information(self):
        """Returns information about extension as an (icon, name) pair."""
        return None, None

    def unmount(self, uri):
        """Method called by the mount manager for unmounting the selected URI"""

    def focus_object(self):
        """Method called by the mount manager for focusing main object"""

    @classmethod
    def get_features(cls):
        """Returns set of features supported by extension"""
        return cls.features
|
import random
from google.appengine.api import memcache
from google.appengine.ext import ndb
# Key format for individual shard entities: "shard-<counter name>-<index>".
SHARD_KEY_TEMPLATE = 'shard-{}-{:d}'
class GeneralCounterShardConfig(ndb.Model):
    # How many shards the named counter is spread across.
    num_shards = ndb.IntegerProperty(default=20)
    @classmethod
    def all_keys(cls, name):
        """Return the ndb keys of every shard of counter *name*.
        NOTE(review): these keys have no parent, while ``_increment`` stores
        shards under a 'ShardCounterParent' ancestor -- verify that the keys
        built here actually match the stored entities.
        """
        config = cls.get_or_insert(name)
        shard_key_strings = [SHARD_KEY_TEMPLATE.format(name, index) for index in range(config.num_shards)]
        return [ndb.Key(GeneralCounterShard, shard_key_string) for shard_key_string in shard_key_strings]
class GeneralCounterShard(ndb.Model):
    # One shard's partial tally; the counter's true value is the sum of
    # ``count`` over all of its shards.
    count = ndb.IntegerProperty(default=0)
def get_count(name):
    """Sum all shards of counter *name*, caching the total in memcache."""
    total = memcache.get(name)
    if total is not None:
        return total

    total = 0
    parent_key = ndb.Key('ShardCounterParent', name)
    shards = GeneralCounterShard.query(ancestor=parent_key).fetch(limit=None)
    for shard in shards:
        if shard is not None:
            total += shard.count
    memcache.add(name, total, 7200)  # 2 hours to expire
    return total
def increment(name):
    """Bump counter *name* by one, honoring its configured shard count."""
    config = GeneralCounterShardConfig.get_or_insert(name)
    shard_total = config.num_shards
    return _increment(name, shard_total)
@ndb.transactional
def _increment(name, num_shards):
    """Transactionally add one to a randomly chosen shard of *name*."""
    index = random.randint(0, num_shards - 1)
    shard_id = SHARD_KEY_TEMPLATE.format(name, index)
    parent_key = ndb.Key('ShardCounterParent', name)
    counter = GeneralCounterShard.get_by_id(shard_id, parent = parent_key)
    if counter is None:
        counter = GeneralCounterShard(parent = parent_key, id=shard_id)
    counter.count += 1
    counter.put()
    # memcache.incr is a no-op when the key is absent; fall back to a
    # full recount in that case.
    cached = memcache.incr(name)
    if cached is None:
        return get_count(name)
    return cached
@ndb.transactional
def increase_shards(name, num_shards):
    """Grow counter *name* to at least *num_shards* shards (never shrinks)."""
    config = GeneralCounterShardConfig.get_or_insert(name)
    if config.num_shards >= num_shards:
        return
    config.num_shards = num_shards
    config.put()
|
class Serializer(object):
    """Base class for HTTP API serializers, with a class-level registry of
    output formats."""

    schemaless = True
    encapsulate = True
    registry = {}

    def __init__(self, query_params, pretty=False, **kwargs):
        self.pretty = pretty
        self._query_params = query_params
        self._fileName = None
        self._lastModified = None
        self._extra_args = kwargs

    @classmethod
    def register(cls, tag, serializer):
        """Associate *serializer* with the format *tag*."""
        cls.registry[tag] = serializer

    @classmethod
    def getAllFormats(cls):
        """List every registered format tag."""
        return list(cls.registry)

    @classmethod
    def create(cls, dformat, query_params=None, **kwargs):
        """
        A serializer factory
        """
        serializer = cls.registry.get(dformat)
        if not serializer:
            raise Exception("Serializer for '%s' does not exist!" % dformat)
        return serializer(query_params or {}, **kwargs)

    def getMIMEType(self):
        # Subclasses are expected to define ``_mime``.
        return self._mime

    def set_headers(self, response):
        response.content_type = self.getMIMEType()

    def __call__(self, obj, *args, **kwargs):
        self._obj = obj
        self._data = self._execute(obj, *args, **kwargs)
        return self._data
from indico.web.http_api.metadata.json import JSONSerializer
from indico.web.http_api.metadata.xml import XMLSerializer
|
"""Data models for referral system."""
from __future__ import unicode_literals
from builtins import map
from django.db import models
from django.core.urlresolvers import reverse
from pttrack.models import (ReferralType, ReferralLocation, Note,
ContactMethod, CompletableMixin,)
from followup.models import ContactResult, NoAptReason, NoShowReason
class Referral(Note):
    """A record of a particular patient's referral to a particular center."""

    STATUS_SUCCESSFUL = 'S'
    STATUS_PENDING = 'P'
    STATUS_UNSUCCESSFUL = 'U'

    # Status if there are no referrals of a specific type
    # Used in aggregate_referral_status
    NO_REFERRALS_CURRENTLY = "No referrals currently"

    REFERRAL_STATUSES = (
        (STATUS_SUCCESSFUL, 'Successful'),
        (STATUS_PENDING, 'Pending'),
        (STATUS_UNSUCCESSFUL, 'Unsuccessful'),
    )

    location = models.ManyToManyField(ReferralLocation)
    comments = models.TextField(blank=True)
    status = models.CharField(
        max_length=50, choices=REFERRAL_STATUSES, default=STATUS_PENDING)
    kind = models.ForeignKey(
        ReferralType,
        # Typo fix: was "recieve".
        help_text="The kind of care the patient should receive at the "
                  "referral location.")

    def __str__(self):
        """Provides string to display on front end for referral.
        For FQHC referrals, returns referral kind and date.
        For non-FQHC referrals, returns referral location and date.
        """
        formatted_date = self.written_datetime.strftime("%D")
        if self.kind.is_fqhc:
            return "%s referral on %s" % (self.kind, formatted_date)
        else:
            location_names = [loc.name for loc in self.location.all()]
            # Bug fix: separator was " ," which rendered "A ,B".
            locations = ", ".join(location_names)
            return "Referral to %s on %s" % (locations, formatted_date)

    @staticmethod
    def aggregate_referral_status(referrals):
        """Collapse a queryset of referrals into one display status string.

        Returns the 'Successful' label only when every referral succeeded;
        otherwise the display label of the last referral's status, or
        NO_REFERRALS_CURRENTLY for an empty queryset.
        """
        referral_status_output = ""
        if referrals:
            all_successful = all(referral.status == Referral.STATUS_SUCCESSFUL
                                 for referral in referrals)
            if all_successful:
                referral_status_output = (dict(Referral.REFERRAL_STATUSES)
                                          [Referral.STATUS_SUCCESSFUL])
            else:
                # Determine referral status based on the last FQHC referral
                referral_status_output = (dict(Referral.REFERRAL_STATUSES)
                                          [referrals.last().status])
        else:
            referral_status_output = Referral.NO_REFERRALS_CURRENTLY
        return referral_status_output
class FollowupRequest(Note, CompletableMixin):
    """A request to follow up with a patient about a specific referral."""
    referral = models.ForeignKey(Referral)
    contact_instructions = models.TextField()
    # URL names used to build links for completing / administering this
    # request (resolved via Django's reverse()).
    MARK_DONE_URL_NAME = 'new-patient-contact'
    ADMIN_URL_NAME = ''
    def class_name(self):
        # Concrete subclass name; presumably used by the UI to branch on the
        # note type -- verify against the templates.
        return self.__class__.__name__
    def short_name(self):
        return "Referral"
    def summary(self):
        return self.contact_instructions
    def mark_done_url(self):
        """URL of the form that records the patient contact for this request."""
        return reverse(self.MARK_DONE_URL_NAME,
                       args=(self.referral.patient.id,
                             self.referral.id,
                             self.id))
    def admin_url(self):
        """Link to this object's page in the Django admin."""
        return reverse(
            'admin:referral_followuprequest_change',
            args=(self.id,)
        )
    def __str__(self):
        formatted_date = self.due_date.strftime("%D")
        return 'Followup with %s on %s about %s' % (self.patient,
                                                    formatted_date,
                                                    self.referral)
class PatientContact(Note):
    """Documents the outcome of contacting a patient about a referral."""

    followup_request = models.ForeignKey(FollowupRequest)
    referral = models.ForeignKey(Referral)

    contact_method = models.ForeignKey(
        ContactMethod,
        null=False,
        blank=False,
        help_text="What was the method of contact?")

    contact_status = models.ForeignKey(
        ContactResult,
        blank=False,
        null=False,
        help_text="Did you make contact with the patient about this referral?")

    PTSHOW_YES = "Y"
    PTSHOW_NO = "N"
    PTSHOW_OPTS = [(PTSHOW_YES, "Yes"),
                   (PTSHOW_NO, "No")]

    has_appointment = models.CharField(
        choices=PTSHOW_OPTS,
        blank=True, max_length=1,
        verbose_name="Appointment scheduled?",
        help_text="Did the patient make an appointment?")

    no_apt_reason = models.ForeignKey(
        NoAptReason,
        blank=True,
        null=True,
        verbose_name="No appointment reason",
        help_text="If the patient didn't make an appointment, why not?")

    appointment_location = models.ManyToManyField(
        ReferralLocation,
        blank=True,
        help_text="Where did the patient make an appointment?")

    pt_showed = models.CharField(
        max_length=1,
        choices=PTSHOW_OPTS,
        blank=True,
        null=True,
        verbose_name="Appointment attended?",
        help_text="Did the patient show up to the appointment?")

    no_show_reason = models.ForeignKey(
        NoShowReason,
        blank=True,
        null=True,
        help_text="If the patient didn't go to the appointment, why not?")

    def short_text(self):
        """Return a short text description of this followup and what happened.
        Used on the patient chart view as the text in the list of followups.
        """
        # Bug fixes: the location separator was " ," (space before comma)
        # and the "has not yet gone" sentence lacked its leading space.
        locations = ", ".join(map(str, self.appointment_location.all()))
        if self.pt_showed == self.PTSHOW_YES:
            return "Patient went to appointment at " + locations + "."
        if self.has_appointment == self.PTSHOW_YES:
            return ("Patient made appointment at " + locations +
                    " but has not yet gone.")
        if self.contact_status.patient_reached:
            return ("Successfully contacted patient but the "
                    "patient has not made an appointment yet.")
        return "Did not successfully contact patient"
|
'''Test the analysis.signal module.'''
from __future__ import absolute_import, print_function, division
import pytest
import numpy as np
import gridcells.analysis.signal as asignal
from gridcells.analysis.signal import (local_extrema, local_maxima,
local_minima, ExtremumTypes,
LocalExtrema)
RTOL = 1e-10  # relative tolerance for comparing against numpy references
def _data_generator(n_items, sz):
'''Generate pairs of test vectors.'''
it = 0
while it < n_items:
N1 = np.random.randint(sz) + 1
N2 = np.random.randint(sz) + 1
if N1 == 0 and N2 == 0:
continue
a1 = np.random.rand(N1)
a2 = np.random.rand(N2)
yield (a1, a2)
it += 1
class TestCorrelation(object):
    '''
    Test the analysis.signal.corr function (and effectively the core of the
    autoCorrelation) function.
    '''
    maxN = 500        # maximal size of each random test vector
    maxLoops = 1000   # number of random vector pairs to test
    def test_onesided(self):
        '''Test the one-sided version of ``corr``.'''
        for a1, a2 in _data_generator(self.maxLoops, self.maxN):
            c_cpp = asignal.corr(a1, a2, mode='onesided')
            # np.correlate's 'full' output is reversed relative to corr's lag
            # convention; keep only the non-negative lags.
            c_np = np.correlate(a1, a2, mode='full')[::-1][a1.size - 1:]
            np.testing.assert_allclose(c_cpp, c_np, rtol=RTOL)
    def test_twosided(self):
        '''Test the two-sided version of ``corr``.'''
        for a1, a2 in _data_generator(self.maxLoops, self.maxN):
            c_cpp = asignal.corr(a1, a2, mode='twosided')
            c_np = np.correlate(a1, a2, mode='full')[::-1]
            np.testing.assert_allclose(c_cpp, c_np, rtol=RTOL)
    def test_range(self):
        '''Test the ranged version of ``corr``.'''
        # Half the range of both signals
        for a1, a2 in _data_generator(self.maxLoops, self.maxN):
            if a1.size <= 1 or a2.size <= 1:
                continue
            lag_start = - (a1.size // 2)
            lag_end = a2.size // 2
            # Index of zero lag in the reversed full correlation.
            c_np_centre = a1.size - 1
            c_cpp = asignal.corr(a1, a2, mode='range', lag_start=lag_start,
                                 lag_end=lag_end)
            c_np = np.correlate(a1, a2, mode='full')[::-1]
            np.testing.assert_allclose(
                c_cpp,
                c_np[c_np_centre + lag_start:c_np_centre + lag_end + 1],
                rtol=RTOL)
    def test_zero_len(self):
        '''Test that an exception is raised when inputs have zero length.'''
        a1 = np.array([])
        a2 = np.arange(10)
        # corr(a1, a2)
        lag_start = 0
        lag_end = 0
        # Every mode must reject an empty operand on either side.
        for mode in ("onesided", "twosided", "range"):
            with pytest.raises(TypeError):
                asignal.corr(a1, a2, mode, lag_start, lag_end)
            with pytest.raises(TypeError):
                asignal.corr(a2, a1, mode, lag_start, lag_end)
            with pytest.raises(TypeError):
                asignal.corr(a1, a1, mode, lag_start, lag_end)
    def test_non_double(self):
        '''Test the corr function when dtype is not double.'''
        a1 = np.array([1, 2, 3], dtype=int)
        asignal.corr(a1, a1, mode='twosided')
class TestAutoCorrelation(object):
    '''Test the acorr function.'''
    maxN = 500        # maximal test vector size (kept for symmetry; unused here)
    maxLoops = 1000   # loop budget (kept for symmetry; unused here)
    def test_default_params(self):
        '''Test default parameters.'''
        a = np.arange(10)
        c_cpp = asignal.acorr(a)
        # Default is the one-sided autocorrelation (non-negative lags only).
        c_np = np.correlate(a, a, mode='full')[::-1][a.size - 1:]
        np.testing.assert_allclose(c_cpp, c_np, rtol=RTOL)
    def test_onesided(self):
        '''Test the one-sided version of ``corr``.'''
        a = np.arange(10)
        c_cpp = asignal.acorr(a, mode='onesided', max_lag=5)
        # Lags 0..5 inclusive -> 6 samples from the zero-lag position.
        c_np = np.correlate(a, a, mode='full')[::-1][a.size - 1:a.size - 1 + 6]
        np.testing.assert_allclose(c_cpp, c_np, rtol=RTOL)
    def test_twosided(self):
        '''Test the two-sided version of ``corr``.'''
        a = np.arange(10)
        c_cpp = asignal.acorr(a, mode='twosided', max_lag=5)
        # Lags -5..5 inclusive around the zero-lag position.
        c_np = np.correlate(a, a, mode='full')[::-1][a.size - 6:a.size + 5]
        np.testing.assert_allclose(c_cpp, c_np, rtol=RTOL)
    def test_norm(self):
        '''Test normalization.'''
        # Simple array
        a = np.arange(10)
        c_cpp = asignal.acorr(a, mode='twosided', norm=True)
        c_np = np.correlate(a, a, mode='full')[::-1]
        np.testing.assert_allclose(c_cpp, c_np / np.max(c_np), rtol=RTOL)
        # A zero array will return zero
        zero_array = np.zeros(13)
        c_cpp = asignal.acorr(zero_array, mode='twosided', norm=True)
        assert np.all(c_cpp == 0.)
def generate_sin(n_half_cycles, resolution=100):
    '''Generate a sine function with a number of (full) half cycles.
    Note that the positions of the extrema might be shifted +/- 1 with respect
    to the actual real sin because of possible rounding errors.
    Parameters
    ----------
    n_half_cycles : int
        Number of half cycles to generate. Does not have to be even.
    resolution : int
        Number of data points for each half cycle.
    '''
    if n_half_cycles < 1:
        raise ValueError()
    if resolution < 1:
        raise ValueError()
    # One full cycle spans 2 * resolution samples.
    freq = 1. / (2 * resolution)
    t = np.arange(n_half_cycles * resolution, dtype=float)
    sig = np.sin(2 * np.pi * freq * t)
    # One extremum per half cycle, in the middle of it.
    extrema_positions = np.array(np.arange(n_half_cycles) * resolution +
                                 resolution / 2,
                                 dtype=int)
    extrema_types = []
    next_type = ExtremumTypes.MAX
    for _ in range(n_half_cycles):
        extrema_types.append(next_type)
        # Maxima and minima alternate, starting with a maximum.
        next_type = (ExtremumTypes.MIN if next_type is ExtremumTypes.MAX
                     else ExtremumTypes.MAX)
    return (sig, extrema_positions, np.array(extrema_types))
class TestLocalExtrema(object):
    '''Test computation of local extrema.'''
    def test_local_extrema(self):
        # Each half cycle of the generated sine carries exactly one extremum.
        for n_extrema in [1, 2, 51]:
            sig, extrema_idx, extrema_types = generate_sin(n_extrema)
            extrema = local_extrema(sig)
            assert len(extrema) == n_extrema
            assert np.all(extrema_idx[extrema_types == ExtremumTypes.MIN] ==
                          extrema.get_type(ExtremumTypes.MIN))
            assert np.all(extrema_idx[extrema_types == ExtremumTypes.MAX] ==
                          extrema.get_type(ExtremumTypes.MAX))
    def test_zero_array(self):
        # An empty signal has no extrema.
        for func in [local_extrema, local_maxima, local_minima]:
            extrema = func(np.empty(0))
            assert len(extrema) == 0
    def test_single_item(self):
        '''This should return a zero length array.'''
        for func in [local_extrema, local_maxima, local_minima]:
            extrema = func(np.array([1.]))
            assert len(extrema) == 0
    def test_maxima(self):
        # One maximum only
        for n_extrema in [1, 2]:
            sig, extrema_idx, extrema_types = generate_sin(n_extrema)
            maxima = local_maxima(sig)
            assert len(maxima) == 1
            assert np.all(extrema_idx[extrema_types == ExtremumTypes.MAX] ==
                          maxima)
        # 2 maxima
        for n_extrema in [3, 4]:
            sig, extrema_idx, extrema_types = generate_sin(n_extrema)
            maxima = local_maxima(sig)
            assert len(maxima) == 2
            assert np.all(extrema_idx[extrema_types == ExtremumTypes.MAX] ==
                          maxima)
    def test_minima(self):
        # Only one maximum so should return empty
        n_extrema = 1
        sig, extrema_idx, extrema_types = generate_sin(n_extrema)
        minima = local_minima(sig)
        assert len(minima) == 0
        assert np.all(extrema_idx[extrema_types == ExtremumTypes.MIN] ==
                      minima)
        # One maximum and minimum
        n_extrema = 2
        sig, extrema_idx, extrema_types = generate_sin(n_extrema)
        minima = local_minima(sig)
        assert len(minima) == 1
        assert np.all(extrema_idx[extrema_types == ExtremumTypes.MIN] ==
                      minima)
        # 2 minima
        for n_extrema in [4, 5]:
            sig, extrema_idx, extrema_types = generate_sin(n_extrema)
            minima = local_minima(sig)
            assert len(minima) == 2
            assert np.all(extrema_idx[extrema_types == ExtremumTypes.MIN] ==
                          minima)
class TestLocalExtremaClass(object):
    '''Test the local extremum object.'''
    def test_empty(self):
        extrema = LocalExtrema([], [])
        assert len(extrema) == 0
        assert len(extrema.get_type(ExtremumTypes.MIN)) == 0  # FIXME
    def test_inconsistent_inputs(self):
        # Mismatched extrema/type lengths must be rejected.
        with pytest.raises(IndexError):
            extrema = LocalExtrema([], [1])
        with pytest.raises(IndexError):
            extrema = LocalExtrema(np.arange(10), [1])
    def test_single_type(self):
        N = 10
        test_vector = np.arange(N)
        # When all extrema share one type, only that type yields results.
        for tested_type in ExtremumTypes:
            extrema = LocalExtrema(test_vector, [tested_type] * N)
            assert len(extrema) == N
            for current_type in ExtremumTypes:
                retrieved = extrema.get_type(current_type)
                if current_type is tested_type:
                    assert len(retrieved) == N
                    assert np.all(retrieved == test_vector)
                else:
                    assert len(retrieved) == 0
    def test_mixed_types(self):
        N = 10
        test_vector = np.arange(10)
        # Alternate MAX (even indices) and MIN (odd indices).
        test_types = np.ones(N) * ExtremumTypes.MIN
        test_types[0:10:2] = ExtremumTypes.MAX
        extrema = LocalExtrema(test_vector, test_types)
        assert len(extrema) == N
        retrieved_min = extrema.get_type(ExtremumTypes.MIN)
        assert np.all(retrieved_min == test_vector[1:10:2])
        retrieved_max = extrema.get_type(ExtremumTypes.MAX)
        assert np.all(retrieved_max == test_vector[0:10:2])
        # Should not find any other types
        for current_type in ExtremumTypes:
            if (current_type is not ExtremumTypes.MIN and current_type is not
                    ExtremumTypes.MAX):
                assert len(extrema.get_type(current_type)) == 0
|
"""Add timetable related tables
Revision ID: 33a1d6f25951
Revises: 225d0750c216
Create Date: 2015-11-25 14:05:51.856236
"""
import sqlalchemy as sa
from alembic import op
from indico.core.db.sqlalchemy import PyIntEnum, UTCDateTime
from indico.modules.events.timetable.models.entries import TimetableEntryType
revision = '33a1d6f25951'
down_revision = '225d0750c216'
def upgrade():
    """Create the timetable tables: events.breaks and events.timetable_entries."""
    # Break
    op.create_table(
        'breaks',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('title', sa.String(), nullable=False),
        sa.Column('description', sa.Text(), nullable=False),
        sa.Column('duration', sa.Interval(), nullable=False),
        sa.Column('text_color', sa.String(), nullable=False),
        sa.Column('background_color', sa.String(), nullable=False),
        sa.Column('room_name', sa.String(), nullable=False),
        sa.Column('inherit_location', sa.Boolean(), nullable=False),
        sa.Column('address', sa.Text(), nullable=False),
        sa.Column('venue_id', sa.Integer(), nullable=True, index=True),
        sa.Column('venue_name', sa.String(), nullable=False),
        sa.Column('room_id', sa.Integer(), nullable=True, index=True),
        # Location consistency: custom venue/room names are mutually
        # exclusive with references to managed roombooking rows.
        sa.CheckConstraint("(room_id IS NULL) OR (venue_name = '' AND room_name = '')",
                           name='no_custom_location_if_room'),
        sa.CheckConstraint("(venue_id IS NULL) OR (venue_name = '')", name='no_venue_name_if_venue_id'),
        sa.CheckConstraint("(room_id IS NULL) OR (venue_id IS NOT NULL)", name='venue_id_if_room_id'),
        sa.CheckConstraint("NOT inherit_location OR (venue_id IS NULL AND room_id IS NULL AND venue_name = '' AND "
                           "room_name = '' AND address = '')", name='inherited_location'),
        # NOTE(review): `colors_not_empty` forces both colors non-empty, which
        # makes `both_or_no_colors` redundant -- confirm both are intended.
        sa.CheckConstraint("(text_color = '') = (background_color = '')", name='both_or_no_colors'),
        sa.CheckConstraint("text_color != '' AND background_color != ''", name='colors_not_empty'),
        sa.ForeignKeyConstraint(['room_id'], ['roombooking.rooms.id']),
        sa.ForeignKeyConstraint(['venue_id'], ['roombooking.locations.id']),
        sa.ForeignKeyConstraint(['venue_id', 'room_id'], ['roombooking.rooms.location_id', 'roombooking.rooms.id']),
        sa.PrimaryKeyConstraint('id'),
        schema='events'
    )
    # TimetableEntry
    op.create_table(
        'timetable_entries',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('event_id', sa.Integer(), nullable=False, index=True),
        sa.Column('parent_id', sa.Integer(), nullable=True, index=True),
        sa.Column('session_block_id', sa.Integer(), nullable=True, index=True, unique=True),
        sa.Column('contribution_id', sa.Integer(), nullable=True, index=True, unique=True),
        sa.Column('break_id', sa.Integer(), nullable=True, index=True, unique=True),
        sa.Column('type', PyIntEnum(TimetableEntryType), nullable=False),
        sa.Column('start_dt', UTCDateTime, nullable=False),
        sa.Index('ix_timetable_entries_start_dt_desc', sa.text('start_dt DESC')),
        # The numeric literals below are TimetableEntryType values
        # (presumably 1=session block, 2=contribution, 3=break -- confirm
        # against indico.modules.events.timetable.models.entries).
        sa.CheckConstraint('type != 1 OR parent_id IS NULL', name='valid_parent'),
        sa.CheckConstraint('type != 1 OR (contribution_id IS NULL AND break_id IS NULL AND '
                           'session_block_id IS NOT NULL)', name='valid_session_block'),
        sa.CheckConstraint('type != 2 OR (session_block_id IS NULL AND break_id IS NULL AND '
                           'contribution_id IS NOT NULL)', name='valid_contribution'),
        sa.CheckConstraint('type != 3 OR (contribution_id IS NULL AND session_block_id IS NULL AND '
                           'break_id IS NOT NULL)', name='valid_break'),
        sa.ForeignKeyConstraint(['break_id'], ['events.breaks.id']),
        sa.ForeignKeyConstraint(['contribution_id'], ['events.contributions.id']),
        sa.ForeignKeyConstraint(['event_id'], ['events.events.id']),
        sa.ForeignKeyConstraint(['parent_id'], ['events.timetable_entries.id']),
        sa.ForeignKeyConstraint(['session_block_id'], ['events.session_blocks.id']),
        sa.PrimaryKeyConstraint('id'),
        schema='events'
    )
def downgrade():
    """Drop the timetable tables in reverse dependency order
    (timetable_entries references breaks via break_id)."""
    op.drop_table('timetable_entries', schema='events')
    op.drop_table('breaks', schema='events')
|
import constants, sys
from charsetgroupprober import CharSetGroupProber
from sbcharsetprober import SingleByteCharSetProber
from langcyrillicmodel import Win1251CyrillicModel, Koi8rModel, Latin5CyrillicModel, MacCyrillicModel, Ibm866Model, Ibm855Model
from langgreekmodel import Latin7GreekModel, Win1253GreekModel
from langbulgarianmodel import Latin5BulgarianModel, Win1251BulgarianModel
from langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel
from langthaimodel import TIS620ThaiModel
from langhebrewmodel import Win1255HebrewModel
from hebrewprober import HebrewProber
class SBCSGroupProber(CharSetGroupProber):
    """Group prober bundling all single-byte charset probers (Cyrillic,
    Greek, Bulgarian, Hungarian, Thai and Hebrew encoding models)."""
    def __init__(self):
        CharSetGroupProber.__init__(self)
        self._mProbers = [ \
            SingleByteCharSetProber(Win1251CyrillicModel),
            SingleByteCharSetProber(Koi8rModel),
            SingleByteCharSetProber(Latin5CyrillicModel),
            SingleByteCharSetProber(MacCyrillicModel),
            SingleByteCharSetProber(Ibm866Model),
            SingleByteCharSetProber(Ibm855Model),
            SingleByteCharSetProber(Latin7GreekModel),
            SingleByteCharSetProber(Win1253GreekModel),
            SingleByteCharSetProber(Latin5BulgarianModel),
            SingleByteCharSetProber(Win1251BulgarianModel),
            SingleByteCharSetProber(Latin2HungarianModel),
            SingleByteCharSetProber(Win1250HungarianModel),
            SingleByteCharSetProber(TIS620ThaiModel),
            ]
        # Hebrew is special-cased: a logical-order and a visual-order prober
        # share one HebrewProber that arbitrates between the two variants.
        hebrewProber = HebrewProber()
        logicalHebrewProber = SingleByteCharSetProber(Win1255HebrewModel, constants.False, hebrewProber)
        visualHebrewProber = SingleByteCharSetProber(Win1255HebrewModel, constants.True, hebrewProber)
        hebrewProber.set_model_probers(logicalHebrewProber, visualHebrewProber)
        self._mProbers.extend([hebrewProber, logicalHebrewProber, visualHebrewProber])
        self.reset()
|
from typing import Any, Dict, Set
from snapcraft import project
from snapcraft.internal.project_loader import grammar
from snapcraft.internal import pluginhandler, repo
from ._package_transformer import package_transformer
class PartGrammarProcessor:
    """Process part properties that support grammar.
    Stage packages example:
    >>> from unittest import mock
    >>> import snapcraft
    >>> # Pretend that all packages are valid
    >>> repo = mock.Mock()
    >>> repo.is_valid.return_value = True
    >>> plugin = mock.Mock()
    >>> plugin.stage_packages = [{'try': ['foo']}]
    >>> processor = PartGrammarProcessor(
    ...    plugin=plugin,
    ...    properties={},
    ...    project=snapcraft.project.Project(),
    ...    repo=repo)
    >>> processor.get_stage_packages()
    {'foo'}
    Build packages example:
    >>> from unittest import mock
    >>> import snapcraft
    >>> # Pretend that all packages are valid
    >>> repo = mock.Mock()
    >>> repo.is_valid.return_value = True
    >>> plugin = mock.Mock()
    >>> plugin.build_packages = [{'try': ['foo']}]
    >>> processor = PartGrammarProcessor(
    ...    plugin=plugin,
    ...    properties={},
    ...    project=snapcraft.project.Project(),
    ...    repo=repo)
    >>> processor.get_build_packages()
    {'foo'}
    Source example:
    >>> from unittest import mock
    >>> import snapcraft
    >>> plugin = mock.Mock()
    >>> plugin.properties = {'source': [{'on amd64': 'foo'}, 'else fail']}
    >>> processor = PartGrammarProcessor(
    ...    plugin=plugin,
    ...    properties=plugin.properties,
    ...    project=snapcraft.project.Project(),
    ...    repo=None)
    >>> processor.get_source()
    'foo'
    """
    def __init__(
        self,
        *,
        plugin: pluginhandler.PluginHandler,
        properties: Dict[str, Any],
        project: project.Project,
        repo: "repo.Ubuntu"
    ) -> None:
        self._project = project
        self._repo = repo
        # Raw grammar as declared on the plugin; each is processed lazily and
        # the result cached in the matching name-mangled attribute below.
        self._build_snap_grammar = getattr(plugin, "build_snaps", [])
        self.__build_snaps = set()  # type: Set[str]
        self._build_package_grammar = getattr(plugin, "build_packages", [])
        self.__build_packages = set()  # type: Set[str]
        self._stage_package_grammar = getattr(plugin, "stage_packages", [])
        self.__stage_packages = set()  # type: Set[str]
        # `source` may be declared as a plain string; normalize to a list.
        source_grammar = properties.get("source", [""])
        if not isinstance(source_grammar, list):
            self._source_grammar = [source_grammar]
        else:
            self._source_grammar = source_grammar
        self.__source = ""
    def get_source(self) -> str:
        """Return the source resolved from the grammar (cached after first call)."""
        if not self.__source:
            # The grammar is array-based, even though we only support a single
            # source.
            processor = grammar.GrammarProcessor(
                self._source_grammar, self._project, lambda s: True
            )
            source_array = processor.process()
            if len(source_array) > 0:
                self.__source = source_array.pop()
        return self.__source
    def get_build_snaps(self) -> Set[str]:
        """Return the build snaps resolved from the grammar (cached)."""
        if not self.__build_snaps:
            processor = grammar.GrammarProcessor(
                self._build_snap_grammar,
                self._project,
                repo.snaps.SnapPackage.is_valid_snap,
            )
            self.__build_snaps = processor.process()
        return self.__build_snaps
    def get_build_packages(self) -> Set[str]:
        """Return the build packages resolved from the grammar (cached)."""
        if not self.__build_packages:
            processor = grammar.GrammarProcessor(
                self._build_package_grammar,
                self._project,
                self._repo.build_package_is_valid,
                transformer=package_transformer,
            )
            self.__build_packages = processor.process()
        return self.__build_packages
    def get_stage_packages(self) -> Set[str]:
        """Return the stage packages resolved from the grammar (cached)."""
        if not self.__stage_packages:
            processor = grammar.GrammarProcessor(
                self._stage_package_grammar,
                self._project,
                self._repo.is_valid,
                transformer=package_transformer,
            )
            self.__stage_packages = processor.process()
        return self.__stage_packages
|
from __future__ import unicode_literals
import unittest
class TestEInvoiceRequestLog(unittest.TestCase):
    """Placeholder suite for the EInvoiceRequestLog doctype; no checks yet."""
|
import pytest
from umodbus.server.serial import AbstractSerialServer
@pytest.fixture
def abstract_serial_server():
    """ Return a fresh AbstractSerialServer instance for each test. """
    return AbstractSerialServer()
def test_abstract_serial_server_get_meta_data(abstract_serial_server):
    """ Test if meta data is correctly extracted from request. """
    # get_meta_data() derives the unit id from the first byte of the ADU.
    # NOTE(review): the literal b'\x01x\02\x03' contains a stray 'x' and an
    # octal escape (\02); presumably b'\x01\x02\x03' was intended. Only the
    # first byte matters for this assertion, so the test passes either way.
    assert abstract_serial_server.get_meta_data(b'\x01x\02\x03') ==\
        {'unit_id': 1}
def test_abstract_serial_server_shutdown(abstract_serial_server):
    """ Test if shutdown() raises the internal shutdown flag. """
    # Fixed typo in the test name ("abract" -> "abstract"); pytest still
    # collects it through the test_ prefix, so nothing else changes.
    assert abstract_serial_server._shutdown_request is False
    abstract_serial_server.shutdown()
    assert abstract_serial_server._shutdown_request is True
|
# Parallel input files: line i of each file describes the same test.
panel_file = open('panels.txt','r')
name_file = open('testName.txt','r')
sample_type_file = open("sampleType.txt")
# Output: SQL fragments to paste into TestPanel.csv.
test_panel_results = open("output/testPanelResults.txt", 'w')
panel = []
# NOTE(review): `type` shadows the built-in; renaming would need coordinated
# changes below, so it is documented rather than changed here.
type = []
test_names = []
def get_split_names(name):
    """Split a slash-separated name into its whitespace-trimmed parts."""
    return [part.strip() for part in name.split("/")]
def esc_char(name):
    """Quote `name` for SQL: dollar-quote when it contains an apostrophe,
    otherwise wrap in plain single quotes."""
    quote = "$$" if "'" in name else "'"
    return quote + name + quote
# Load the three aligned input files (row i of each describes the same test).
for line in panel_file:
    panel.append(line.strip())
panel_file.close()
for line in sample_type_file:
    type.append(line.strip())
sample_type_file.close()
for line in name_file:
    test_names.append(line.strip())
name_file.close()
test_panel_results.write("Below should be pasted to TestPanel.csv\n\n")
# Emit one SQL VALUES fragment per test that belongs to a panel
# (rows whose panel cell is effectively empty are skipped).
for row in range(0, len(test_names)):
    if len(panel[row]) > 1:
        test_description = esc_char(test_names[row] + "(" + type[row] + ")")
        test_panel_results.write("nextval( 'panel_item_seq' ) , (select id from panel where name = '" + panel[row] + "')")
        test_panel_results.write(" , (select id from test where description = " + test_description + ") , null , now() \n")
test_panel_results.close()
print "Done look for results in testPanelResults.txt"
|
from __future__ import unicode_literals
from django import forms
from django.utils.translation import ugettext_lazy as _
from shoop.admin.form_part import FormPart, TemplatedFormDef
from shoop.core.models import Shop
from shoop.discount_pricing.models import DiscountedProductPrice
class DiscountPricingForm(forms.Form):
    """Per-shop discount price editor for a single product."""
    def __init__(self, **kwargs):
        # `product` is required; the remaining kwargs go to forms.Form.
        self.product = kwargs.pop("product")
        super(DiscountPricingForm, self).__init__(**kwargs)
        self.shops = []
        if self.product:
            self._build_fields()
    def _build_fields(self):
        """Create one optional DecimalField per shop, seeded with the
        product's current discount price in that shop (if any)."""
        self.shops = list(Shop.objects.all())
        prices_by_shop_and_group = dict(
            (shop_id, price)
            for (shop_id, price)
            in DiscountedProductPrice.objects.filter(product=self.product)
            .values_list("shop_id", "price_value")
        )
        for shop in self.shops:
            name = self._get_field_name(shop)
            price = prices_by_shop_and_group.get(shop.id)
            price_field = forms.DecimalField(
                min_value=0, initial=price,
                label=_("Price (%s)") % shop, required=False
            )
            self.fields[name] = price_field
    def _get_field_name(self, shop):
        # Field key convention: "s_<shop id>".
        return "s_%d" % shop.id
    def _process_single_save(self, shop):
        """Create/update this shop's DiscountedProductPrice from the cleaned
        value, or delete the row when the field was left empty."""
        name = self._get_field_name(shop)
        value = self.cleaned_data.get(name)
        clear = (value is None or value < 0)
        if clear:
            DiscountedProductPrice.objects.filter(product=self.product, shop=shop).delete()
        else:
            (spp, created) = DiscountedProductPrice.objects.get_or_create(
                product=self.product, shop=shop,
                defaults={'price_value': value})
            if not created:
                spp.price_value = value
                spp.save()
    def save(self):
        """Persist all per-shop prices; no-op if the form is unchanged."""
        if not self.has_changed():  # No changes, so no need to do anything.
            return
        for shop in self.shops:
            self._process_single_save(shop)
    def get_shop_field(self, shop):
        """Return the bound form field for `shop` (template helper)."""
        name = self._get_field_name(shop)
        return self[name]
class DiscountPricingFormPart(FormPart):
    """Admin form part embedding DiscountPricingForm in the product editor."""
    priority = 10
    def get_form_defs(self):
        """Yield the templated form definition for the discount pricing tab."""
        yield TemplatedFormDef(
            name="discount_pricing",
            form_class=DiscountPricingForm,
            template_name="shoop/admin/discount_pricing/form_part.jinja",
            required=False,
            kwargs={"product": self.object}
        )
    def form_valid(self, form):
        # `form` is the composite form group; save only our own part.
        form["discount_pricing"].save()
|
import base64
import netsvc
from osv import osv
from osv import fields
from tools.translate import _
import tools
def _reopen(self, wizard_id, res_model, res_id):
    """Build an act_window action that re-opens the wizard form in a dialog,
    preserving the original target model/record in the context."""
    action = {
        'type': 'ir.actions.act_window',
        'view_mode': 'form',
        'view_type': 'form',
        'res_id': wizard_id,
        'res_model': self._name,
        'target': 'new',
        # save original model in context, otherwise
        # it will be lost on the action's context switch
        'context': {
            'mail.compose.target.model': res_model,
            'mail.compose.target.id': res_id,
        },
    }
    return action
class mail_compose_message(osv.osv_memory):
    """Extend the mail composition wizard with email-template support:
    template selection and rendering, attachment generation, and saving the
    current message as a new template."""
    _inherit = 'mail.compose.message'

    def _get_templates(self, cr, uid, context=None):
        """
        Return Email Template of particular Model.
        """
        if context is None:
            context = {}
        record_ids = []
        email_template = self.pool.get('email.template')
        model = False
        # Resolve the target model: the replied-to message first, then the
        # model stashed by _reopen(), then the caller's active model.
        if context.get('message_id'):
            mail_message = self.pool.get('mail.message')
            message_data = mail_message.browse(cr, uid, int(context.get('message_id')), context)
            model = message_data.model
        elif context.get('mail.compose.target.model') or context.get('active_model'):
            model = context.get('mail.compose.target.model', context.get('active_model'))
        if model:
            record_ids = email_template.search(cr, uid, [('model', '=', model)])
            # Trailing (False, '') entry lets the user select "no template".
            return email_template.name_get(cr, uid, record_ids, context) + [(False,'')]
        return []

    _columns = {
        'use_template': fields.boolean('Use Template'),
        'template_id': fields.selection(_get_templates, 'Template',
                                        size=-1 # means we want an int db column
                                        ),
    }

    _defaults = {
        # Fixed: avoid a mutable default argument in the lambda; behavior is
        # unchanged (an absent context still yields False).
        'template_id': lambda self, cr, uid, context=None: (context or {}).get('mail.compose.template_id', False)
    }

    def _attachments_to_ids(self, cr, uid, ids, values, context=None):
        """Convert values['attachments'] (a {filename: base64 data} dict)
        into ir.attachment records and store their ids under
        values['attachment_ids']. No-op when there are no attachments.
        Returns `values` for convenience."""
        if values.get('attachments'):
            attachment = values.pop('attachments')
            attachment_obj = self.pool.get('ir.attachment')
            att_ids = []
            for fname, fcontent in attachment.iteritems():
                data_attach = {
                    'name': fname,
                    'datas': fcontent,
                    'datas_fname': fname,
                    'description': fname,
                    'res_model': self._name,
                    'res_id': ids[0] if ids else False
                }
                att_ids.append(attachment_obj.create(cr, uid, data_attach))
            values['attachment_ids'] = att_ids
        return values

    def on_change_template(self, cr, uid, ids, use_template, template_id, email_from=None, email_to=None, context=None):
        """Recompute the wizard field values after the template selection
        changed.

        In mass-mail mode the raw template values are kept (they are rendered
        when actually sent by super.send_mail()) and report/static attachments
        are generated here; otherwise the template is rendered immediately
        for the target record. Returns an on_change dict {'value': values}.

        Refactored: the attachment-dict-to-ir.attachment conversion that was
        duplicated in both branches now lives in _attachments_to_ids().
        """
        if context is None:
            context = {}
        values = {}
        if template_id:
            res_id = context.get('mail.compose.target.id') or context.get('active_id') or False
            if context.get('mail.compose.message.mode') == 'mass_mail':
                # use the original template values - to be rendered when actually sent
                # by super.send_mail()
                values = self.pool.get('email.template').read(cr, uid, template_id, self.fields_get_keys(cr, uid), context)
                report_xml_pool = self.pool.get('ir.actions.report.xml')
                template = self.pool.get('email.template').get_email_template(cr, uid, template_id, res_id, context)
                values['attachments'] = False
                attachments = {}
                if template.report_template:
                    report_name = self.render_template(cr, uid, template.report_name, template.model, res_id, context=context)
                    report_service = 'report.' + report_xml_pool.browse(cr, uid, template.report_template.id, context).report_name
                    # Ensure report is rendered using template's language
                    ctx = context.copy()
                    if template.lang:
                        ctx['lang'] = self.render_template(cr, uid, template.lang, template.model, res_id, context)
                    service = netsvc.LocalService(report_service)
                    (result, format) = service.create(cr, uid, [res_id], {'model': template.model}, ctx)
                    result = base64.b64encode(result)
                    if not report_name:
                        report_name = report_service
                    ext = "." + format
                    if not report_name.endswith(ext):
                        report_name += ext
                    attachments[report_name] = result
                # Add document attachments
                for attach in template.attachment_ids:
                    # keep the bytes as fetched from the db, base64 encoded
                    attachments[attach.datas_fname] = attach.datas
                values['attachments'] = attachments
                values = self._attachments_to_ids(cr, uid, ids, values, context=context)
            else:
                # render the mail as one-shot
                values = self.pool.get('email.template').generate_email(cr, uid, template_id, res_id, context=context)
                # retrofit generated attachments in the expected field format
                values = self._attachments_to_ids(cr, uid, ids, values, context=context)
        else:
            # restore defaults
            values = self.default_get(cr, uid, self.fields_get_keys(cr, uid), context)
        values.update(use_template=use_template, template_id=template_id)
        return {'value': values}

    def template_toggle(self, cr, uid, ids, context=None):
        """Toggle use_template; when switching it off, reset the form as if
        the empty template had been chosen. Returns an action re-opening the
        wizard (processes only the first record, like the original)."""
        for record in self.browse(cr, uid, ids, context=context):
            had_template = record.use_template
            record.write({'use_template': not(had_template)})
            if had_template:
                # equivalent to choosing an empty template
                onchange_defaults = self.on_change_template(cr, uid, record.id, not(had_template),
                                                            False, email_from=record.email_from,
                                                            email_to=record.email_to, context=context)
                record.write(onchange_defaults['value'])
            return _reopen(self, record.id, record.model, record.res_id)

    def save_as_template(self, cr, uid, ids, context=None):
        """Create an email.template from the wizard's current content, select
        it, and re-open the wizard with the new template active."""
        if context is None:
            context = {}
        email_template = self.pool.get('email.template')
        model_pool = self.pool.get('ir.model')
        for record in self.browse(cr, uid, ids, context=context):
            model = record.model or context.get('active_model')
            model_ids = model_pool.search(cr, uid, [('model', '=', model)])
            model_id = model_ids and model_ids[0] or False
            model_name = ''
            if model_id:
                model_name = model_pool.browse(cr, uid, model_id, context=context).name
            template_name = "%s: %s" % (model_name, tools.ustr(record.subject))
            values = {
                'name': template_name,
                'email_from': record.email_from or False,
                'subject': record.subject or False,
                'body_text': record.body_text or False,
                'email_to': record.email_to or False,
                'email_cc': record.email_cc or False,
                'email_bcc': record.email_bcc or False,
                'reply_to': record.reply_to or False,
                'model_id': model_id or False,
                'attachment_ids': [(6, 0, [att.id for att in record.attachment_ids])]
            }
            template_id = email_template.create(cr, uid, values, context=context)
            record.write({'template_id': template_id,
                          'use_template': True})
            # _reopen same wizard screen with new template preselected
            return _reopen(self, record.id, model, record.res_id)

    # override the basic implementation
    def render_template(self, cr, uid, template, model, res_id, context=None):
        """Delegate template rendering to the email.template engine."""
        return self.pool.get('email.template').render_template(cr, uid, template, model, res_id, context=context)
|
from __future__ import absolute_import, unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds ScheduleExperience: a one-to-one extension of Schedule recording
    # which messaging experience variant the schedule belongs to.
    dependencies = [
        ('schedules', '0005_auto_20171010_1722'),
    ]
    operations = [
        migrations.CreateModel(
            name='ScheduleExperience',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('experience_type', models.PositiveSmallIntegerField(default=0, choices=[(0, b'Recurring Nudge and Upgrade Reminder'), (1, b'Course Updates')])),
                ('schedule', models.OneToOneField(related_name='experience', to='schedules.Schedule', on_delete=models.CASCADE)),
            ],
        ),
    ]
|
from openerp.addons.financial.tests.financial_test_classes import \
FinancialTestCase
class ManualFinancialProcess(FinancialTestCase):
    """Exercise financial.move.action_view_financial() for one, many and
    zero selected moves."""
    def setUp(self):
        self.financial_model = self.env['financial.move']
        super(ManualFinancialProcess, self).setUp()
    def test_01_check_return_views(self):
        """Check if view is correctly called for python code"""
        # test for len(financial.move) == 1
        financial_move_id = self.financial_model.search([], limit=1)
        action = financial_move_id.action_view_financial('2receive')
        self.assertEqual(
            action.get('display_name'),
            'financial.move.debt.2receive.form (in financial)')
        self.assertEqual(
            action.get('res_id'), financial_move_id.id)
        action = financial_move_id.action_view_financial('2pay')
        self.assertEqual(
            action.get('display_name'),
            'financial.move.debt.2pay.form (in financial)')
        self.assertEqual(
            action.get('res_id'), financial_move_id.id)
        # test for len(financial.move) > 1
        financial_move_id = self.financial_model.search([], limit=2)
        action = financial_move_id.action_view_financial('2pay')
        # Multi-record: the action carries a domain over the selected ids.
        self.assertEqual(action.get('domain')[0][2], financial_move_id.ids)
        # test for len(financial.move) < 1
        action = self.financial_model.action_view_financial('2pay')
        self.assertEqual(action.get('type'), 'ir.actions.act_window_close')
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from shuup.core.models import Category
from shuup.xtheme import TemplatedPlugin
from shuup.xtheme.plugins.forms import GenericPluginForm, TranslatableField
class CategoryLinksConfigForm(GenericPluginForm):
    """
    A configuration form for the CategoryLinksPlugin
    """
    def populate(self):
        """
        A custom populate method to display category choices
        """
        # Copy the plugin's declared (name, field) pairs, seeding each
        # field's initial value from the stored plugin config.
        for field in self.plugin.fields:
            if isinstance(field, tuple):
                name, value = field
                value.initial = self.plugin.config.get(name, value.initial)
                self.fields[name] = value
        # "categories" is declared as a bare name in plugin.fields, so its
        # ModelMultipleChoiceField is built here instead.
        self.fields["categories"] = forms.ModelMultipleChoiceField(
            queryset=Category.objects.all_visible(customer=None),
            required=False,
            initial=self.plugin.config.get("categories", None),
        )
    def clean(self):
        """
        A custom clean method to save category configuration information in a serializable form
        """
        cleaned_data = super(CategoryLinksConfigForm, self).clean()
        categories = cleaned_data.get("categories", [])
        # Store plain primary keys so the config stays JSON-serializable.
        cleaned_data["categories"] = [category.pk for category in categories if hasattr(category, "pk")]
        return cleaned_data
class CategoryLinksPlugin(TemplatedPlugin):
    """
    A plugin for displaying links to visible categories on the shop front
    """
    identifier = "category_links"
    name = _("Category Links")
    template_name = "shuup/xtheme/plugins/category_links.jinja"
    editor_form_class = CategoryLinksConfigForm
    fields = [
        ("title", TranslatableField(label=_("Title"), required=False, initial="")),
        ("show_all_categories", forms.BooleanField(
            label=_("Show all categories"),
            required=False,
            initial=True,
            help_text=_("All categories are shown, even if not selected"),
        )),
        "categories",
    ]
    def get_context_data(self, context):
        """
        A custom get_context_data method to return only visible categories
        for request customer.

        :param context: rendering context, expected to contain "request"
        :return: dict with the translated "title" and a "categories" queryset
        """
        selected_categories = self.config.get("categories", [])
        show_all_categories = self.config.get("show_all_categories", True)
        request = context.get("request")
        # Fix: getattr() without a default is just attribute access and raised
        # AttributeError for requests lacking the shop middleware attributes
        # (or when "request" is absent from the context); fall back to None,
        # which all_visible() accepts (the config form already uses it).
        categories = Category.objects.all_visible(
            customer=getattr(request, "customer", None),
            shop=getattr(request, "shop", None)
        )
        if not show_all_categories:
            categories = categories.filter(id__in=selected_categories)
        return {
            "title": self.get_translated_value("title"),
            "categories": categories,
        }
|
from .base import BaseHandler
class TestRoute(BaseHandler):
    """Debug route: render the ``<file>.jade`` template with an H1 shown."""
    def get(self, file):
        # Derive the template name from the captured path segment.
        template_name = str(file) + '.jade'
        return self.render(template_name, show_h1=1)
|
import sys
import time
import sys
# NOTE(review): `sys` is imported twice above -- harmless, the duplicate
# could be dropped.
# Shared timing state for loop_count() below (values in milliseconds).
# `min`/`max`/`sum` deliberately shadow the builtins in this module.
num = 1000
print_granularity = 1000
count = 0
first = True
start = 0
gran_start = 0
min = 0
max = 0
avg = 0
sum = 0
total = 0
def set_print_granularity(p):
    """Update the module-wide reporting interval and announce the new value."""
    global print_granularity
    print_granularity = p
    print("%s: print granularity = %s" % (sys.argv[0], p))
def loop_count():
    """Record one loop iteration and print aggregated timing statistics
    every `print_granularity` iterations.

    Tracks the wall-clock delta between consecutive calls (msec) in the
    module globals `min`/`max`/`sum`, which are reset at each report window.
    """
    global min, max, avg, total, gran_start, sum, start, first, count
    now = round(time.time() * 1000)
    if not first:
        # Fold the time since the previous call into the window stats.
        elapsed = now - start
        if elapsed < min: min = elapsed
        if elapsed > max: max = elapsed
        sum = sum + elapsed
    start = now
    count = count + 1
    total = total + 1
    if count % print_granularity == 0 and not first:
        # NOTE(review): gran_elapsed is computed but never used -- presumably
        # it was meant to be printed instead of `sum`; confirm before removing.
        gran_elapsed = now - gran_start
        gran_start = now
        avg = sum / print_granularity
        print("%s: last %s run stats in msec \t\t elapsed = %s \t min = %s \t max = %s \t avg = %s \t\t total loops = %s" % (sys.argv[0], print_granularity, sum, min, max, avg, total))
#        sys.stdout.write("-")
#        sys.stdout.flush()
    if first or count % print_granularity == 0:
        # Start a fresh measurement window.
        gran_start = now
        min = 10e10
        max = -10e10
        avg = 0
        sum = 0
    first = False
|
from openerp import models, fields, api
class StockPicking(models.Model):
    """Extend stock.picking with shipping-provider integration: push
    transfers to the carrier, invoice shipping costs, track and cancel."""
    _inherit = 'stock.picking'
    # Cost returned by the carrier when the picking is sent to the shipper.
    carrier_price = fields.Float(string="Shipping Cost", readonly=True)
    delivery_type = fields.Selection(related='carrier_id.delivery_type', readonly=True)
    @api.multi
    def do_transfer(self):
        """After the transfer, push the picking to the external shipper
        unless the carrier uses classic 'grid' pricing."""
        res = super(StockPicking, self).do_transfer()
        if self.carrier_id and self.carrier_id.delivery_type != 'grid':
            self.send_to_shipper()
        return res
    # Signature due to strange old api methods
    @api.model
    def _prepare_shipping_invoice_line(self, picking, invoice):
        """Build the invoice-line values for the shipping cost of `picking`,
        or return None when nothing should be invoiced."""
        picking.ensure_one()
        invoice.ensure_one()
        carrier = picking.carrier_id
        # No carrier
        if not carrier:
            return None
        # Carrier already invoiced on the sale order
        if any(inv_line.product_id.id == carrier.product_id.id for inv_line in invoice.invoice_line_ids):
            return None
        # Classic carrier
        if carrier.delivery_type == 'grid':
            return super(StockPicking, self)._prepare_shipping_invoice_line(picking, invoice)
        # Shipping provider
        price = picking.carrier_price
        # Income account: product first, then its category as fallback.
        account_id = carrier.product_id.property_account_income.id
        if not account_id:
            account_id = carrier.product_id.categ_id.property_account_income_categ.id
        taxes = carrier.product_id.taxes_id
        taxes_ids = taxes.ids
        # Apply original SO fiscal position
        if picking.sale_id.fiscal_position_id:
            fpos = picking.sale_id.fiscal_position_id
            account_id = fpos.map_account(account_id)
            taxes_ids = fpos.map_tax(taxes).ids
        res = {
            'name': carrier.name,
            'invoice_id': invoice.id,
            'uos_id': carrier.product_id.uos_id.id,
            'product_id': carrier.product_id.id,
            'account_id': account_id,
            'price_unit': price,
            'quantity': 1,
            'invoice_line_tax_ids': [(6, 0, taxes_ids)],
        }
        return res
    @api.one
    def send_to_shipper(self):
        """Request shipment from the carrier; store price and tracking ref
        and log a message on the picking."""
        res = self.carrier_id.send_shipping(self)[0]
        self.carrier_price = res['exact_price']
        self.carrier_tracking_ref = res['tracking_number']
        msg = "Shipment sent to carrier %s for expedition with tracking number %s" % (self.carrier_id.name, self.carrier_tracking_ref)
        self.message_post(body=msg)
    @api.multi
    def open_website_url(self):
        """Open the carrier's tracking page in a new browser tab."""
        self.ensure_one()
        client_action = {'type': 'ir.actions.act_url',
                         'name': "Shipment Tracking Page",
                         'target': 'new',
                         'url': self.carrier_id.get_tracking_link(self)[0]
                         }
        return client_action
    @api.one
    def cancel_shipment(self):
        """Cancel the shipment at the carrier and clear the tracking ref."""
        self.carrier_id.cancel_shipment(self)
        msg = "Shipment %s cancelled" % self.carrier_tracking_ref
        self.message_post(body=msg)
        self.carrier_tracking_ref = False
|
import decimal
import pytest
from django.conf import settings
from shuup.core.models import Shipment, ShippingStatus, StockBehavior
from shuup.testing.factories import (
add_product_to_order, create_empty_order, create_product,
get_default_shop, get_default_supplier
)
from shuup.utils.excs import Problem
@pytest.mark.django_db
def test_shipment_identifier():
    """Shipment identifiers start with '<order_pk>/<index>' and shipping
    everything closes the order for editing."""
    shop = get_default_shop()
    supplier = get_default_supplier()
    order = _get_order(shop, supplier)
    product_lines = order.lines.exclude(product_id=None)
    for line in product_lines:
        for i in range(0, int(line.quantity)):
            shipment = order.create_shipment({line.product: 1}, supplier=supplier)
            expected_key_start = "%s/%s" % (order.pk, i)
            assert shipment.identifier.startswith(expected_key_start)
        assert order.shipments.count() == int(line.quantity)
    assert order.shipping_status == ShippingStatus.FULLY_SHIPPED  # Check that order is now fully shipped
    assert not order.can_edit()
@pytest.mark.django_db
def test_shipment_creation_from_unsaved_shipment():
    """A caller-provided unsaved Shipment is saved and used as-is."""
    shop = get_default_shop()
    supplier = get_default_supplier()
    order = _get_order(shop, supplier)
    product_lines = order.lines.exclude(product_id=None)
    for line in product_lines:
        for i in range(0, int(line.quantity)):
            unsaved_shipment = Shipment(order=order, supplier=supplier)
            shipment = order.create_shipment({line.product: 1}, shipment=unsaved_shipment)
            expected_key_start = "%s/%s" % (order.pk, i)
            assert shipment.identifier.startswith(expected_key_start)
        assert order.shipments.count() == int(line.quantity)
@pytest.mark.django_db
def test_shipment_creation_without_supplier_and_shipment():
    """create_shipment() requires either a supplier or a prebuilt shipment."""
    shop = get_default_shop()
    supplier = get_default_supplier()
    order = _get_order(shop, supplier)
    product_lines = order.lines.exclude(product_id=None)
    for line in product_lines:
        for i in range(0, int(line.quantity)):
            with pytest.raises(AssertionError):
                order.create_shipment({line.product: 1})
    assert order.shipments.count() == 0
@pytest.mark.django_db
def test_shipment_creation_with_invalid_unsaved_shipment():
    """A prebuilt shipment pointing at a different order must be rejected."""
    shop = get_default_shop()
    supplier = get_default_supplier()
    order = _get_order(shop, supplier)
    second_order = create_empty_order(shop=shop)
    second_order.full_clean()
    second_order.save()
    product_lines = order.lines.exclude(product_id=None)
    for line in product_lines:
        for i in range(0, int(line.quantity)):
            with pytest.raises(AssertionError):
                # Shipment belongs to second_order, not `order`.
                unsaved_shipment = Shipment(supplier=supplier, order=second_order)
                order.create_shipment({line.product: 1}, shipment=unsaved_shipment)
    assert order.shipments.count() == 0
@pytest.mark.django_db
def test_partially_shipped_order_status():
    """Shipping part of a line flips the order to PARTIALLY_SHIPPED and
    locks it against further editing."""
    shop = get_default_shop()
    supplier = get_default_supplier()
    order = _get_order(shop, supplier)
    assert order.can_edit()
    first_product_line = order.lines.exclude(product_id=None).first()
    assert first_product_line.quantity > 1
    order.create_shipment({first_product_line.product: 1}, supplier=supplier)
    assert order.shipping_status == ShippingStatus.PARTIALLY_SHIPPED
    assert not order.can_edit()
@pytest.mark.django_db
def test_shipment_delete():
    """Soft-deleting a shipment hides it from all_except_deleted() and
    recomputes the order's shipping status."""
    shop = get_default_shop()
    supplier = get_default_supplier()
    order = _get_order(shop, supplier)
    assert order.can_edit()
    first_product_line = order.lines.exclude(product_id=None).first()
    assert first_product_line.quantity > 1
    shipment = order.create_shipment({first_product_line.product: 1}, supplier=supplier)
    assert order.shipping_status == ShippingStatus.PARTIALLY_SHIPPED
    assert order.shipments.all().count() == 1
    # Test shipment delete
    shipment.soft_delete()
    # Soft delete keeps the row but removes it from the "live" queryset.
    assert order.shipments.all().count() == 1
    assert order.shipments.all_except_deleted().count() == 0
    # Check the shipping status update
    assert order.shipping_status == ShippingStatus.NOT_SHIPPED
@pytest.mark.django_db
def test_shipment_with_insufficient_stock():
    """Shipping more than the physical stock raises Problem; shipping within
    stock (or after restocking) succeeds."""
    if "shuup.simple_supplier" not in settings.INSTALLED_APPS:
        pytest.skip("Need shuup.simple_supplier in INSTALLED_APPS")
    from shuup_tests.simple_supplier.utils import get_simple_supplier
    shop = get_default_shop()
    supplier = get_simple_supplier()
    order = _get_order(shop, supplier, stocked=True)
    product_line = order.lines.products().first()
    product = product_line.product
    assert product_line.quantity == 15
    supplier.adjust_stock(product.pk, delta=10)
    stock_status = supplier.get_stock_status(product.pk)
    assert stock_status.physical_count == 10
    order.create_shipment({product: 5}, supplier=supplier)
    assert order.shipping_status == ShippingStatus.PARTIALLY_SHIPPED
    assert order.shipments.all().count() == 1
    # Only 5 units left in stock; shipping 10 must fail.
    with pytest.raises(Problem):
        order.create_shipment({product: 10}, supplier=supplier)
    # Should be fine after adding more stock
    supplier.adjust_stock(product.pk, delta=5)
    order.create_shipment({product: 10}, supplier=supplier)
def _get_order(shop, supplier, stocked=False):
    """Create and return a saved order containing one 15-unit product line
    (see _get_product_data for the product definition)."""
    order = create_empty_order(shop=shop)
    order.full_clean()
    order.save()
    for product_data in _get_product_data(stocked):
        # `quantity` and `sku` are consumed here; the rest goes to create_product.
        quantity = product_data.pop("quantity")
        product = create_product(
            sku=product_data.pop("sku"),
            shop=shop,
            supplier=supplier,
            default_price=3.33,
            **product_data)
        add_product_to_order(order, supplier, product, quantity=quantity, taxless_base_unit_price=1)
    order.cache_prices()
    order.check_all_verified()
    order.save()
    return order
def _get_product_data(stocked=False):
    """
    Return the list of product definitions used to populate test orders.

    ``stocked`` controls whether the product tracks inventory.
    """
    behavior = StockBehavior.STOCKED if stocked else StockBehavior.UNSTOCKED
    return [
        {
            "sku": "sku1234",
            "net_weight": decimal.Decimal("1"),
            "gross_weight": decimal.Decimal("43.34257"),
            "quantity": decimal.Decimal("15"),
            "stock_behavior": behavior,
        }
    ]
|
"""
Test cases to cover Accounts-related behaviors of the User API application
"""
import datetime
import hashlib
import json
from copy import deepcopy
from unittest import mock
import ddt
import pytz
from django.conf import settings
from django.test.testcases import TransactionTestCase
from django.test.utils import override_settings
from django.urls import reverse
from edx_name_affirmation.api import create_verified_name
from edx_name_affirmation.statuses import VerifiedNameStatus
from rest_framework import status
from rest_framework.test import APIClient, APITestCase
from common.djangoapps.student.models import PendingEmailChange, UserProfile
from common.djangoapps.student.tests.factories import TEST_PASSWORD, RegistrationFactory, UserFactory
from openedx.core.djangoapps.oauth_dispatch.jwt import create_jwt_for_user
from openedx.core.djangoapps.user_api.accounts import ACCOUNT_VISIBILITY_PREF_KEY
from openedx.core.djangoapps.user_api.models import UserPreference
from openedx.core.djangoapps.user_api.preferences.api import set_user_preference
from openedx.core.djangolib.testing.utils import CacheIsolationTestCase, skip_unless_lms
from .. import ALL_USERS_VISIBILITY, CUSTOM_VISIBILITY, PRIVATE_VISIBILITY
# Fixed timestamp so profile-image URLs (which embed an upload-time version) are predictable.
TEST_PROFILE_IMAGE_UPLOADED_AT = datetime.datetime(2002, 1, 9, 15, 43, 1, tzinfo=pytz.UTC)
# Copy of the configured image backend, pointed at a test-local base URL.
TEST_PROFILE_IMAGE_BACKEND = deepcopy(settings.PROFILE_IMAGE_BACKEND)
TEST_PROFILE_IMAGE_BACKEND['options']['base_url'] = '/profile-images/'
# Canned profile values applied by UserAPITestCase.create_mock_profile().
TEST_BIO_VALUE = "Tired mother of twins"
TEST_LANGUAGE_PROFICIENCY_CODE = "hi"
class UserAPITestCase(APITestCase):
    """
    The base class for all tests of the User API.

    Provides four pre-built API clients (anonymous, the default ``self.client``
    for ``self.user``, ``different_client``/``different_user``, and
    ``staff_client``/``staff_user``), HTTP helpers that assert the expected
    status code, and fixture builders for profiles/registrations.
    """
    VERIFIED_NAME = "Verified User"

    def setUp(self):
        super().setUp()
        self.anonymous_client = APIClient()
        self.different_user = UserFactory.create(password=TEST_PASSWORD)
        self.different_client = APIClient()
        self.staff_user = UserFactory(is_staff=True, password=TEST_PASSWORD)
        self.staff_client = APIClient()
        self.user = UserFactory.create(password=TEST_PASSWORD)  # will be assigned to self.client by default

    def login_client(self, api_client, user):
        """Helper method for getting the client and user and logging in. Returns client."""
        # Both arguments are attribute names (e.g. "staff_client", "staff_user").
        client = getattr(self, api_client)
        user = getattr(self, user)
        client.login(username=user.username, password=TEST_PASSWORD)
        return client

    def send_post(self, client, json_data, content_type='application/json', expected_status=201):
        """
        Helper method for sending a post to the server, defaulting to application/json content_type.
        Verifies the expected status and returns the response.
        """
        # pylint: disable=no-member
        response = client.post(self.url, data=json.dumps(json_data), content_type=content_type)
        assert expected_status == response.status_code
        return response

    def send_patch(self, client, json_data, content_type="application/merge-patch+json", expected_status=200):
        """
        Helper method for sending a patch to the server, defaulting to application/merge-patch+json content_type.
        Verifies the expected status and returns the response.
        """
        # pylint: disable=no-member
        response = client.patch(self.url, data=json.dumps(json_data), content_type=content_type)
        assert expected_status == response.status_code
        return response

    def post_search_api(self, client, json_data, content_type='application/json', expected_status=200):
        """
        Helper method for sending a post to the email-search endpoint, defaulting to
        application/json content_type. Verifies the expected status and returns the response.
        """
        # pylint: disable=no-member
        response = client.post(self.search_api_url, data=json.dumps(json_data), content_type=content_type)
        assert expected_status == response.status_code
        return response

    def send_get(self, client, query_parameters=None, expected_status=200):
        """
        Helper method for sending a GET to the server. Verifies the expected status and returns the response.
        """
        url = self.url + '?' + query_parameters if query_parameters else self.url  # pylint: disable=no-member
        response = client.get(url)
        assert expected_status == response.status_code
        return response

    # pylint: disable=no-member
    def send_put(self, client, json_data, content_type="application/json", expected_status=204):
        """
        Helper method for sending a PUT to the server. Verifies the expected status and returns the response.
        """
        response = client.put(self.url, data=json.dumps(json_data), content_type=content_type)
        assert expected_status == response.status_code
        return response

    # pylint: disable=no-member
    def send_delete(self, client, expected_status=204):
        """
        Helper method for sending a DELETE to the server. Verifies the expected status and returns the response.
        """
        response = client.delete(self.url)
        assert expected_status == response.status_code
        return response

    def create_mock_profile(self, user):
        """
        Helper method that creates a mock profile for the specified user
        :return:
        """
        legacy_profile = UserProfile.objects.get(id=user.id)
        legacy_profile.country = "US"
        legacy_profile.state = "MA"
        legacy_profile.level_of_education = "m"
        legacy_profile.year_of_birth = 2000
        legacy_profile.goals = "world peace"
        legacy_profile.mailing_address = "Park Ave"
        legacy_profile.gender = "f"
        legacy_profile.bio = TEST_BIO_VALUE
        legacy_profile.profile_image_uploaded_at = TEST_PROFILE_IMAGE_UPLOADED_AT
        legacy_profile.language_proficiencies.create(code=TEST_LANGUAGE_PROFICIENCY_CODE)
        legacy_profile.phone_number = "+18005555555"
        legacy_profile.save()

    def create_mock_verified_name(self, user):
        """
        Helper method to create an approved VerifiedName entry in name affirmation.
        """
        legacy_profile = UserProfile.objects.get(id=user.id)
        create_verified_name(user, self.VERIFIED_NAME, legacy_profile.name, status=VerifiedNameStatus.APPROVED)

    def create_user_registration(self, user):
        """
        Helper method that creates a registration object for the specified user
        """
        RegistrationFactory(user=user)

    def _verify_profile_image_data(self, data, has_profile_image):
        """
        Verify the profile image data in a GET response for self.user
        corresponds to whether the user has or hasn't set a profile
        image.
        """
        # BUG FIX: the template previously hard-coded "(unknown)" where the
        # filename placeholder belongs.  The computed ``filename`` kwarg passed
        # to ``format`` below was silently ignored (str.format ignores extra
        # keyword arguments), so the hashed/default filename was never checked.
        template = '{root}/{filename}_{{size}}.{extension}'
        if has_profile_image:
            url_root = 'http://example-storage.com/profile-images'
            filename = hashlib.md5(('secret' + self.user.username).encode('utf-8')).hexdigest()
            file_extension = 'jpg'
            template += '?v={}'.format(TEST_PROFILE_IMAGE_UPLOADED_AT.strftime("%s"))
        else:
            url_root = 'http://testserver/static'
            filename = 'default'
            file_extension = 'png'
        template = template.format(root=url_root, filename=filename, extension=file_extension)
        assert data['profile_image'] == {'has_image': has_profile_image,
                                         'image_url_full': template.format(size=50),
                                         'image_url_small': template.format(size=10)}
@ddt.ddt
@skip_unless_lms
class TestOwnUsernameAPI(CacheIsolationTestCase, UserAPITestCase):
    """
    Unit tests for the "own username" endpoint of the Accounts API.
    """

    ENABLED_CACHES = ['default']

    def setUp(self):
        super().setUp()
        self.url = reverse("own_username_api")

    def _verify_get_own_username(self, queries, expected_status=200):
        """
        Issue a GET inside a query-count guard and, on success, check that the
        payload holds exactly the requesting user's username.
        """
        with self.assertNumQueries(queries):
            response = self.send_get(self.client, expected_status=expected_status)
        if expected_status == 200:
            payload = response.data
            assert len(payload) == 1
            assert payload['username'] == self.user.username

    def test_get_username(self):
        """
        A logged-in client can retrieve her own username.
        """
        self.client.login(username=self.user.username, password=TEST_PASSWORD)
        self._verify_get_own_username(17)

    def test_get_username_inactive(self):
        """
        A logged-in but deactivated user can still retrieve their username.
        """
        self.client.login(username=self.user.username, password=TEST_PASSWORD)
        self.user.is_active = False
        self.user.save()
        self._verify_get_own_username(17)

    def test_get_username_not_logged_in(self):
        """
        An anonymous request for the username is rejected with 401.
        """
        # verify that the endpoint is inaccessible when not logged in
        self._verify_get_own_username(13, expected_status=401)
@ddt.ddt
@skip_unless_lms
@mock.patch('openedx.core.djangoapps.user_api.accounts.image_helpers._PROFILE_IMAGE_SIZES', [50, 10])
@mock.patch.dict(
    'django.conf.settings.PROFILE_IMAGE_SIZES_MAP',
    {'full': 50, 'small': 10},
    clear=True
)
class TestAccountsAPI(CacheIsolationTestCase, UserAPITestCase):
    """
    Unit tests for the Accounts API.
    """
    ENABLED_CACHES = ['default']
    # Expected number of SQL queries for a full account GET by another user;
    # used with assertNumQueries. Update deliberately when query behavior changes.
    TOTAL_QUERY_COUNT = 27
    # Number of top-level fields in a full (owner/staff) account response.
    FULL_RESPONSE_FIELD_COUNT = 30
def setUp(self):
super().setUp()
self.url = reverse("accounts_api", kwargs={'username': self.user.username})
self.search_api_url = reverse("accounts_search_emails_api")
def _set_user_age_to_10_years(self, user):
"""
Sets the given user's age to 10.
Returns the calculated year of birth.
"""
legacy_profile = UserProfile.objects.get(id=user.id)
current_year = datetime.datetime.now().year
year_of_birth = current_year - 10
legacy_profile.year_of_birth = year_of_birth
legacy_profile.save()
return year_of_birth
    def _verify_full_shareable_account_response(self, response, account_privacy=None, badges_enabled=False):
        """
        Verify that the shareable fields from the account are returned.

        ``account_privacy`` is the expected visibility value; ``badges_enabled``
        is the expected value of ``accomplishments_shared``.
        """
        data = response.data
        # NOTE(review): 12 keys are expected but only 11 are asserted below
        # (3 public + 8 shareable) — confirm which additional field makes 12.
        assert 12 == len(data)
        # public fields (3)
        assert account_privacy == data['account_privacy']
        self._verify_profile_image_data(data, True)
        assert self.user.username == data['username']
        # additional shareable fields (8)
        assert TEST_BIO_VALUE == data['bio']
        assert 'US' == data['country']
        assert data['date_joined'] is not None
        assert [{'code': TEST_LANGUAGE_PROFICIENCY_CODE}] == data['language_proficiencies']
        assert 'm' == data['level_of_education']
        assert data['social_links'] is not None
        assert data['time_zone'] is None
        assert badges_enabled == data['accomplishments_shared']
def _verify_private_account_response(self, response, requires_parental_consent=False):
"""
Verify that only the public fields are returned if a user does not want to share account fields
"""
data = response.data
assert 3 == len(data)
assert PRIVATE_VISIBILITY == data['account_privacy']
self._verify_profile_image_data(data, not requires_parental_consent)
assert self.user.username == data['username']
    def _verify_full_account_response(self, response, requires_parental_consent=False, year_of_birth=2000):
        """
        Verify that all account fields are returned (even those that are not shareable).

        ``requires_parental_consent`` forces the expected privacy to private and
        hides the profile image; ``year_of_birth`` is the expected stored value.
        """
        data = response.data
        assert self.FULL_RESPONSE_FIELD_COUNT == len(data)

        # public fields (3)
        expected_account_privacy = (
            PRIVATE_VISIBILITY if requires_parental_consent else
            UserPreference.get_value(self.user, 'account_privacy')
        )
        assert expected_account_privacy == data['account_privacy']
        self._verify_profile_image_data(data, not requires_parental_consent)
        assert self.user.username == data['username']

        # additional shareable fields (8)
        assert TEST_BIO_VALUE == data['bio']
        assert 'US' == data['country']
        assert data['date_joined'] is not None
        assert data['last_login'] is not None
        assert [{'code': TEST_LANGUAGE_PROFICIENCY_CODE}] == data['language_proficiencies']
        assert 'm' == data['level_of_education']
        assert data['social_links'] is not None
        assert UserPreference.get_value(self.user, 'time_zone') == data['time_zone']
        assert data['accomplishments_shared'] is not None
        assert ((self.user.first_name + ' ') + self.user.last_name) == data['name']

        # additional admin fields (13)
        assert self.user.email == data['email']
        assert self.user.id == data['id']
        assert self.VERIFIED_NAME == data['verified_name']
        assert data['extended_profile'] is not None
        assert 'MA' == data['state']
        assert 'f' == data['gender']
        assert 'world peace' == data['goals']
        assert data['is_active']
        assert 'Park Ave' == data['mailing_address']
        assert requires_parental_consent == data['requires_parental_consent']
        assert data['secondary_email'] is None
        assert data['secondary_email_enabled'] is None
        assert year_of_birth == data['year_of_birth']
def test_anonymous_access(self):
"""
Test that an anonymous client (not logged in) cannot call GET or PATCH.
"""
self.send_get(self.anonymous_client, expected_status=401)
self.send_patch(self.anonymous_client, {}, expected_status=401)
def test_unsupported_methods(self):
"""
Test that DELETE, POST, and PUT are not supported.
"""
self.client.login(username=self.user.username, password=TEST_PASSWORD)
assert 405 == self.client.put(self.url).status_code
assert 405 == self.client.post(self.url).status_code
assert 405 == self.client.delete(self.url).status_code
@ddt.data(
("client", "user"),
("staff_client", "staff_user"),
)
@ddt.unpack
def test_get_account_unknown_user(self, api_client, user):
"""
Test that requesting a user who does not exist returns a 404.
"""
client = self.login_client(api_client, user)
response = client.get(reverse("accounts_api", kwargs={'username': "does_not_exist"}))
assert 404 == response.status_code
@ddt.data(
("client", "user"),
)
@ddt.unpack
def test_regsitration_activation_key(self, api_client, user):
"""
Test that registration activation key has a value.
UserFactory does not auto-generate registration object for the test users.
It is created only for users that signup via email/API. Therefore, activation key has to be tested manually.
"""
self.create_user_registration(self.user)
client = self.login_client(api_client, user)
response = self.send_get(client)
assert response.data["activation_key"] is not None
def test_successful_get_account_by_email(self):
"""
Test that request using email by a staff user successfully retrieves Account Info.
"""
api_client = "staff_client"
user = "staff_user"
client = self.login_client(api_client, user)
self.create_mock_profile(self.user)
self.create_mock_verified_name(self.user)
set_user_preference(self.user, ACCOUNT_VISIBILITY_PREF_KEY, PRIVATE_VISIBILITY)
response = self.send_get(client, query_parameters=f'email={self.user.email}')
self._verify_full_account_response(response)
def test_unsuccessful_get_account_by_email(self):
"""
Test that request using email by a normal user fails to retrieve Account Info.
"""
api_client = "client"
user = "user"
client = self.login_client(api_client, user)
self.create_mock_profile(self.user)
set_user_preference(self.user, ACCOUNT_VISIBILITY_PREF_KEY, PRIVATE_VISIBILITY)
response = self.send_get(
client, query_parameters=f'email={self.user.email}', expected_status=status.HTTP_403_FORBIDDEN
)
assert response.data.get('detail') == 'You do not have permission to perform this action.'
def test_successful_get_account_by_user_id(self):
"""
Test that request using lms user id by a staff user successfully retrieves Account Info.
"""
api_client = "staff_client"
user = "staff_user"
url = reverse("accounts_detail_api")
client = self.login_client(api_client, user)
self.create_mock_profile(self.user)
self.create_mock_verified_name(self.user)
set_user_preference(self.user, ACCOUNT_VISIBILITY_PREF_KEY, PRIVATE_VISIBILITY)
response = client.get(url + f'?lms_user_id={self.user.id}')
assert response.status_code == status.HTTP_200_OK
response.data = response.data[0]
self._verify_full_account_response(response)
def test_unsuccessful_get_account_by_user_id(self):
"""
Test that requesting using lms user id by a normal user fails to retrieve Account Info.
"""
api_client = "client"
user = "user"
url = reverse("accounts_detail_api")
client = self.login_client(api_client, user)
self.create_mock_profile(self.user)
set_user_preference(self.user, ACCOUNT_VISIBILITY_PREF_KEY, PRIVATE_VISIBILITY)
response = client.get(url + f'?lms_user_id={self.user.id}')
assert response.status_code == status.HTTP_403_FORBIDDEN
assert response.data.get('detail') == 'You do not have permission to perform this action.'
@ddt.data('abc', '2f', '1.0', "2/8")
def test_get_account_by_user_id_non_integer(self, non_integer_id):
"""
Test that request using a non-integer lms user id by a staff user fails to retrieve Account Info.
"""
api_client = "staff_client"
user = "staff_user"
url = reverse("accounts_detail_api")
client = self.login_client(api_client, user)
self.create_mock_profile(self.user)
self.create_mock_verified_name(self.user)
set_user_preference(self.user, ACCOUNT_VISIBILITY_PREF_KEY, PRIVATE_VISIBILITY)
response = client.get(url + f'?lms_user_id={non_integer_id}')
assert response.status_code == status.HTTP_400_BAD_REQUEST
def test_search_emails(self):
client = self.login_client('staff_client', 'staff_user')
json_data = {'emails': [self.user.email]}
response = self.post_search_api(client, json_data=json_data)
assert response.data == [{'email': self.user.email, 'id': self.user.id, 'username': self.user.username}]
def test_search_emails_with_non_staff_user(self):
client = self.login_client('client', 'user')
json_data = {'emails': [self.user.email]}
response = self.post_search_api(client, json_data=json_data, expected_status=404)
assert response.data == {
'developer_message': "not_found",
'user_message': "Not Found"
}
def test_search_emails_with_non_existing_email(self):
client = self.login_client('staff_client', 'staff_user')
json_data = {"emails": ['non_existant_email@example.com']}
response = self.post_search_api(client, json_data=json_data)
assert response.data == []
def test_search_emails_with_invalid_param(self):
client = self.login_client('staff_client', 'staff_user')
json_data = {'invalid_key': [self.user.email]}
response = self.post_search_api(client, json_data=json_data, expected_status=400)
assert response.data == {
'developer_message': "'emails' field is required",
'user_message': "'emails' field is required"
}
# Note: using getattr so that the patching works even if there is no configuration.
# This is needed when testing CMS as the patching is still executed even though the
# suite is skipped.
@mock.patch.dict(getattr(settings, "ACCOUNT_VISIBILITY_CONFIGURATION", {}), {"default_visibility": "all_users"})
def test_get_account_different_user_visible(self):
"""
Test that a client (logged in) can only get the shareable fields for a different user.
This is the case when default_visibility is set to "all_users".
"""
self.different_client.login(username=self.different_user.username, password=TEST_PASSWORD)
self.create_mock_profile(self.user)
with self.assertNumQueries(self.TOTAL_QUERY_COUNT):
response = self.send_get(self.different_client)
self._verify_full_shareable_account_response(response, account_privacy=ALL_USERS_VISIBILITY)
# Note: using getattr so that the patching works even if there is no configuration.
# This is needed when testing CMS as the patching is still executed even though the
# suite is skipped.
@mock.patch.dict(getattr(settings, "ACCOUNT_VISIBILITY_CONFIGURATION", {}), {"default_visibility": "private"})
def test_get_account_different_user_private(self):
"""
Test that a client (logged in) can only get the shareable fields for a different user.
This is the case when default_visibility is set to "private".
"""
self.different_client.login(username=self.different_user.username, password=TEST_PASSWORD)
self.create_mock_profile(self.user)
with self.assertNumQueries(self.TOTAL_QUERY_COUNT):
response = self.send_get(self.different_client)
self._verify_private_account_response(response)
    @mock.patch.dict(settings.FEATURES, {'ENABLE_OPENBADGES': True})
    @ddt.data(
        ("client", "user", PRIVATE_VISIBILITY),
        ("different_client", "different_user", PRIVATE_VISIBILITY),
        ("staff_client", "staff_user", PRIVATE_VISIBILITY),
        ("client", "user", ALL_USERS_VISIBILITY),
        ("different_client", "different_user", ALL_USERS_VISIBILITY),
        ("staff_client", "staff_user", ALL_USERS_VISIBILITY),
    )
    @ddt.unpack
    def test_get_account_private_visibility(self, api_client, requesting_username, preference_visibility):
        """
        Test the return from GET based on user visibility setting.

        Exercises every (requester, visibility) combination: owners and staff
        always get the full response; other users get what the visibility
        preference allows.
        """
        def verify_fields_visible_to_all_users(response):
            """
            Confirms that private fields are private, and public/shareable fields are public/shareable
            """
            if preference_visibility == PRIVATE_VISIBILITY:
                self._verify_private_account_response(response)
            else:
                self._verify_full_shareable_account_response(response, ALL_USERS_VISIBILITY, badges_enabled=True)

        client = self.login_client(api_client, requesting_username)
        # Update user account visibility setting.
        set_user_preference(self.user, ACCOUNT_VISIBILITY_PREF_KEY, preference_visibility)
        self.create_mock_profile(self.user)
        self.create_mock_verified_name(self.user)
        response = self.send_get(client)

        if requesting_username == "different_user":
            verify_fields_visible_to_all_users(response)
        else:
            self._verify_full_account_response(response)

        # Verify how the view parameter changes the fields that are returned.
        response = self.send_get(client, query_parameters='view=shared')
        verify_fields_visible_to_all_users(response)

        response = self.send_get(client, query_parameters=f'view=shared&username={self.user.username}')
        verify_fields_visible_to_all_users(response)
    @ddt.data(
        ("client", "user"),
        ("staff_client", "staff_user"),
        ("different_client", "different_user"),
    )
    @ddt.unpack
    def test_custom_visibility_over_age(self, api_client, requesting_username):
        """
        With CUSTOM visibility and selected fields shared, another user sees
        only the public fields plus the custom-shared ones; the owner and
        staff still receive the full response.
        """
        self.create_mock_profile(self.user)
        self.create_mock_verified_name(self.user)
        # set user's custom visibility preferences
        set_user_preference(self.user, ACCOUNT_VISIBILITY_PREF_KEY, CUSTOM_VISIBILITY)
        shared_fields = ("bio", "language_proficiencies", "name")
        for field_name in shared_fields:
            set_user_preference(self.user, f"visibility.{field_name}", ALL_USERS_VISIBILITY)

        # make API request
        client = self.login_client(api_client, requesting_username)
        response = self.send_get(client)

        # verify response
        if requesting_username == "different_user":
            data = response.data
            # 3 public fields + the 3 custom-shared fields
            assert 6 == len(data)
            # public fields
            assert self.user.username == data['username']
            assert UserPreference.get_value(self.user, 'account_privacy') == data['account_privacy']
            self._verify_profile_image_data(data, has_profile_image=True)
            # custom shared fields
            assert TEST_BIO_VALUE == data['bio']
            assert [{'code': TEST_LANGUAGE_PROFICIENCY_CODE}] == data['language_proficiencies']
            assert ((self.user.first_name + ' ') + self.user.last_name) == data['name']
        else:
            self._verify_full_account_response(response)
    @ddt.data(
        ("client", "user"),
        ("staff_client", "staff_user"),
        ("different_client", "different_user"),
    )
    @ddt.unpack
    def test_custom_visibility_under_age(self, api_client, requesting_username):
        """
        A user requiring parental consent (age 10 here) is treated as fully
        private for other users, even with custom visibility preferences that
        would otherwise share fields.
        """
        self.create_mock_profile(self.user)
        self.create_mock_verified_name(self.user)
        year_of_birth = self._set_user_age_to_10_years(self.user)

        # set user's custom visibility preferences
        set_user_preference(self.user, ACCOUNT_VISIBILITY_PREF_KEY, CUSTOM_VISIBILITY)
        shared_fields = ("bio", "language_proficiencies")
        for field_name in shared_fields:
            set_user_preference(self.user, f"visibility.{field_name}", ALL_USERS_VISIBILITY)

        # make API request
        client = self.login_client(api_client, requesting_username)
        response = self.send_get(client)

        # verify response
        if requesting_username == "different_user":
            self._verify_private_account_response(response, requires_parental_consent=True)
        else:
            self._verify_full_account_response(
                response,
                requires_parental_consent=True,
                year_of_birth=year_of_birth,
            )
    def test_get_account_default(self):
        """
        Test that a client (logged in) can get her own account information (using default legacy profile information,
        as created by the test UserFactory).
        """
        def verify_get_own_information(queries):
            """
            Internal helper to perform the actual assertions
            """
            with self.assertNumQueries(queries):
                response = self.send_get(self.client)
            data = response.data
            assert self.FULL_RESPONSE_FIELD_COUNT == len(data)
            assert self.user.username == data['username']
            assert ((self.user.first_name + ' ') + self.user.last_name) == data['name']
            # Fields never set by UserFactory come back as None.
            for empty_field in ("year_of_birth", "level_of_education", "mailing_address", "bio"):
                assert data[empty_field] is None
            assert data['country'] is None
            assert data['state'] is None
            # Defaults provided by the test UserFactory's profile.
            assert 'm' == data['gender']
            assert 'Learn a lot' == data['goals']
            assert self.user.email == data['email']
            assert self.user.id == data['id']
            assert data['date_joined'] is not None
            assert data['last_login'] is not None
            assert self.user.is_active == data['is_active']
            self._verify_profile_image_data(data, False)
            # No year of birth set -> parental consent is required.
            assert data['requires_parental_consent']
            assert [] == data['language_proficiencies']
            assert PRIVATE_VISIBILITY == data['account_privacy']
            assert data['time_zone'] is None
            # Badges aren't on by default, so should not be present.
            assert data['accomplishments_shared'] is False

        self.client.login(username=self.user.username, password=TEST_PASSWORD)
        # 25 queries for an active user's own account.
        verify_get_own_information(25)

        # Now make sure that the user can get the same information, even if not active
        self.user.is_active = False
        self.user.save()
        # Fewer queries for an inactive user — TODO confirm why the count drops to 17.
        verify_get_own_information(17)
def test_get_account_empty_string(self):
"""
Test the conversion of empty strings to None for certain fields.
"""
legacy_profile = UserProfile.objects.get(id=self.user.id)
legacy_profile.country = ""
legacy_profile.state = ""
legacy_profile.level_of_education = ""
legacy_profile.gender = ""
legacy_profile.bio = ""
legacy_profile.save()
self.client.login(username=self.user.username, password=TEST_PASSWORD)
with self.assertNumQueries(25):
response = self.send_get(self.client)
for empty_field in ("level_of_education", "gender", "country", "state", "bio",):
assert response.data[empty_field] is None
@ddt.data(
("different_client", "different_user"),
("staff_client", "staff_user"),
)
@ddt.unpack
def test_patch_account_disallowed_user(self, api_client, user):
"""
Test that a client cannot call PATCH on a different client's user account (even with
is_staff access).
"""
client = self.login_client(api_client, user)
self.send_patch(client, {}, expected_status=403)
@ddt.data(
("client", "user"),
("staff_client", "staff_user"),
)
@ddt.unpack
def test_patch_account_unknown_user(self, api_client, user):
"""
Test that trying to update a user who does not exist returns a 403.
"""
client = self.login_client(api_client, user)
response = client.patch(
reverse("accounts_api", kwargs={'username': "does_not_exist"}),
data=json.dumps({}), content_type="application/merge-patch+json"
)
assert 403 == response.status_code
    # Each tuple is (field, valid_value[, invalid_value, expected_developer_message]);
    # the last two members are optional and drive the failure-path assertions below.
    @ddt.data(
        ("gender", "f", "not a gender", '"not a gender" is not a valid choice.'),
        ("level_of_education", "none", "ȻħȺɍłɇs", '"ȻħȺɍłɇs" is not a valid choice.'),
        ("country", "GB", "XY", '"XY" is not a valid choice.'),
        ("state", "MA", "PY", '"PY" is not a valid choice.'),
        ("year_of_birth", 2009, "not_an_int", "A valid integer is required."),
        ("name", "bob", "z" * 256, "Ensure this field has no more than 255 characters."),
        ("name", "ȻħȺɍłɇs", " ", "The name field must be at least 1 character long."),
        ("goals", "Smell the roses"),
        ("mailing_address", "Sesame Street"),
        # Note that we store the raw data, so it is up to client to escape the HTML.
        (
            "bio", "<html>Lacrosse-playing superhero 壓是進界推日不復女</html>",
            "z" * 301, "The about me field must be at most 300 characters long."
        ),
        ("account_privacy", ALL_USERS_VISIBILITY),
        ("account_privacy", PRIVATE_VISIBILITY),
        # Note that email is tested below, as it is not immediately updated.
        # Note that language_proficiencies is tested below as there are multiple error and success conditions.
    )
    @ddt.unpack
    def test_patch_account(self, field, value, fails_validation_value=None, developer_validation_message=None):
        """
        Test the behavior of patch, when using the correct content_type.
        """
        client = self.login_client("client", "user")

        if field == 'account_privacy':
            # Ensure the user has birth year set, and is over 13, so
            # account_privacy behaves normally
            legacy_profile = UserProfile.objects.get(id=self.user.id)
            legacy_profile.year_of_birth = 2000
            legacy_profile.save()

        # Happy path: the valid value round-trips through PATCH.
        response = self.send_patch(client, {field: value})
        assert value == response.data[field]

        if fails_validation_value:
            # Failure path: the invalid value yields a 400 with both user- and
            # developer-facing messages in field_errors.
            error_response = self.send_patch(client, {field: fails_validation_value}, expected_status=400)
            expected_user_message = 'This value is invalid.'
            if field == 'bio':
                expected_user_message = "The about me field must be at most 300 characters long."
            assert expected_user_message == error_response.data['field_errors'][field]['user_message']

            assert "Value '{value}' is not valid for field '{field}': {messages}".format(
                value=fails_validation_value,
                field=field,
                messages=[developer_validation_message]
            ) == error_response.data['field_errors'][field]['developer_message']
        elif field != "account_privacy":
            # If there are no values that would fail validation, then empty string should be supported;
            # except for account_privacy, which cannot be an empty string.
            response = self.send_patch(client, {field: ""})
            assert '' == response.data[field]
def test_patch_inactive_user(self):
""" Verify that a user can patch her own account, even if inactive. """
self.client.login(username=self.user.username, password=TEST_PASSWORD)
self.user.is_active = False
self.user.save()
response = self.send_patch(self.client, {"goals": "to not activate account"})
assert 'to not activate account' == response.data['goals']
@ddt.unpack
def test_patch_account_noneditable(self):
"""
Tests the behavior of patch when a read-only field is attempted to be edited.
"""
client = self.login_client("client", "user")
def verify_error_response(field_name, data):
"""
Internal helper to check the error messages returned
"""
assert 'This field is not editable via this API' == data['field_errors'][field_name]['developer_message']
assert "The '{}' field cannot be edited.".format(
field_name
) == data['field_errors'][field_name]['user_message']
for field_name in ["username", "date_joined", "is_active", "profile_image", "requires_parental_consent"]:
response = self.send_patch(client, {field_name: "will_error", "gender": "o"}, expected_status=400)
verify_error_response(field_name, response.data)
# Make sure that gender did not change.
response = self.send_get(client)
assert 'm' == response.data['gender']
# Test error message with multiple read-only items
response = self.send_patch(client, {"username": "will_error", "date_joined": "xx"}, expected_status=400)
assert 2 == len(response.data['field_errors'])
verify_error_response("username", response.data)
verify_error_response("date_joined", response.data)
def test_patch_bad_content_type(self):
"""
Test the behavior of patch when an incorrect content_type is specified.
"""
self.client.login(username=self.user.username, password=TEST_PASSWORD)
self.send_patch(self.client, {}, content_type="application/json", expected_status=415)
self.send_patch(self.client, {}, content_type="application/xml", expected_status=415)
def test_patch_account_empty_string(self):
"""
Tests the behavior of patch when attempting to set fields with a select list of options to the empty string.
Also verifies the behaviour when setting to None.
"""
self.client.login(username=self.user.username, password=TEST_PASSWORD)
for field_name in ["gender", "level_of_education", "country", "state"]:
response = self.send_patch(self.client, {field_name: ""})
# Although throwing a 400 might be reasonable, the default DRF behavior with ModelSerializer
# is to convert to None, which also seems acceptable (and is difficult to override).
assert response.data[field_name] is None
# Verify that the behavior is the same for sending None.
response = self.send_patch(self.client, {field_name: ""})
assert response.data[field_name] is None
    def test_patch_name_metadata(self):
        """
        Test the metadata stored when changing the name field.

        Each name change must append an (old_name, reason, timestamp) entry to
        the profile meta's "old_names" list, with the oldest entry first.
        """
        def get_name_change_info(expected_entries):
            """
            Internal method to encapsulate the retrieval of old names used
            """
            legacy_profile = UserProfile.objects.get(id=self.user.id)
            name_change_info = legacy_profile.get_meta()["old_names"]
            assert expected_entries == len(name_change_info)
            return name_change_info

        def verify_change_info(change_info, old_name, requester, new_name):
            """
            Internal method to validate name changes
            """
            # Entry shape: [old name, change reason, timestamp].
            assert 3 == len(change_info)
            assert old_name == change_info[0]
            assert f'Name change requested through account API by {requester}' == change_info[1]
            assert change_info[2] is not None
            # Verify the new name was also stored.
            get_response = self.send_get(self.client)
            assert new_name == get_response.data['name']

        self.client.login(username=self.user.username, password=TEST_PASSWORD)
        legacy_profile = UserProfile.objects.get(id=self.user.id)
        assert {} == legacy_profile.get_meta()
        old_name = legacy_profile.name

        # First change the name as the user and verify meta information.
        self.send_patch(self.client, {"name": "Mickey Mouse"})
        name_change_info = get_name_change_info(1)
        verify_change_info(name_change_info[0], old_name, self.user.username, "Mickey Mouse")

        # Now change the name again and verify meta information.
        self.send_patch(self.client, {"name": "Donald Duck"})
        name_change_info = get_name_change_info(2)
        verify_change_info(name_change_info[0], old_name, self.user.username, "Donald Duck", )
        verify_change_info(name_change_info[1], "Mickey Mouse", self.user.username, "Donald Duck")
@mock.patch.dict(
    'django.conf.settings.PROFILE_IMAGE_SIZES_MAP',
    {'full': 50, 'medium': 30, 'small': 10},
    clear=True
)
def test_patch_email(self):
    """
    Test that the user can request an email change through the accounts API.
    Full testing of the helper method used (do_email_change_request) exists in the package with the code.
    Here just do minimal smoke testing.
    """
    # NOTE(review): the PROFILE_IMAGE_SIZES_MAP patch above looks unrelated to
    # an email-change test -- confirm whether it is actually needed here.
    client = self.login_client("client", "user")
    old_email = self.user.email
    new_email = "newemail@example.com"
    response = self.send_patch(client, {"email": new_email, "goals": "change my email"})
    # Since request is multi-step, the email won't change on GET immediately (though goals will update).
    assert old_email == response.data['email']
    assert 'change my email' == response.data['goals']
    # Now call the method that will be invoked with the user clicks the activation key in the received email.
    # First we must get the activation key that was sent.
    pending_change = PendingEmailChange.objects.filter(user=self.user)
    assert 1 == len(pending_change)
    activation_key = pending_change[0].activation_key
    confirm_change_url = reverse(
        "confirm_email_change", kwargs={'key': activation_key}
    )
    # Confirming the change completes the two-step flow.
    response = self.client.post(confirm_change_url)
    assert 200 == response.status_code
    get_response = self.send_get(client)
    assert new_email == get_response.data['email']
@ddt.data(
    ("not_an_email",),
    ("",),
    (None,),
)
@ddt.unpack
def test_patch_invalid_email(self, bad_email):
    """
    Test a few error cases for email validation (full test coverage lives with do_email_change_request).
    """
    api_client = self.login_client("client", "user")
    # An invalid address must be rejected with a per-field error payload.
    error_response = self.send_patch(api_client, {"email": bad_email}, expected_status=400)
    email_errors = error_response.data["field_errors"]["email"]
    assert email_errors["developer_message"] == \
        "Error thrown from validate_new_email: 'Valid e-mail address required.'"
    assert email_errors["user_message"] == "Valid e-mail address required."
@mock.patch('common.djangoapps.student.views.management.do_email_change_request')
def test_patch_duplicate_email(self, do_email_change_request):
    """
    Test that same success response will be sent to user even if the given email already used.
    """
    taken_email = "same@example.com"
    UserFactory.create(email=taken_email)
    api_client = self.login_client("client", "user")
    # Patching to an already-used address must still look like success
    # (so existing accounts are not enumerable).
    response = self.send_patch(api_client, {"email": taken_email})
    assert response.status_code == 200
    # ...but no actual email-change request may have been kicked off.
    assert not do_email_change_request.called
def test_patch_language_proficiencies(self):
    """
    Verify that patching the language_proficiencies field of the user
    profile completely overwrites the previous value.
    """
    api_client = self.login_client("client", "user")
    # Patching language_proficiencies exercises
    # `LanguageProficiencySerializer.get_identity`, which identifies
    # proficiencies by language code rather than django model id.
    cases = (
        [{"code": "en"}, {"code": "fr"}, {"code": "es"}],
        [{"code": "fr"}],
        [{"code": "aa"}],
        [],
    )
    for proficiencies in cases:
        patched = self.send_patch(api_client, {"language_proficiencies": proficiencies})
        self.assertCountEqual(patched.data["language_proficiencies"], proficiencies)
@ddt.data(
    (
        "not_a_list",
        {'non_field_errors': ['Expected a list of items but got type "unicode".']}
    ),
    (
        ["not_a_JSON_object"],
        [{'non_field_errors': ['Invalid data. Expected a dictionary, but got unicode.']}]
    ),
    (
        [{}],
        [{'code': ['This field is required.']}]
    ),
    (
        [{"code": "invalid_language_code"}],
        [{'code': ['"invalid_language_code" is not a valid choice.']}]
    ),
    (
        [{"code": "kw"}, {"code": "el"}, {"code": "kw"}],
        ['The language_proficiencies field must consist of unique languages.']
    ),
)
@ddt.unpack
def test_patch_invalid_language_proficiencies(self, patch_value, expected_error_message):
    """
    Verify we handle error cases when patching the language_proficiencies
    field.
    """
    # The expected messages above name the Python 2 'unicode' type; on
    # Python 3 DRF reports 'str', so rewrite before comparing.
    expected_error_message = str(expected_error_message).replace('unicode', 'str')
    client = self.login_client("client", "user")
    response = self.send_patch(client, {"language_proficiencies": patch_value}, expected_status=400)
    assert response.data['field_errors']['language_proficiencies']['developer_message'] == \
        f"Value '{patch_value}' is not valid for field 'language_proficiencies': {expected_error_message}"
@mock.patch('openedx.core.djangoapps.user_api.accounts.serializers.AccountUserSerializer.save')
def test_patch_serializer_save_fails(self, serializer_save):
    """
    Test that AccountUpdateErrors are passed through to the response.
    """
    # First save attempt blows up; a (never-reached) second call would return None.
    serializer_save.side_effect = [Exception("bummer"), None]
    self.client.login(username=self.user.username, password=TEST_PASSWORD)
    failure = self.send_patch(self.client, {"goals": "save an account field"}, expected_status=400)
    assert failure.data['developer_message'] == "Error thrown when saving account updates: 'bummer'"
    assert failure.data['user_message'] is None
@override_settings(PROFILE_IMAGE_BACKEND=TEST_PROFILE_IMAGE_BACKEND)
def test_convert_relative_profile_url(self):
    """
    Test that when TEST_PROFILE_IMAGE_BACKEND['base_url'] begins
    with a '/', the API generates the full URL to profile images based on
    the URL of the request.
    """
    self.client.login(username=self.user.username, password=TEST_PASSWORD)
    data = self.send_get(self.client).data
    # The relative base_url must have been expanded against the request host.
    expected_image_info = {
        'has_image': False,
        'image_url_full': 'http://testserver/static/default_50.png',
        'image_url_small': 'http://testserver/static/default_10.png',
    }
    assert expected_image_info == data['profile_image']
@ddt.data(
    ("client", "user", True),
    ("different_client", "different_user", False),
    ("staff_client", "staff_user", True),
)
@ddt.unpack
def test_parental_consent(self, api_client, requesting_username, has_full_access):
    """
    Verifies that under thirteens never return a public profile.
    """
    client = self.login_client(api_client, requesting_username)
    # Make the account owner 10 years old, which requires parental consent.
    year_of_birth = self._set_user_age_to_10_years(self.user)
    # Even with the "visible to all users" preference explicitly set...
    set_user_preference(self.user, ACCOUNT_VISIBILITY_PREF_KEY, ALL_USERS_VISIBILITY)
    # Verify that the default view is still private (except for clients with full access)
    response = self.send_get(client)
    if has_full_access:
        data = response.data
        assert self.FULL_RESPONSE_FIELD_COUNT == len(data)
        assert self.user.username == data['username']
        assert ((self.user.first_name + ' ') + self.user.last_name) == data['name']
        assert self.user.email == data['email']
        assert self.user.id == data['id']
        assert year_of_birth == data['year_of_birth']
        # Fields never populated for this fixture user remain None.
        for empty_field in ("country", "level_of_education", "mailing_address", "bio", "state",):
            assert data[empty_field] is None
        assert 'm' == data['gender']
        assert 'Learn a lot' == data['goals']
        assert data['is_active']
        assert data['date_joined'] is not None
        assert data['last_login'] is not None
        self._verify_profile_image_data(data, False)
        assert data['requires_parental_consent']
        # Under-13 accounts are forced private regardless of the preference set above.
        assert PRIVATE_VISIBILITY == data['account_privacy']
    else:
        self._verify_private_account_response(response, requires_parental_consent=True)
    # Verify that the shared view is still private
    response = self.send_get(client, query_parameters='view=shared')
    self._verify_private_account_response(response, requires_parental_consent=True)
@skip_unless_lms
class TestAccountAPITransactions(TransactionTestCase):
    """
    Tests the transactional behavior of the account API
    """

    def setUp(self):
        super().setUp()
        self.client = APIClient()
        self.user = UserFactory.create(password=TEST_PASSWORD)
        self.url = reverse("accounts_api", kwargs={'username': self.user.username})

    @mock.patch('common.djangoapps.student.views.do_email_change_request')
    def test_update_account_settings_rollback(self, mock_email_change):
        """
        Verify that updating account settings is transactional when a failure happens.
        """
        # Send a PATCH request with updates to both profile information and email.
        # Throw an error from the method that is used to process the email change request
        # (this is the last thing done in the api method). Verify that the profile did not change.
        # Bug fix: the original assigned the list [ValueError, "mock value error thrown"],
        # which raised a *message-less* ValueError on the first call and silently
        # dropped the message. Raise a properly constructed exception instead.
        mock_email_change.side_effect = ValueError("mock value error thrown")
        self.client.login(username=self.user.username, password=TEST_PASSWORD)
        old_email = self.user.email
        json_data = {"email": "foo@bar.com", "gender": "o"}
        response = self.client.patch(self.url, data=json.dumps(json_data), content_type="application/merge-patch+json")
        assert 400 == response.status_code
        # Verify that GET returns the original preferences (gender update rolled back too).
        response = self.client.get(self.url)
        data = response.data
        assert old_email == data['email']
        assert 'm' == data['gender']
@ddt.ddt
class NameChangeViewTests(UserAPITestCase):
    """ NameChangeView tests """

    def setUp(self):
        super().setUp()
        self.url = reverse('name_change')

    def _login(self):
        """Authenticate the default test user before hitting the endpoint."""
        self.client.login(username=self.user.username, password=TEST_PASSWORD)

    def test_request_succeeds(self):
        """
        Test that a valid name change request succeeds.
        """
        self._login()
        self.send_post(self.client, {'name': 'New Name'})

    def test_unauthenticated(self):
        """
        Test that a name change request fails for an unauthenticated user.
        """
        self.send_post(self.client, {'name': 'New Name'}, expected_status=401)

    def test_empty_request(self):
        """
        Test that an empty request fails.
        """
        self._login()
        self.send_post(self.client, {}, expected_status=400)

    def test_blank_name(self):
        """
        Test that a blank name string fails.
        """
        self._login()
        self.send_post(self.client, {'name': ''}, expected_status=400)

    @ddt.data('<html>invalid name</html>', 'https://invalid.com')
    def test_fails_validation(self, bad_name):
        """
        Test that an invalid name will return an error.
        """
        self._login()
        self.send_post(self.client, {'name': bad_name}, expected_status=400)
@ddt.ddt
@mock.patch('django.conf.settings.USERNAME_REPLACEMENT_WORKER', 'test_replace_username_service_worker')
class UsernameReplacementViewTests(APITestCase):
    """ Tests UsernameReplacementView """

    SERVICE_USERNAME = 'test_replace_username_service_worker'

    def setUp(self):
        super().setUp()
        self.service_user = UserFactory(username=self.SERVICE_USERNAME)
        self.url = reverse("username_replacement")

    def build_jwt_headers(self, user):
        """
        Helper function for creating headers for the JWT authentication.
        """
        return {'HTTP_AUTHORIZATION': f'JWT {create_jwt_for_user(user)}'}

    def call_api(self, user, data):
        """ Helper function to call API with data """
        return self.client.post(
            self.url,
            json.dumps(data),
            content_type='application/json',
            **self.build_jwt_headers(user),
        )

    def test_auth(self):
        """ Verify the endpoint only works with the service worker """
        payload = {
            "username_mappings": [
                {"test_username_1": "test_new_username_1"},
                {"test_username_2": "test_new_username_2"}
            ]
        }
        # Unauthenticated callers are rejected outright.
        assert self.client.post(self.url).status_code == 401
        # An authenticated but ordinary user is forbidden.
        assert self.call_api(UserFactory(), payload).status_code == 403
        # Only the designated service worker may replace usernames.
        assert self.call_api(self.service_user, payload).status_code == 200

    @ddt.data(
        [{}, {}],
        {},
        [{"test_key": "test_value", "test_key_2": "test_value_2"}]
    )
    def test_bad_schema(self, mapping_data):
        """ Verify the endpoint rejects bad data schema """
        response = self.call_api(self.service_user, {"username_mappings": mapping_data})
        assert response.status_code == 400

    def test_existing_and_non_existing_users(self):
        """ Tests a mix of existing and non existing users """
        real_users = [UserFactory() for _ in range(5)]
        missing_names = ["myname_" + str(i) for i in range(5)]
        mappings = (
            [{user.username: user.username + '_new'} for user in real_users]
            + [{name: name + '_new'} for name in missing_names]
        )
        response = self.call_api(self.service_user, {"username_mappings": mappings})
        assert response.status_code == 200
        # Every mapping -- existing user or not -- is reported as successful.
        assert response.data == {
            'failed_replacements': [],
            'successful_replacements': mappings,
        }
|
def _checkInput(index):
if index < 0:
raise ValueError("Indice negativo non supportato [{}]".format(index))
elif type(index) != int:
raise TypeError("Inserire un intero [tipo input {}]".format(type(index).__name__))
def fib_from_string(index):
    """
    Return the index-th Fibonacci number by indexing a hard-coded digit string.

    Only supports indices 0-6 (the single-digit Fibonacci values); larger
    indices raise IndexError.
    """
    _checkInput(index)
    # "0 1 1 2 3 5 8" with the spaces removed.
    digits = "0112358"
    return int(digits[index])
def fib_from_list(index):
    """
    Return the index-th Fibonacci number from a precomputed lookup sequence.

    Only supports indices 0-6; larger indices raise IndexError.
    """
    _checkInput(index)
    lookup = (0, 1, 1, 2, 3, 5, 8)
    return lookup[index]
def fib_from_algo(index):
    """
    Return the index-th Fibonacci number iteratively (works for any index >= 0).
    """
    _checkInput(index)
    # Classic pair-advance: after k steps, current == fib(k).
    current, following = 0, 1
    for _ in range(index):
        current, following = following, current + following
    return current
def recursion(index):
    """
    Naive doubly-recursive Fibonacci.

    Returns ``index`` itself for index <= 1, otherwise the sum of the two
    preceding Fibonacci numbers. Exponential time by design (used for
    comparison with the other implementations).
    """
    return index if index <= 1 else recursion(index - 1) + recursion(index - 2)
def fib_from_recursion_func(index):
    """
    Return the index-th Fibonacci number by delegating to the naive
    recursive helper after validating the input.
    """
    _checkInput(index)
    return recursion(index)


# Default implementation exposed to callers of this module.
calculate = fib_from_recursion_func
|
"""
Braitenberg Vehicle2b
The more light sensed on the left side the faster the right motor moves.
The more light sensed on the right side the faster the left motor moves.
This causes the robot to turn towards a light source.
"""
from pyrobot.brain import Brain, avg
class Vehicle(Brain):
    """Braitenberg vehicle brain with crossed sensor-to-motor wiring."""

    def setup(self):
        # Report light readings on the simulator's normalized "SCALED" units
        # rather than raw sensor values.
        self.robot.light[0].units = "SCALED"

    def step(self):
        # Crossed connections: the left motor speed comes from the strongest
        # *right*-side light reading and vice versa, so the robot steers
        # toward a light source (vehicle 2b behavior per the module docstring).
        leftSpeed = max([s.value for s in self.robot.light[0]["right"]])
        rightSpeed = max([s.value for s in self.robot.light[0]["left"]])
        print "leftSpeed, rightSpeed:", leftSpeed, rightSpeed
        self.motors(leftSpeed, rightSpeed)
def INIT(engine):
    """
    pyrobot entry point: validate the robot has light sensors and build the brain.

    Raises:
        ValueError: if the engine's robot type is not one known to carry
            light sensors.
    """
    if engine.robot.type not in ['K-Team', 'Pyrobot']:
        # Bug fix: the original used `raise "Robot should have light sensors!"`.
        # String exceptions were removed in Python 2.6+ (raising one produces a
        # TypeError), so raise a real exception with the same message.
        raise ValueError("Robot should have light sensors!")
    # NOTE(review): the module docstring describes Vehicle2b, but the brain is
    # registered under the name 'Braitenberg2a' -- confirm which is intended.
    return Vehicle('Braitenberg2a', engine)
|
# Demo script: show a desktop notification positioned at screen coords (150, 10)
# using the (Python 2) pygtk + pynotify bindings.
import pygtk
pygtk.require('2.0')
import pynotify
import sys

if __name__ == '__main__':
    # Initialise the connection to the notification daemon; bail out if
    # no daemon is available.
    if not pynotify.init("XY"):
        sys.exit(1)
    n = pynotify.Notification("X, Y Test",
        "This notification should point to 150, 10")
    # Hint the daemon to anchor the notification bubble at (x, y) = (150, 10).
    n.set_hint("x", 150)
    n.set_hint("y", 10)
    # show() returns falsy when the notification could not be delivered.
    if not n.show():
        print "Failed to send notification"
        sys.exit(1)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.