hexsha stringlengths 40 40 | size int64 1 1.03M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 239 | max_stars_repo_name stringlengths 5 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 239 | max_issues_repo_name stringlengths 5 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 239 | max_forks_repo_name stringlengths 5 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.03M | avg_line_length float64 1 958k | max_line_length int64 1 1.03M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
acf2a97d4bd5afbbf54296fb9384bbfeab35ad83 | 5,154 | py | Python | tests/testapp/models.py | hovel/django-whatever | f5e5e3a104ef0aff5fb64a7bd54556f34793beaf | [
"MIT"
] | 3 | 2020-07-27T12:43:47.000Z | 2020-10-28T22:04:21.000Z | tests/testapp/models.py | hovel/django-whatever | f5e5e3a104ef0aff5fb64a7bd54556f34793beaf | [
"MIT"
] | null | null | null | tests/testapp/models.py | hovel/django-whatever | f5e5e3a104ef0aff5fb64a7bd54556f34793beaf | [
"MIT"
] | 1 | 2020-07-25T10:01:30.000Z | 2020-07-25T10:01:30.000Z | import datetime
import os
from decimal import Decimal
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.db import models
# Minimal model used by the test suite to exercise custom permissions.
class CustomPermission(models.Model):
name = models.CharField(max_length=5)
class SimpleModelWithDefaults(models.Model):
    """Covers every simple field type, each carrying an explicit default value."""
    big_integer_field = models.BigIntegerField(default=8223372036854775807)
    char_field = models.CharField(max_length=5, default='USSR')
    boolean_field = models.BooleanField(default=True)
    date_field = models.DateField(default=datetime.date(2010, 12, 10))
    # BUG FIX: the callable was previously passed positionally, which Django
    # interprets as the field's verbose_name (the first positional parameter
    # of Field.__init__), so the field had no default at all.
    datetime_field = models.DateTimeField(default=datetime.datetime.now)
    decimal_field = models.DecimalField(decimal_places=2, max_digits=10, default=Decimal('0.5'))
    email_field = models.EmailField(default='admin@dev.null')
    float_field = models.FloatField(default=0.7)
    integer_field = models.IntegerField(default=42)
    ip_field = models.GenericIPAddressField(default='127.0.0.1')
    # NOTE: the misspelled name ("boolead") is kept for compatibility with
    # existing migrations and tests that reference this column.
    null_boolead_field = models.NullBooleanField(default=None)
    positive_integer_field = models.PositiveIntegerField(default=4)
    small_integer = models.PositiveSmallIntegerField(default=1)
    slug_field = models.SlugField(default='any_model_default')
    text_field = models.TextField(default='Lorem ipsum')
    time_field = models.TimeField(default=datetime.time(hour=11, minute=14))
    url_field = models.URLField(default='http://yandex.ru')
# Empty model serving as the target of the FK / O2O relations below.
class TargetModel(models.Model):
pass
# Exercises relationship fields that declare a default primary-key value.
class RelationshipModelsWithDefaults(models.Model):
fk = models.ForeignKey(TargetModel, on_delete=models.CASCADE, default=1, related_name='related_fk')
o2o = models.OneToOneField(TargetModel, on_delete=models.CASCADE, default=1, related_name='related_o2o')
class ModelWithConstraint(models.Model):
"""
Validates that start_time is always before end_time
"""
start_time = models.DateTimeField()
end_time = models.DateTimeField()
# Model-level validation hook; Django runs it via full_clean().
def clean(self):
if self.start_time > self.end_time:
raise ValidationError('start_time could not be after end_time')
# FK pointing at a model whose clean() can fail, for testing cascading validation.
class ModelWithConstraintOnForeignKey(models.Model):
timestamp = models.ForeignKey(ModelWithConstraint, on_delete=models.CASCADE)
# Same field inventory as SimpleModelWithDefaults, but with no defaults,
# plus file/image upload fields.
class SimpleModel(models.Model):
big_integer_field = models.BigIntegerField()
char_field = models.CharField(max_length=5)
boolean_field = models.BooleanField()
date_field = models.DateField()
datetime_field = models.DateTimeField()
decimal_field = models.DecimalField(decimal_places=2, max_digits=10)
email_field = models.EmailField()
float_field = models.FloatField()
integer_field = models.IntegerField()
ip_field = models.GenericIPAddressField()
null_boolead_field = models.NullBooleanField()
positive_integer_field = models.PositiveIntegerField()
small_integer = models.PositiveSmallIntegerField()
slug_field = models.SlugField()
text_field = models.TextField()
time_field = models.TimeField()
url_field = models.URLField()
file_field = models.FileField(upload_to='sample_subdir')
image_field = models.ImageField(upload_to='sample_subdir')
# Trivial SlugField subclass used to test custom-field handling.
class MySlugField(models.SlugField):
pass
class ModelWithCustomField(models.Model):
slug = MySlugField()
# Target of the generic relation below.
class RelatedContentModel(models.Model):
name = models.SlugField()
# Standard Django generic-foreign-key triple: content_type + object_id
# resolved through GenericForeignKey.
class ModelWithGenericRelation(models.Model):
tag = models.SlugField()
content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey('content_type', 'object_id')
def validate_even(value):
    """Field validator: raise ValidationError unless *value* is even."""
    if value % 2:
        raise ValidationError('%s is not an even number' % value)
# Field-level validator usage (validate_even is defined just above).
class ModelWithValidatedField(models.Model):
even_field = models.PositiveIntegerField(validators=[validate_even])
# upload_to given as a plain string path.
class ModelUploadToString(models.Model):
file_field = models.FileField(upload_to='sample_subdir')
def callable_upload_to(instance, filename):
    """Upload-path callback placing *filename* inside 'sample_subdir'.

    The *instance* argument is required by Django's upload_to protocol
    but is not used here.
    """
    subdir = 'sample_subdir'
    return os.path.join(subdir, filename)
# upload_to given as a callable (callable_upload_to above).
class ModelUploadToCallable(models.Model):
file_field = models.FileField(upload_to=callable_upload_to)
# Simple FK pair: BaseModel -> RelatedModel.
class RelatedModel(models.Model):
name = models.CharField(max_length=5)
class BaseModel(models.Model):
related = models.ForeignKey(RelatedModel, on_delete=models.CASCADE)
# Self-referential FK; nullable so root nodes can exist.
class SelfReferencingModel(models.Model):
name = models.CharField(max_length=5)
parent = models.ForeignKey('self', on_delete=models.CASCADE, null=True, blank=True)
# One-to-one pair.
class OneToOneRelated(models.Model):
name = models.CharField(max_length=5)
class ModelWithOneToOneField(models.Model):
name = models.CharField(max_length=5)
related = models.OneToOneField(OneToOneRelated, on_delete=models.CASCADE)
# FK pair used in Q-object lookup tests.
class QObjectRelated(models.Model):
pass
class RelatedToQObject(models.Model):
related = models.ForeignKey(QObjectRelated, on_delete=models.CASCADE)
# FK pair used in field-redefinition tests.
class Redefined(models.Model):
name = models.CharField(max_length=5)
class RelatedToRedefined(models.Model):
related = models.ForeignKey(Redefined, on_delete=models.CASCADE)
| 32.620253 | 108 | 0.766395 |
acf2aa8135cafcca791a06796437c5a0e9406337 | 241 | py | Python | pyredner/area_light.py | swordigo1995/redner | 195696deb4269447b7e4648d6285ab34da2ed24f | [
"MIT"
] | null | null | null | pyredner/area_light.py | swordigo1995/redner | 195696deb4269447b7e4648d6285ab34da2ed24f | [
"MIT"
] | null | null | null | pyredner/area_light.py | swordigo1995/redner | 195696deb4269447b7e4648d6285ab34da2ed24f | [
"MIT"
] | null | null | null | class AreaLight:
def __init__(self, shape_id, intensity, two_sided = False, hide_shape = False):
"""Store the area light's emitter reference and emission settings."""
# id of the shape acting as the emitter — presumably an index into the
# scene's shape list; TODO confirm against the caller.
self.shape_id = shape_id
# emission intensity value supplied by the caller
self.intensity = intensity
# whether the shape emits from both faces
self.two_sided = two_sided
# whether the emitter geometry itself is hidden
self.hide_shape = hide_shape
| 34.428571 | 83 | 0.676349 |
acf2aab749bdb4d2ba69b90a4f749c2b8d9bc2a5 | 16,116 | py | Python | mungo/diagram/__init__.py | PapenfussLab/Mungo | 02c5b0e48ecd28596cb9481b282753859f47fed6 | [
"Artistic-2.0"
] | 1 | 2015-09-16T07:53:18.000Z | 2015-09-16T07:53:18.000Z | mungo/diagram/__init__.py | PapenfussLab/Mungo | 02c5b0e48ecd28596cb9481b282753859f47fed6 | [
"Artistic-2.0"
] | null | null | null | mungo/diagram/__init__.py | PapenfussLab/Mungo | 02c5b0e48ecd28596cb9481b282753859f47fed6 | [
"Artistic-2.0"
] | 3 | 2016-01-02T16:34:59.000Z | 2021-07-21T04:21:55.000Z | """
diagram module
"""
from reportlab.graphics import renderPDF, renderSVG
from reportlab.lib import colors
from reportlab.graphics.shapes import *
from reportlab.graphics.charts.textlabels import Label
from Glyphs import *
DPI = 72
# Page dimensions are in points (DPI = 72 per inch, defined above).
class A4Portrait:
"""A4 portrait dimensions"""
margin = DPI
height = 841.89 # 29.7 cm * 72 / 2.54
width = 595.28 # 21 cm * 72 / 2.54
class A4Landscape:
"""A4 landscape dimensions"""
margin = DPI
height = 595.28 # 21 cm * 72 / 2.54
width = 841.89 # 29.7 cm * 72 / 2.54
class Mapping:
    """Linear map between pixel coordinates and feature coordinates."""

    def __init__(self, x0, x1, start, end, flipped=False):
        """Constructor

        @param x0: Left-most pixel coord
        @param x1: Right-most pixel coord
        @param start: Left-most feature coord
        @param end: Right-most feature coord
        @param flipped: Flip coords (default: False)
        """
        self.x0 = x0
        self.x1 = x1
        self.start = start
        self.end = end
        self.scale = float(x1 - x0) / float(end - start)
        self.flipped = flipped
        if flipped:
            # Reverse direction: swap pixel endpoints and negate the slope.
            self.x0, self.x1 = self.x1, self.x0
            self.scale = -self.scale

    def flip(self):
        """Reverse the mapping direction in place."""
        self.flipped = not self.flipped
        self.x0, self.x1 = self.x1, self.x0
        self.scale = -self.scale

    def __call__(self, i):
        """Return the pixel coordinate of feature coordinate *i*."""
        return self.x0 + (i - self.start) * self.scale

    def shift(self, dx, newStart):
        """Shift page view; end-start is preserved; scale is not changed.

        @param dx: Pixels to shift x0 & x1 by
        @param newStart: New feature start coord
        """
        self.x0 = self.x0 + dx
        self.x1 = self.x1 + dx
        span = self.end - self.start
        self.start = newStart
        self.end = newStart + span

    @staticmethod
    def fromScale(x0, start, end, scale, flipped=False):
        """Build a Mapping with a fixed scale instead of a fixed right edge.

        @param x0: Left-most pixel coord
        @param start: Left-most feature coord
        @param end: Right-most feature coord
        @param scale: Pixels per feature unit
        @param flipped: Flip coords (default: False)
        """
        return Mapping(x0, int(x0 + scale * (end - start)), start, end,
                       flipped=flipped)
class UniformMapping:
    """Map genes to uniformly spaced pixel positions (gene lengths ignored)."""

    def __init__(self, x0, x1, genes, flipped=False):
        """Constructor

        @param x0: Left-most pixel coord
        @param x1: Right-most pixel coord
        @param genes: List of genes. Each gene must be an object with a start attribute
        @param flipped: Flip coords (default: False)
        """
        self.x0 = x0
        self.x1 = x1
        self.flipped = flipped
        self.starts = sorted(g.start for g in genes)
        if self.flipped:
            self.starts.reverse()
        # Map each start coordinate to its ordinal slot.
        self.positions = dict(zip(self.starts, range(len(self.starts))))
        self.start = self.starts[0]
        self.end = self.starts[-1]

    def flip(self):
        """Flip the coords"""
        self.flipped = not self.flipped
        # BUG FIX: dict.keys() is a view on Python 3 and has no .sort();
        # building a sorted list is identical on Python 2.
        self.starts = sorted(self.positions)
        if self.flipped:
            self.starts.reverse()
        self.positions = dict(zip(self.starts, range(len(self.starts))))

    def __call__(self, start):
        """Return the pixel position of the gene whose start coord is *start*."""
        return self.x0 + self.positions[start] / float(len(self.positions)) * (self.x1 - self.x0)

    def __getitem__(self, i):
        """Return the pixel position of the i-th gene (in current order)."""
        return self(self.starts[i])
class UniformMapping2:
    """Piecewise-linear map giving every gene an equal-width pixel slot.

    Each gene contributes two breakpoints (start and end); pixel space is
    divided evenly between them and positions are interpolated linearly.
    """

    def __init__(self, x0, x1, genes, flipped=False):
        self.x0 = x0
        self.x1 = x1
        self.flipped = flipped
        n = 2 * len(genes)
        dx = (x1 - x0) / float(n - 1)
        self.pos = []
        self.x = []
        for i, g in enumerate(genes):
            self.pos.append(g.start)
            self.pos.append(g.end)
            self.x.append(x0 + i * dx)
            self.x.append(x0 + (i + 1) * dx)
        self.pos.sort()
        if self.flipped:
            self.pos.reverse()
        self.start = self.pos[0]
        self.end = self.pos[-1]

    def flip(self):
        # NOTE: only toggles the flag; pos/x are not rebuilt here (as before).
        self.flipped = not self.flipped

    def __call__(self, p):
        # Find the first breakpoint beyond p, then interpolate in that segment.
        # BUG FIX: xrange does not exist on Python 3; range is equivalent here.
        for i in range(len(self.pos)):
            if p < self.pos[i]:
                break
        return self.x[i - 1] + (self.x[i] - self.x[i - 1]) / (self.pos[i] - self.pos[i - 1]) * (p - self.pos[i - 1])
def addHRule(drawing, x1, x2, y, strokeColor=colors.black, strokeWidth=0.5):
    """Draw a horizontal rule from (x1, y) to (x2, y).

    @param drawing: reportlab Drawing to add the line to
    @param x1: left pixel coord
    @param x2: right pixel coord
    @param y: vertical pixel coord
    @param strokeColor: line colour
    @param strokeWidth: line width in points
    """
    drawing.add(Line(x1, y, x2, y,
                     strokeColor=strokeColor, strokeWidth=strokeWidth))
def addScale(drawing, xmap, y, start, end, tickLen=10, dx=3, dy=6,
             textAnchor='middle', boxAnchor='s', fontSize=12,
             strokeWidth=1, strokeColor=colors.black, scale=1.0, format='%ibp'):
    """Draw a scale bar between feature coords start/end with a distance label.

    @param drawing: reportlab Drawing to add shapes to
    @param xmap: callable mapping feature coords to pixel coords
    @param y: vertical pixel position of the bar
    @param start, end: feature coordinates of the bar's extent
    @param tickLen: length of the vertical end ticks (pixels)
    @param dx: horizontal inset of the bar ends; dy: label offset
    @param scale: divisor applied to the distance before formatting
        (e.g. 1000.0 to label in kb)
    @param format: %-style format string for the label text
    """
    x1 = xmap(start)
    x2 = xmap(end)
    line = Line(x1+dx, y, x2-dx, y,
                strokeWidth=strokeWidth, strokeColor=strokeColor)
    drawing.add(line)
    leftTick = Line(x1+dx, y-0.5*tickLen, x1+dx, y+0.5*tickLen,
                    strokeWidth=strokeWidth, strokeColor=strokeColor)
    drawing.add(leftTick)
    rightTick = Line(x2-dx, y-0.5*tickLen, x2-dx, y+0.5*tickLen,
                     strokeWidth=strokeWidth, strokeColor=strokeColor)
    drawing.add(rightTick)
    label = Label()
    label.setOrigin(0.5*(x1+x2), y+dy)
    distance = float(end-start)/scale
    # BUG FIX: the distance was divided by `scale` a second time inside the
    # format call, so any scale other than 1.0 produced a wrong label.
    label.setText(format % distance)
    label.fontSize = fontSize
    label.textAnchor = textAnchor
    label.boxAnchor = boxAnchor
    drawing.add(label)


# Backwards-compatible alias.
addRuler = addScale
def tick_generator(start, end, n=10, convert=None):
    """Yield n+1 evenly spaced tick positions from start to end inclusive.

    @param start: first tick position
    @param end: last tick position
    @param n: number of intervals (n+1 ticks are produced)
    @param convert: optional callable applied to each tick (e.g. int)
    """
    step = float(end - start) / n
    # BUG FIX: xrange is Python 2 only; range behaves identically here.
    for i in range(n + 1):
        tick = start + i * step
        if convert:
            tick = convert(tick)
        yield tick
def addAxis(drawing, xmap, y, strokeWidth=1, minorStrokeWidth=0.5,
tickDir='down', autoTicks=False, nTicks=20, tickLen=5, fontSize=10, nMinorTicks=80,
minorTickLen=2, angle=0, dx=0, dy=-2, textAnchor='middle', boxAnchor=None,
scale=1.0, format='%i'):
"""Add a horizontal axis to the drawing.
To do: Round tick positions
"""
# Axis baseline across the full pixel extent of the mapping.
line = Line(xmap.x0, y, xmap.x1, y, strokeWidth=strokeWidth)
drawing.add(line)
# Default label anchor depends on which side the ticks point.
if not boxAnchor:
if tickDir=='down':
boxAnchor = 'n'
else:
boxAnchor = 's'
# signum converts tickDir into a vertical direction multiplier.
signum = {'up': -1, 'down': 1}[tickDir]
if nTicks>0:
# Major ticks with numeric labels (values divided by `scale`).
ticks = tick_generator(xmap.start, xmap.end, n=nTicks, convert=int)
for p in ticks:
x = xmap(p)
line = Line(x, y, x, y-signum*tickLen, strokeWidth=strokeWidth)
drawing.add(line)
s = Label()
s.setOrigin(x, y-signum*tickLen)
s.setText(format % (p/scale))
s.dx = dx
s.dy = signum*dy
s.fontName = 'Helvetica'
s.fontSize = fontSize
s.textAnchor = textAnchor
s.boxAnchor = boxAnchor
s.angle = angle
drawing.add(s)
# Minor ticks: shorter, thinner, unlabelled.
# NOTE(review): minor ticks are drawn even when nTicks<=0; the autoTicks
# parameter is accepted but never used — TODO confirm intent.
minorticks = tick_generator(xmap.start, xmap.end, n=nMinorTicks, convert=int)
for p in minorticks:
x = xmap(p)
line = Line(x, y, x, y-signum*minorTickLen, strokeWidth=minorStrokeWidth)
drawing.add(line)
# NOTE(review): this helper is unfinished (see docstring). The first
# parameter is named `self` but is never used, and the body references
# `drawing` and `strokeWidth`, neither of which is defined in this scope,
# so calling it raises NameError. Intended signature is presumably
# (drawing, xmap, y, strokeWidth=1) like addAxis above — TODO confirm.
def addUniformAxis(self, xmap, y):
"""Add a horizontal axis suitable for uniformly a spaced gene map to the drawing.
Not yet finished.
"""
line = Line(xmap.x0, y, xmap.x1, y, strokeWidth=strokeWidth)
drawing.add(line)
def addLabel(drawing, x, y, text, fontName='Helvetica', fontSize=11, dy=0,
             angle=0, boxAnchor='sw', textAnchor='start'):
    """Add a text label to the drawing with convenient defaults.

    Unlike most helpers in this module, this one takes pixel coordinates
    directly rather than going through an xmap.
    """
    lab = Label()
    lab.setText(text)
    lab.setOrigin(x, y)
    lab.fontName = fontName
    lab.fontSize = fontSize
    lab.boxAnchor = boxAnchor
    lab.textAnchor = textAnchor
    lab.dy = dy
    lab.angle = angle
    drawing.add(lab)
def addBlock(drawing, xmap, y, block, height=10, width=None, fillColor=colors.red, strokeColor=None):
    """Add a coloured rectangle covering *block* (an object with start/end).

    If *width* is falsy it is derived from the block's extent via xmap.
    """
    left = xmap(block.start)
    if not width:
        width = xmap(block.end) - xmap(block.start)
    rect = Rect(left, y, width, height,
                strokeColor=strokeColor, fillColor=fillColor)
    drawing.add(rect)
# Draw a feature glyph whose pixel length is fixed (ignores feature.end);
# orientation follows feature.strand.
def addFixedLengthFeature(drawing, xmap, y, feature, length, glyph=Arrow,
fillColor=colors.red, strokeColor=None,
height=20, fontSize=14, labeldy=3, labelAngle=90, textAnchor='start',
boxAnchor='w', aspectRatio=0.5, wmin=3, wNoTail=6):
x1 = xmap(feature.start)
x2 = x1 + length
# Reverse-strand features point the other way.
if feature.strand=='-':
x1,x2 = x2,x1
g = glyph()
g.x = x1
g.y = y+height/2
g.height = height
# Negative length encodes direction for the glyph.
g.length = x2-x1
g.fillColor = fillColor
# Default the outline to the fill colour when none given.
if strokeColor:
g.strokeColor = strokeColor
else:
g.strokeColor = fillColor
g.fontSize = fontSize
g.label = feature.name
g.labeldy = labeldy
g.labelAngle = labelAngle
g.textAnchor = textAnchor
g.boxAnchor = boxAnchor
g.aspectRatio = aspectRatio
g.wmin = wmin
g.wNoTail = wNoTail
drawing.add(g)
def addFeature(drawing, xmap, y, feature, glyph=Arrow,
fillColor=colors.red, strokeColor=None, strokeWidth=0,
height=20, fontSize=14, labeldx=0, labeldy=3, labelAngle=90, textAnchor='start',
boxAnchor='w', aspectRatio=0.5, wmin=3, wNoTail=6):
"""Adds a feature (typically an arrow) with label to the drawing"""
# Map feature extent to pixels; reverse strand swaps the endpoints so the
# glyph points the other way.
if feature.strand=='+':
x1,x2 = xmap(feature.start), xmap(feature.end)
else:
x2,x1 = xmap(feature.start), xmap(feature.end)
g = glyph()
g.x = x1
g.y = y+height/2
g.height = height
g.length = x2-x1
g.fillColor = fillColor
g.strokeWidth = strokeWidth
# Default the outline to the fill colour when none given.
if strokeColor:
g.strokeColor = strokeColor
else:
g.strokeColor = fillColor
g.fontSize = fontSize
g.label = feature.name
g.labeldx = labeldx
g.labeldy = labeldy
g.labelAngle = labelAngle
g.textAnchor = textAnchor
g.boxAnchor = boxAnchor
g.aspectRatio = aspectRatio
g.wmin = wmin
g.wNoTail = wNoTail
drawing.add(g)
def addCompoundFeature(drawing, xmap, y, gene,
strokeColor=None, fillColor=colors.blue,
intronColor=colors.blue, intronWidth=0.5,
glyph=Block, height=12, utrHeight=6,
labeldy=10, fontSize=10, textAnchor='middle', boxAnchor='s'):
"""Adds a compund feature to the drawing.
A compound feature is typically several exons joined by zig-zag lines."""
# Apex height of the intron zig-zag above the baseline.
rise = height + utrHeight
intronStarts = [None]
intronEnds = []
heights = []
# Pass 1: draw exon/UTR blocks and record intron boundaries.
for exon in gene:
x1,x2 = xmap(exon.start), xmap(exon.end)
kind = exon.kind.lower()
if kind in ['exon', 'utr']:
intronStarts.append(exon.end)
intronEnds.append(exon.start)
g = glyph()
g.x = x1
g.y = y+height/2
# UTR segments are drawn shorter than coding exons.
if exon.kind.lower()=='exon':
g.height = height
heights.append(height)
else:
g.height = utrHeight
heights.append(utrHeight)
g.length = x2-x1
g.fillColor = fillColor
if strokeColor:
g.strokeColor = strokeColor
else:
g.strokeColor = fillColor
g.fontSize = fontSize
drawing.add(g)
# Pass 2: join consecutive blocks with two-segment zig-zag intron lines.
for i,(intronStart,intronEnd) in enumerate(zip(intronStarts[1:], intronEnds[1:])):
x1 = xmap(intronStart)
x2 = xmap(0.5*(intronStart+intronEnd))
x3 = xmap(intronEnd)
# if abs(x3-x1)<3: continue
# print intronStart,intronEnd,heights[i],heights[i+1]
y1 = y+heights[i]/2+height/2
y2 = y+rise
y3 = y+heights[i+1]/2+height/2
line1 = Line(x1,y1,x2,y2,strokeColor=intronColor,strokeWidth=intronWidth)
line2 = Line(x2,y2,x3,y3,strokeColor=intronColor,strokeWidth=intronWidth)
drawing.add(line1)
drawing.add(line2)
# Draw arrows
# Direction of the strand arrow also depends on whether the map is flipped.
if xmap.flipped:
signum = -1
else:
signum = 1
if gene.strand=='+':
x1 = xmap(gene.end)
x2 = x1 + signum*15
x3 = x1 + signum*10
y1 = y + 0.5*height
y2 = y + 0.75*height
y3 = y + 0.25*height
line1 = Line(x1,y1,x2,y1,strokeColor=intronColor,strokeWidth=intronWidth)
line2 = Line(x2,y1,x3,y2,strokeColor=intronColor,strokeWidth=intronWidth)
line3 = Line(x2,y1,x3,y3,strokeColor=intronColor,strokeWidth=intronWidth)
drawing.add(line1)
drawing.add(line2)
drawing.add(line3)
else:
x1 = xmap(gene.start)
x2 = x1 - signum*15
x3 = x1 - signum*10
y1 = y + 0.5*height
y2 = y + 0.75*height
y3 = y + 0.25*height
line1 = Line(x1,y1,x2,y1,strokeColor=intronColor,strokeWidth=intronWidth)
line2 = Line(x2,y1,x3,y2,strokeColor=intronColor,strokeWidth=intronWidth)
line3 = Line(x2,y1,x3,y3,strokeColor=intronColor,strokeWidth=intronWidth)
drawing.add(line1)
drawing.add(line2)
drawing.add(line3)
# if gene has attribute name...
# Centred gene-name label above the feature.
label = Label()
label.setText(gene.name)
pos = 0.5*(gene.start+gene.end)
x = xmap(pos)
label.setOrigin(x,y)
label.dy = labeldy
label.textAnchor = textAnchor
label.boxAnchor = boxAnchor
drawing.add(label)
def addPointyCompoundFeature(drawing, xmap, y, gene,
                             strokeColor=None, fillColor=colors.blue, intronColor=colors.blue,
                             glyph=PointyBlock, height=12, utrHeight=6, rise=8,
                             labeldy=10, fontSize=10, textAnchor='middle', boxAnchor='s'):
    """Adds a pointy compound feature to the drawing. This is typically
    several exons joined by a straight baseline, with pointed glyphs
    indicating strand direction."""
    # Baseline spanning the whole gene; strand reverses the endpoints.
    if gene.strand == '+':
        x1, x2 = xmap(gene.start), xmap(gene.end)
    else:
        x2, x1 = xmap(gene.start), xmap(gene.end)
    y = y + height/2
    y1 = y
    line = Line(x1, y1, x2, y1, strokeColor=intronColor)
    drawing.add(line)
    # One glyph per exon/UTR segment; UTRs are drawn shorter.
    for exon in gene:
        if exon.strand == '+':
            x1, x2 = xmap(exon.start), xmap(exon.end)
        else:
            x2, x1 = xmap(exon.start), xmap(exon.end)
        g = glyph()
        g.x = x1
        g.y = y
        if exon.kind.lower() == 'utr':
            g.height = utrHeight
        else:
            g.height = height
        g.length = x2 - x1
        g.fillColor = fillColor
        # Default the outline to the fill colour when none given.
        if strokeColor:
            g.strokeColor = strokeColor
        else:
            g.strokeColor = fillColor
        g.fontSize = fontSize
        drawing.add(g)
    # Centred gene-name label.
    label = Label()
    label.setText(gene.name)
    # BUG FIX: the label origin must be in pixel coordinates; previously the
    # feature-coordinate midpoint was used directly, placing the label at the
    # wrong x (compare with addCompoundFeature, which maps through xmap).
    x = xmap(0.5 * (gene.start + gene.end))
    label.setOrigin(x, y)
    label.dy = labeldy
    label.textAnchor = textAnchor
    label.boxAnchor = boxAnchor
    drawing.add(label)
def addCropMarks(drawing, xsize, ysize, margin, L=10, verbose=False):
"""Add crop marks to the drawing. This is helpful for lining up several pdfs in Illustrator."""
# Inner corners of the printable area.
x1 = margin
x2 = xsize-margin
y1 = margin
y2 = ysize-margin
# NOTE(review): Python 2 print statements (module also uses xrange);
# this function is not Python 3 compatible as written.
if verbose:
print x1,y1
print x2,y2
# Two short lines (one horizontal, one vertical) at each of the 4 corners.
drawing.add(Line(x1,y1,x1+L,y1,strokeColor=colors.black))
drawing.add(Line(x1,y1,x1,y1+L,strokeColor=colors.black))
drawing.add(Line(x2,y2,x2-L,y2,strokeColor=colors.black))
drawing.add(Line(x2,y2,x2,y2-L,strokeColor=colors.black))
drawing.add(Line(x1,y2,x1+L,y2,strokeColor=colors.black))
drawing.add(Line(x1,y2,x1,y2-L,strokeColor=colors.black))
drawing.add(Line(x2,y1,x2-L,y1,strokeColor=colors.black))
drawing.add(Line(x2,y1,x2,y1+L,strokeColor=colors.black))
| 30.992308 | 101 | 0.605361 |
acf2ab8b4b60de207e95b1b7b8bf5b741e8bf076 | 1,735 | py | Python | benchmark/startPyquil2066.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | benchmark/startPyquil2066.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | benchmark/startPyquil2066.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | # qubit number=4
# total number=34
import pyquil
from pyquil.api import local_forest_runtime, QVMConnection
from pyquil import Program, get_qc
from pyquil.gates import *
import numpy as np
conn = QVMConnection()
def make_circuit()-> Program:
prog = Program() # circuit begin
prog += H(3) # number=19
prog += CZ(0,3) # number=20
prog += H(3) # number=21
prog += H(3) # number=24
prog += CZ(0,3) # number=25
prog += H(3) # number=26
prog += X(3) # number=17
prog += CNOT(0,3) # number=18
prog += H(3) # number=31
prog += CZ(0,3) # number=32
prog += H(3) # number=33
prog += H(1) # number=2
prog += H(2) # number=3
prog += H(3) # number=4
prog += Y(3) # number=12
prog += H(0) # number=5
prog += H(1) # number=6
prog += Y(1) # number=29
prog += H(2) # number=7
prog += H(1) # number=30
prog += H(3) # number=8
prog += H(0) # number=9
prog += Y(2) # number=10
prog += Y(2) # number=11
prog += SWAP(3,0) # number=22
prog += SWAP(3,0) # number=23
prog += SWAP(1,0) # number=27
prog += SWAP(1,0) # number=28
# circuit end
return prog
def summrise_results(bitstrings) -> dict:
    """Count how often each bitstring occurs.

    (Name spelling kept for compatibility with existing callers.)

    Args:
        bitstrings: iterable of measurement bitstrings.

    Returns:
        dict: mapping bitstring -> occurrence count.
    """
    counts = {}
    for bits in bitstrings:
        # dict.get with a default replaces the original explicit None check.
        counts[bits] = counts.get(bits, 0) + 1
    return counts
# Script entry point: run the circuit 1024 times on a 4-qubit QVM and
# write the bitstring frequency summary to a CSV file.
if __name__ == '__main__':
prog = make_circuit()
qvm = get_qc('4q-qvm')
results = qvm.run_and_measure(prog,1024)
# Stack per-qubit results into rows of per-shot measurements.
bitstrings = np.vstack([results[i] for i in qvm.qubits()]).T
bitstrings = [''.join(map(str, l)) for l in bitstrings]
writefile = open("../data/startPyquil2066.csv","w")
print(summrise_results(bitstrings),file=writefile)
writefile.close()
| 24.785714 | 64 | 0.565994 |
acf2ac3c4816d71a55662bff20b51d99d6c215bd | 2,495 | py | Python | main.py | zoni/ulauncher-hex | 3fd2464f1f819d5a283649b20c11b2493cf8a1be | [
"MIT"
] | null | null | null | main.py | zoni/ulauncher-hex | 3fd2464f1f819d5a283649b20c11b2493cf8a1be | [
"MIT"
] | null | null | null | main.py | zoni/ulauncher-hex | 3fd2464f1f819d5a283649b20c11b2493cf8a1be | [
"MIT"
] | null | null | null | import gi; gi.require_version('Gdk', '3.0') # https://github.com/Ulauncher/Ulauncher/issues/703
from ulauncher.api.client.Extension import Extension
from ulauncher.api.client.EventListener import EventListener
from ulauncher.api.shared.event import KeywordQueryEvent, ItemEnterEvent
from ulauncher.api.shared.item.ExtensionResultItem import ExtensionResultItem
from ulauncher.api.shared.action.RenderResultListAction import RenderResultListAction
from ulauncher.api.shared.action.HideWindowAction import HideWindowAction
from ulauncher.api.shared.action.OpenUrlAction import OpenUrlAction
import requests
# Ulauncher extension entry class: wires keyword queries to the listener below.
class HexExtension(Extension):
def __init__(self):
super(HexExtension, self).__init__()
self.subscribe(KeywordQueryEvent, KeywordQueryEventListener())
class KeywordQueryEventListener(EventListener):
    """Handles keyword queries: an empty query offers the Elixir docs,
    otherwise packages matching the query are searched on hex.pm."""

    def on_event(self, event, extension):
        """Return the result list for the current query string."""
        query = event.get_argument() or str()
        if not query.strip():
            # No search term yet: offer a shortcut to the Elixir docs.
            return RenderResultListAction([
                ExtensionResultItem(icon='images/icon.png',
                                    name='Open Elixir docs',
                                    on_enter=OpenUrlAction('https://hexdocs.pm/elixir'))
            ])
        search_url = 'https://hex.pm/api/packages?search=%s&sort=recent_downloads'
        # ROBUSTNESS FIX: a timeout keeps a slow network from hanging the
        # launcher indefinitely (requests has no default timeout).
        api_results = requests.get(search_url % query, timeout=10).json()
        result_items = [self.build_result_item(package) for package in api_results]
        return RenderResultListAction(result_items)

    def build_result_item(self, package):
        """Build one result row for *package* (a hex.pm API package dict).

        If the package has hosted docs, Enter opens a sub-menu with both
        the package page and the documentation; otherwise it opens the
        package page directly.
        """
        primary_action = OpenUrlAction(package['html_url'])
        if package['docs_html_url'] is not None:
            options = [
                ExtensionResultItem(icon='images/hex.png',
                                    name='View package',
                                    on_enter=OpenUrlAction(package['html_url'])),
                ExtensionResultItem(icon='images/hexdocs.png',
                                    name='View documentation',
                                    on_enter=OpenUrlAction(package['docs_html_url']))
            ]
            primary_action = RenderResultListAction(options)
        return ExtensionResultItem(icon='images/icon.png',
                                   name=package['name'],
                                   description=package['meta']['description'],
                                   on_enter=primary_action)
# Script entry point: start the extension's event loop.
if __name__ == '__main__':
HexExtension().run()
| 40.241935 | 95 | 0.625651 |
acf2ac5a0363989ebfa9f7f16ca8dfabf4a92946 | 1,107 | py | Python | applications/zcomx/modules/facepy-1.0.6/exceptions.py | zcomx/zco.mx | 70a7372af5787c2e4dea14b25bab0bbb2b959881 | [
"BSD-3-Clause"
] | null | null | null | applications/zcomx/modules/facepy-1.0.6/exceptions.py | zcomx/zco.mx | 70a7372af5787c2e4dea14b25bab0bbb2b959881 | [
"BSD-3-Clause"
] | null | null | null | applications/zcomx/modules/facepy-1.0.6/exceptions.py | zcomx/zco.mx | 70a7372af5787c2e4dea14b25bab0bbb2b959881 | [
"BSD-3-Clause"
] | null | null | null | class FacepyError(Exception):
"""Base class for exceptions raised by Facepy."""
class FacebookError(FacepyError):
"""Exception for Facebook errors."""
def __init__(self, message=None, code=None, error_data=None, error_subcode=None,
is_transient=None, error_user_title=None, error_user_msg=None):
self.message = message
self.code = code
self.error_data = error_data
self.error_subcode = error_subcode
self.is_transient = is_transient
self.error_user_title = error_user_title
self.error_user_msg = error_user_msg
if self.code:
message = '[%s] %s' % (self.code, self.message)
super(FacebookError, self).__init__(message)
# Specialised error types; callers catch these to distinguish failure modes.
class OAuthError(FacebookError):
"""Exception for Facebook errors specifically related to OAuth."""
class HTTPError(FacepyError):
"""Exception for transport errors."""
class SignedRequestError(FacepyError):
"""Exception for invalid signed requests."""
class InternalFacebookError(FacebookError):
"""Exception for Facebook internal server error."""
| 29.918919 | 84 | 0.692864 |
acf2ac7085ed715f73c1b35118eb668ce31cf35d | 2,293 | py | Python | plasmaboundaries/magnetic_flux.py | Shimwell/plasma-boundaries | 68a186833c1a9a35fc768b042eaba76ef325c99d | [
"MIT"
] | null | null | null | plasmaboundaries/magnetic_flux.py | Shimwell/plasma-boundaries | 68a186833c1a9a35fc768b042eaba76ef325c99d | [
"MIT"
] | null | null | null | plasmaboundaries/magnetic_flux.py | Shimwell/plasma-boundaries | 68a186833c1a9a35fc768b042eaba76ef325c99d | [
"MIT"
] | null | null | null | import numpy as np
import sympy as sp
def derivatives(f, order):
    """Return the order-th pure x and y derivatives of *f*.

    Mixed (xy / yx) derivatives are not computed.

    Args:
        f (callable f(x, y)): function of the symbols x and y
        order (int): order of differentiation

    Returns:
        (sympy.Expr, sympy.Expr): (d^order f/dx^order, d^order f/dy^order)
    """
    x, y = sp.symbols("x y")
    expr = f(x=x, y=y)
    d_dx = sp.diff(expr, *([x] * order))
    d_dy = sp.diff(expr, *([y] * order))
    return d_dx, d_dy
def psi(X, Y, c_i, A, config, pkg='numpy'):
    """Computes the value of magnetic flux at point (X, Y)
    according to coefficients ci.

    Args:
        X (float or numpy.array): x coordinate
        Y (float or numpy.array): y coordinate
        c_i (list): list of floats, the ci coefficients
        A (float): plasma parameter
        config (str): shape of the plasma 'non-null', 'single-null',
            'double-null'.
        pkg (str, optional): if set to 'numpy' (resp. 'sympy'), numpy
            (resp. sympy) objects will be used. Defaults to 'numpy'.

    Raises:
        ValueError: If argument pkg is not in ['numpy', 'np', 'sympy', 'sp']

    Returns:
        float or numpy.array or sympy.Add: value(s) of magnetic flux
    """
    # Select the backend providing log().
    if pkg in ['numpy', 'np']:
        log = np.log
    elif pkg in ['sympy', 'sp']:
        log = sp.log
    else:
        raise ValueError("Unexpected string for argument pkg")

    # Homogeneous basis functions psi_1 .. psi_7 (up-down symmetric part).
    basis = [
        1,
        X**2,
        Y**2 - X**2*log(X),
        X**4 - 4*X**2*Y**2,
        2*Y**4 - 9*Y**2*X**2 + 3*X**4*log(X) - 12*X**2*Y**2*log(X),
        X**6 - 12*X**4*Y**2 + 8*X**2*Y**4,
        8*Y**6 - 140*Y**4*X**2 + 75*Y**2*X**4 - 15*X**6*log(X) +
        180*X**4*Y**2*log(X) - 120*X**2*Y**4*log(X),
    ]
    # Odd-in-Y basis functions psi_8 .. psi_12, used for single-null shapes.
    if config == 'single-null':
        basis += [
            Y,
            Y*X**2,
            Y**3 - 3*Y*X**2*log(X),
            3*Y*X**4 - 4*Y**3*X**2,
            8*Y**5 - 45*Y*X**4 - 80*Y**3*X**2*log(X) + 60*Y*X**4*log(X),
        ]
    # Particular solution plus the coefficient-weighted homogeneous part.
    particular = X**4/8 + A*(1/2*X**2*log(X) - X**4/8)
    homogeneous = sum([c_i[i]*basis[i] for i in range(len(c_i))])
    return particular + homogeneous
| 32.295775 | 78 | 0.536415 |
acf2ae381a90a375e657ab9a0374e567feb7a2b3 | 2,645 | py | Python | apps/shipments/serializers/shipment_action.py | kevingduck/transmission | c29ae529c02c885cdb0e64a35d7d4750ab1b8001 | [
"Apache-2.0"
] | 19 | 2018-09-04T14:49:01.000Z | 2020-06-09T22:13:10.000Z | apps/shipments/serializers/shipment_action.py | kevingduck/transmission | c29ae529c02c885cdb0e64a35d7d4750ab1b8001 | [
"Apache-2.0"
] | 50 | 2018-09-18T17:28:57.000Z | 2021-01-09T16:18:45.000Z | apps/shipments/serializers/shipment_action.py | kevingduck/transmission | c29ae529c02c885cdb0e64a35d7d4750ab1b8001 | [
"Apache-2.0"
] | 4 | 2019-12-15T13:44:18.000Z | 2021-06-09T20:39:54.000Z | """
Copyright 2019 ShipChain, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from datetime import datetime, timezone
from functools import partial
from django.conf import settings
from django_fsm import can_proceed
from enumfields import Enum
from rest_framework import exceptions
from rest_framework_json_api import serializers
from shipchain_common.authentication import is_internal_call
from shipchain_common.utils import UpperEnumField
from apps.shipments.models import Shipment, TransitState
# Each member wraps the corresponding Shipment transition method in a
# partial; validate_action_type below binds it to a shipment instance.
class ActionType(Enum):
PICK_UP = partial(Shipment.pick_up)
ARRIVAL = partial(Shipment.arrival)
DROP_OFF = partial(Shipment.drop_off)
# Validates a shipment state-transition request (pick up / arrival / drop off).
class ShipmentActionRequestSerializer(serializers.Serializer):
action_type = UpperEnumField(ActionType, lenient=True, ints_as_names=True)
tracking_data = serializers.CharField(required=False, allow_null=True)
document_id = serializers.CharField(required=False, allow_null=True)
raw_asset_physical_id = serializers.CharField(required=False, allow_null=True)
asset_physical_id = serializers.CharField(required=False, allow_null=True)
action_timestamp = serializers.DateTimeField(required=False)
# Reject transitions not permitted from the shipment's current FSM state.
def validate_action_type(self, action_type):
shipment = self.context['shipment']
action_type.value.func.__self__ = shipment # Hack for getting dynamic partial funcs to work w/ can_proceed
if not can_proceed(action_type.value.func):
# Bad state transition
raise exceptions.ValidationError(f'Action {action_type.name} not available while Shipment '
f'is in state {TransitState(shipment.state).name}')
return action_type
# Manual timestamps: internal callers only, and never in the future.
def validate_action_timestamp(self, action_timestamp):
if settings.PROFILES_ENABLED and not is_internal_call(self.context['request'], 'third-party-integrator'):
raise exceptions.ValidationError('Can only manually set timestamp for action on internal calls')
if action_timestamp > datetime.now(timezone.utc):
raise exceptions.ValidationError('Cannot set action for datetime in the future.')
return action_timestamp
| 44.830508 | 115 | 0.765974 |
acf2aeb24c21e5ad9f7d0468dd4de27907109e72 | 4,042 | py | Python | jpa/eclipselink.jpa.test/resource/weblogic/wls_exalogic_setup.py | Pandrex247/patched-src-eclipselink | 10bbc58df62fb4f4f7ac3d8cc531263d374f0d72 | [
"BSD-3-Clause"
] | null | null | null | jpa/eclipselink.jpa.test/resource/weblogic/wls_exalogic_setup.py | Pandrex247/patched-src-eclipselink | 10bbc58df62fb4f4f7ac3d8cc531263d374f0d72 | [
"BSD-3-Clause"
] | 2 | 2021-03-24T17:58:46.000Z | 2021-12-14T20:59:52.000Z | jpa/eclipselink.jpa.test/resource/weblogic/wls_exalogic_setup.py | Pandrex247/patched-src-eclipselink | 10bbc58df62fb4f4f7ac3d8cc531263d374f0d72 | [
"BSD-3-Clause"
] | null | null | null | ############################################################################
# Generic script applicable on any Operating Environments (Unix, Windows)
# ScriptName : wls_setup.py
# Properties : weblogic.properties test.properties
# Author : Kevin Yuan
############################################################################
# NOTE: this is a WebLogic WLST (Jython) script; names such as connect(),
# edit(), cd(), set() and cmo are injected by the WLST runtime.
# @TOKEN@ values are build-time placeholders substituted from the
# properties files before the script is executed.
#===========================================================================
# Connect to wls server
#===========================================================================
connect('@WL_USR@','@WL_PWD@','t3://@WL_HOST@:@WL_PORT@')
#===========================================================================
# Create and configure JTA Data Source and target it to the server.
#===========================================================================
edit()
startEdit()
cd('/')
cmo.createJDBCSystemResource('EclipseLinkDS')
cd('/JDBCSystemResources/EclipseLinkDS/JDBCResource/EclipseLinkDS')
cmo.setName('EclipseLinkDS')
cd('/JDBCSystemResources/EclipseLinkDS/JDBCResource/EclipseLinkDS/JDBCDataSourceParams/EclipseLinkDS')
set('JNDINames',jarray.array([String('jdbc/EclipseLinkDS')], String))
cd('/JDBCSystemResources/EclipseLinkDS/JDBCResource/EclipseLinkDS/JDBCDriverParams/EclipseLinkDS')
cmo.setUrl('@DBURL@')
cmo.setDriverName('@DBDRV@')
set('PasswordEncrypted','@DBPWD@')
cd('/JDBCSystemResources/EclipseLinkDS/JDBCResource/EclipseLinkDS/JDBCConnectionPoolParams/EclipseLinkDS')
cmo.setTestTableName('SQL SELECT 1 FROM DUAL')
cd('/JDBCSystemResources/EclipseLinkDS/JDBCResource/EclipseLinkDS/JDBCDriverParams/EclipseLinkDS/Properties/EclipseLinkDS')
cmo.createProperty('user')
cd('/JDBCSystemResources/EclipseLinkDS/JDBCResource/EclipseLinkDS/JDBCDriverParams/EclipseLinkDS/Properties/EclipseLinkDS/Properties/user')
cmo.setValue('@DBUSR@')
# the JTA data source keeps the default transaction protocol; the override
# below is intentionally disabled
#cd('/JDBCSystemResources/EclipseLinkDS/JDBCResource/EclipseLinkDS/JDBCDataSourceParams/EclipseLinkDS')
#cmo.setGlobalTransactionsProtocol('OnePhaseCommit')
cd('/SystemResources/EclipseLinkDS')
set('Targets',jarray.array([ObjectName('com.bea:Name=@TARGET_SERVER@,Type=Server')], ObjectName))
save()
activate()
#===========================================================================
# Create and configure Non-JTA Data Source and target it to the server.
#===========================================================================
edit()
startEdit()
cd('/')
cmo.createJDBCSystemResource('ELNonJTADS')
cd('/JDBCSystemResources/ELNonJTADS/JDBCResource/ELNonJTADS')
cmo.setName('ELNonJTADS')
cd('/JDBCSystemResources/ELNonJTADS/JDBCResource/ELNonJTADS/JDBCDataSourceParams/ELNonJTADS')
set('JNDINames',jarray.array([String('jdbc/ELNonJTADS')], String))
cd('/JDBCSystemResources/ELNonJTADS/JDBCResource/ELNonJTADS/JDBCDriverParams/ELNonJTADS')
cmo.setUrl('@DBURL@')
cmo.setDriverName('@DBDRV@')
set('PasswordEncrypted','@DBPWD@')
cd('/JDBCSystemResources/ELNonJTADS/JDBCResource/ELNonJTADS/JDBCConnectionPoolParams/ELNonJTADS')
cmo.setTestTableName('SQL SELECT 1 FROM DUAL')
cd('/JDBCSystemResources/ELNonJTADS/JDBCResource/ELNonJTADS/JDBCDriverParams/ELNonJTADS/Properties/ELNonJTADS')
cmo.createProperty('user')
cd('/JDBCSystemResources/ELNonJTADS/JDBCResource/ELNonJTADS/JDBCDriverParams/ELNonJTADS/Properties/ELNonJTADS/Properties/user')
cmo.setValue('@DBUSR@')
cd('/JDBCSystemResources/ELNonJTADS/JDBCResource/ELNonJTADS/JDBCDataSourceParams/ELNonJTADS')
# 'None' disables global (XA/JTA) transactions for this data source
cmo.setGlobalTransactionsProtocol('None')
cd('/SystemResources/ELNonJTADS')
set('Targets',jarray.array([ObjectName('com.bea:Name=@TARGET_SERVER@,Type=Server')], ObjectName))
save()
activate()
#===========================================================================
# Enable Exalogic Optimization
#===========================================================================
edit()
startEdit()
cd('/')
cmo.setExalogicOptimizationsEnabled(true)
save()
activate()
#===========================================================================
# Exit WLST.
#===========================================================================
exit()
| 36.089286 | 139 | 0.616774 |
acf2af31c5f33b4b3b82d48fc1f4224ee6025717 | 1,405 | py | Python | mediagoblin/plugins/subtitles/forms.py | gnu-mirror-unofficial/mediagoblin | 522a61b24a1b7767682eaf7b29c59e40a0a9b73f | [
"CC0-1.0"
] | 1 | 2021-09-21T02:24:43.000Z | 2021-09-21T02:24:43.000Z | mediagoblin/plugins/subtitles/forms.py | jgarte/mediagoblin-mirror | c4599508b02f2e61df3a97ff314766a62a3e5934 | [
"CC0-1.0"
] | null | null | null | mediagoblin/plugins/subtitles/forms.py | jgarte/mediagoblin-mirror | c4599508b02f2e61df3a97ff314766a62a3e5934 | [
"CC0-1.0"
] | 1 | 2021-09-21T02:25:20.000Z | 2021-09-21T02:25:20.000Z | # GNU MediaGoblin -- federated, autonomous media hosting
# Copyright (C) 2016 MediaGoblin contributors. See AUTHORS.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import wtforms
from mediagoblin.tools.translate import lazy_pass_to_ugettext as _
class CustomizeSubtitlesForm(wtforms.Form):
    """Form for editing the raw WebVTT text of a subtitle track.

    The single optional textarea holds the subtitle content itself.
    """
    subtitle = wtforms.TextAreaField(
        # mark the label for translation; the original bare parentheses
        # around the string were a no-op and left the label untranslated,
        # unlike the description below
        _('Subtitle'),
        [wtforms.validators.Optional()],
        description=_('Subtitles in <a href="https://en.wikipedia.org/wiki/WebVTT" target="_blank">WebVTT format</a>'))
class EditSubtitlesForm(wtforms.Form):
    """Form for uploading a new subtitle file (with its language) for a
    media entry."""
    subtitle_language = wtforms.StringField(
        # wrap labels in the lazy translation helper, consistent with the
        # descriptions elsewhere in this module
        _('Language'))
    subtitle_file = wtforms.FileField(
        _('File'),
        description=_('Subtitles in <a href="https://en.wikipedia.org/wiki/WebVTT" target="_blank">WebVTT format</a>'))
| 42.575758 | 119 | 0.740214 |
acf2af812a1a5de2c8c19f125ef0161889846ce3 | 3,471 | py | Python | depth_image_proc/launch/disparity.launch.py | Kettenhoax/image_pipeline | 633d259de8f16281e45fc7a2e6c0c8eebde9cb1b | [
"Apache-2.0"
] | 1 | 2021-03-22T15:35:15.000Z | 2021-03-22T15:35:15.000Z | depth_image_proc/launch/disparity.launch.py | Kettenhoax/image_pipeline | 633d259de8f16281e45fc7a2e6c0c8eebde9cb1b | [
"Apache-2.0"
] | 2 | 2021-09-21T07:02:41.000Z | 2021-09-30T06:51:57.000Z | depth_image_proc/launch/disparity.launch.py | Kettenhoax/image_pipeline | 633d259de8f16281e45fc7a2e6c0c8eebde9cb1b | [
"Apache-2.0"
] | 2 | 2022-01-07T04:15:47.000Z | 2022-02-11T21:16:47.000Z | # Copyright (c) 2008, Willow Garage, Inc.
# All rights reserved.
#
# Software License Agreement (BSD License 2.0)
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of the Willow Garage nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import os
from ament_index_python.packages import get_package_share_directory
from launch import LaunchDescription
import launch_ros.actions
import launch_ros.descriptions
def generate_launch_description():
    """Build the launch description for the depth-to-disparity demo.

    Starts the realsense camera driver, a composable-node container hosting
    ``depth_image_proc::DisparityNode`` (with the camera's depth topics
    remapped onto the left/right stereo topics the node expects), and rviz2
    preloaded with this package's disparity display config.
    """
    default_rviz = os.path.join(get_package_share_directory('depth_image_proc'),
                                'launch', 'rviz/disparity.rviz')
    return LaunchDescription([
        # install realsense from https://github.com/intel/ros2_intel_realsense
        launch_ros.actions.Node(
            # 'executable' (not the pre-Foxy 'node_executable') matches the
            # keyword style already used by ComposableNodeContainer below
            package='realsense_ros2_camera', executable='realsense_ros2_camera',
            output='screen'),
        # we use realsense camera for test, realsense not support left and right topic
        # so we remap to depth image only for interface test.
        launch_ros.actions.ComposableNodeContainer(
            name='container',
            namespace='',
            package='rclcpp_components',
            executable='component_container',
            composable_node_descriptions=[
                # Driver itself
                launch_ros.descriptions.ComposableNode(
                    package='depth_image_proc',
                    plugin='depth_image_proc::DisparityNode',
                    name='disparity_node',
                    remappings=[('left/image_rect', '/camera/depth/image_rect_raw'),
                                ('right/camera_info', '/camera/depth/camera_info'),
                                ('left/disparity', '/camera/left/disparity')]
                ),
            ],
            output='screen',
        ),
        # TODO: rviz could not display disparity(stereo_msgs)
        # run stereo_view for display after image_view be ported
        launch_ros.actions.Node(
            package='rviz2', executable='rviz2', output='screen',
            arguments=['--display-config', default_rviz]),
    ])
| 44.5 | 86 | 0.687986 |
acf2afbe3c89702e4a437df65f2e9f1643d1d1a0 | 2,465 | py | Python | encoding/autoencoder/catbox.py | FokjeM/AMQ-Scripts | 95eaa071370610ca1a99f29ccca9a78c76521427 | [
"MIT"
] | 15 | 2019-05-30T07:27:52.000Z | 2022-01-04T22:23:41.000Z | encoding/autoencoder/catbox.py | FokjeM/AMQ-Scripts | 95eaa071370610ca1a99f29ccca9a78c76521427 | [
"MIT"
] | 8 | 2019-05-17T05:44:33.000Z | 2021-03-30T21:05:27.000Z | encoding/autoencoder/catbox.py | FokjeM/AMQ-Scripts | 95eaa071370610ca1a99f29ccca9a78c76521427 | [
"MIT"
] | 21 | 2019-05-17T01:15:53.000Z | 2022-03-06T04:20:03.000Z | import requests
import datetime
import os
import re
import sys
def upload(file):
    """Upload a local .webm or .mp3 file to catbox.moe.

    The file is temporarily renamed to an ASCII-safe name
    ("temp<epoch><ext>") because the original name may contain characters
    that break the upload; the original name is always restored, even if
    the POST raises (the original code leaked the handle and left the file
    renamed on error).

    Returns the catbox URL text on success, None on failure or when the
    extension is not supported.
    """
    host = "https://catbox.moe/user/api.php"
    origname = file
    # endswith() is clearer than the original anchored regexes and equivalent here
    if file.endswith(".webm"):
        mime_type = "video/webm"
        ext = ".webm"
    elif file.endswith(".mp3"):
        mime_type = "audio/mpeg"
        ext = ".mp3"
    else:
        return None
    payload = {'reqtype': 'fileupload'}
    if userhash:  # module-level hash read from catbox.config (may be None)
        payload['userhash'] = userhash
    timestamp = str(int(datetime.datetime.now().timestamp()))
    file = "temp" + timestamp + ext
    os.rename(origname, file)  # fixes special character errors
    try:
        with open(file, 'rb') as f:
            files = {'fileToUpload': (file, f, mime_type)}
            response = requests.post(host, data=payload, files=files)
    finally:
        # always restore the original filename, even if the POST raised
        os.rename(file, origname)
    if response.ok:
        print("upload success: %s" % response.text)
        return response.text
    else:
        print("upload failed: %s" % response.text)
        return None
def upload_from_url(url):
    """Ask catbox.moe to mirror a remote URL.

    Returns the catbox URL text on success, None on failure. When the
    mirrored file's extension differs from the source's, the pair is logged
    (best-effort) to catfail.txt.
    """
    print("mirroring %s to catbox" % url)
    host = "https://catbox.moe/user/api.php"
    payload = {"reqtype": "urlupload", "url": url}
    if userhash:  # module-level hash read from catbox.config (may be None)
        payload["userhash"] = userhash
    response = requests.post(host, data=payload)
    if not response.ok:
        print("mirror failed: %s" % response.text)
        return None
    print("mirror success: %s" % response.text)
    try:
        caturl = response.text
        source_extension = re.match(r".*\.(\w+)$", url).group(1)
        cat_extension = re.match(r".*\.(\w+)$", caturl).group(1)
        if cat_extension != source_extension:
            # context manager guarantees the log file is closed
            with open("catfail.txt", "a", encoding="utf-8") as f:
                f.write("%s -> %s\n" % (url, caturl))
            print("%s -> %s" % (url, caturl))
    except (AttributeError, OSError):
        # best-effort logging only: a URL with no extension makes re.match
        # return None (-> AttributeError), and an unwritable log file must
        # not break the mirror itself
        pass
    return response.text
# Read the optional catbox userhash from catbox.config next to this script;
# uploads are anonymous when no hash is configured.
userhash = None
try:
    # renamed from "file" to avoid shadowing the builtin
    with open(os.path.join(sys.path[0], "catbox.config")) as config_file:
        match = re.search(r"userhash\s?=[ \t\r\f\v]*(.+)$", config_file.read(), re.I | re.M)
        if match is None:
            print("catbox.py: no userhash present")
        else:
            userhash = match.group(1)
except OSError:
    # only a missing/unreadable file means "no config"; any other exception
    # (e.g. a regex bug) should surface instead of being silently reported
    # as a missing config file
    print("catbox.py: no config file present")
if __name__ == "__main__":
    # interactive entry point: prompt for a local file path and upload it
    f = input("select file to upload")
    print(upload(f))
| 31.202532 | 90 | 0.56146 |
acf2b0305a5be3d84610634dc08cf93b052a2256 | 49,199 | py | Python | Q/questionnaire/models/models_customizations.py | ES-DOC/esdoc-questionnaire | 9301eda375c4046323265b37ba96d94c94bf8b11 | [
"MIT"
] | null | null | null | Q/questionnaire/models/models_customizations.py | ES-DOC/esdoc-questionnaire | 9301eda375c4046323265b37ba96d94c94bf8b11 | [
"MIT"
] | 477 | 2015-01-07T18:22:27.000Z | 2017-07-17T15:05:48.000Z | Q/questionnaire/models/models_customizations.py | ES-DOC/esdoc-questionnaire | 9301eda375c4046323265b37ba96d94c94bf8b11 | [
"MIT"
] | null | null | null | ####################
# ES-DOC CIM Questionnaire
# Copyright (c) 2017 ES-DOC. All rights reserved.
#
# University of Colorado, Boulder
# http://cires.colorado.edu/
#
# This project is distributed according to the terms of the MIT license [http://www.opensource.org/licenses/MIT].
####################
from collections import OrderedDict
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.db import models
from django.db.models.fields import FieldDoesNotExist
from django.utils.translation import ugettext_lazy as _
from uuid import uuid4
from Q.questionnaire import APP_LABEL, q_logger
from Q.questionnaire.q_fields import QPropertyTypes, QAtomicTypes, QUnsavedRelatedManager, allow_unsaved_fk, QJSONField
from Q.questionnaire.q_utils import EnumeratedType, EnumeratedTypeList, pretty_string, find_in_sequence, serialize_model_to_dict
from Q.questionnaire.q_utils import QPathNode, QError, validate_no_spaces, validate_no_bad_chars, validate_no_reserved_words, validate_no_profanities, BAD_CHARS_LIST
from Q.questionnaire.q_constants import *
#############
# constants #
#############
class CustomizationType(EnumeratedType):
    """A single type of customization: model, category, or property."""

    def __str__(self):
        # render as the bare type name, e.g. "MODEL"
        return "%s" % (self.get_name(),)
# the fixed set of customization types, accessible by name
# (e.g. CustomizationTypes.MODEL)
CustomizationTypes = EnumeratedTypeList([
    CustomizationType("MODEL", "Model Customization"),
    CustomizationType("CATEGORY", "Category Customization"),
    CustomizationType("PROPERTY", "Property Customization"),
])
def get_default_values_schema():
    """Return the JSON-Schema fragment that validates property "values".

    Navigates QCONFIG_SCHEMA down to the per-property "values" node
    (classes -> defined -> items -> properties -> defined -> items -> values).
    """
    node = QCONFIG_SCHEMA["properties"]["classes"]["properties"]["defined"]["items"]
    node = node["properties"]["properties"]["properties"]["defined"]["items"]
    return node["properties"]["values"]
######################
# get customizations #
######################
def get_new_customizations(project=None, ontology=None, model_proxy=None, **kwargs):
    """Build (but do not save) a complete customization hierarchy for *model_proxy*.

    Creates a QModelCustomization plus its category and property
    customizations, recursing into relationship targets for subform-style
    (and hierarchical) properties. Nothing is saved: cross-references are
    wired up via allow_unsaved_fk and the custom "allow_unsaved_*" related
    managers, and every object built is memoized in *customizations*.

    kwargs:
        key: the fully-qualified key under which this model is registered
        customizations: dict cache mapping keys to already-built customizations
    Returns the (possibly cached) QModelCustomization for *key*.
    """
    key = kwargs.pop("key")
    customizations = kwargs.pop("customizations", {})
    # TODO: CHANGE THIS TO USE GUIDS INSTEAD OF NAMES FOR KEYS
    # TODO: TRY TO REWRITE THIS TO USE "prefix" AGAIN (INSTEAD OF EXPLICIT "key")
    model_proxy_key = key
    if model_proxy_key not in customizations:
        model_customization = QModelCustomization(
            project=project,
            proxy=model_proxy,
        )
        model_customization.reset()
        customizations[model_proxy_key] = model_customization
    else:
        model_customization = customizations[model_proxy_key]
    category_customizations = []
    # TODO: IS THERE A MORE EFFICIENT WAY TO DO THIS?
    # gets _all_ of the categories that are relevant to this model...
    used_category_proxies = [p.category_proxy for p in model_proxy.property_proxies.all()]
    category_proxies = set(model_proxy.category_proxies.all())
    category_proxies.update(used_category_proxies)
    for category_proxy in category_proxies:
    # for category_proxy in model_proxy.category_proxies.all():
    # for catgegory_proxy in ontology.category_proxies.filter(is_meta=False):
        category_proxy_key = "{0}.{1}".format(model_proxy_key, category_proxy.key)
        with allow_unsaved_fk(QCategoryCustomization, ["model_customization"]):
            if category_proxy_key not in customizations:
                category_customization = QCategoryCustomization(
                    project=project,
                    proxy=category_proxy,
                    model_customization=model_customization,
                )
                category_customization.reset()
                customizations[category_proxy_key] = category_customization
            else:
                category_customization = customizations[category_proxy_key]
        category_customizations.append(category_customization)
    model_customization.category_customizations(manager="allow_unsaved_category_customizations_manager").add_potentially_unsaved(*category_customizations)
    property_customizations = []
    for property_proxy in model_proxy.property_proxies.all():
    # for property_proxy in model_proxy.property_proxies.filter(is_meta=False):
        # property_proxy_key = "{0}.{1}".format(model_proxy_key, property_proxy.name)
        property_proxy_key = "{0}.{1}".format(model_proxy_key, property_proxy.key)
        with allow_unsaved_fk(QPropertyCustomization, ["model_customization", "category_customization"]):
            # close this context manager before using the custom related manager
            # (too much hackery at once!)
            if property_proxy_key not in customizations:
                # find the category that owns this property...
                category_customization = find_in_sequence(
                    lambda c: c.proxy.has_property(property_proxy),
                    category_customizations
                )
                property_customization = QPropertyCustomization(
                    project=project,
                    proxy=property_proxy,
                    model_customization=model_customization,
                    category_customization=category_customization,
                )
                property_customization.reset()
                category_customization.property_customizations(manager="allow_unsaved_category_customizations_manager").add_potentially_unsaved(property_customization)
                customizations[property_proxy_key] = property_customization
            else:
                property_customization = customizations[property_proxy_key]
        property_customizations.append(property_customization)
        ############################
        # here begins the icky bit #
        ############################
        # if property_customization.use_subforms:
        # the trouble w/ using "use_subforms", above, is that it excludes hierarchical properties (which could potentially point to CIM documents)
        # so instead I always fill in relationship_target_models, and rely on the template to exclude appropriate content
        if property_customization.use_subforms or property_customization.relationship_is_hierarchical:
            subform_key = "{0}.{1}".format(model_proxy.key, property_proxy.key)
            target_model_customizations = []
            for target_model_proxy in property_proxy.relationship_target_models.all():
            # for target_model_proxy in property_proxy.relationship_target_models.filter(is_meta=False):
                # notice how I add the "cim_id" attribute (just in-case this is a specialization w/ different objects of the same class)
                # target_model_proxy_key = "{0}.{1}.{2}".format(subform_key, target_model_proxy.name, target_model_proxy.cim_id)
                target_model_proxy_key = "{0}.{1}.{2}".format(subform_key, target_model_proxy.key, target_model_proxy.cim_id)
                if target_model_proxy_key not in customizations:
                    # recurse into the target model of the relationship
                    target_model_customization = get_new_customizations(
                        project=project,
                        ontology=ontology,
                        model_proxy=target_model_proxy,
                        key=target_model_proxy_key,
                        customizations=customizations,
                    )
                else:
                    target_model_customization = customizations[target_model_proxy_key]
                target_model_customizations.append(target_model_customization)
            property_customization.relationship_target_model_customizations(manager="allow_unsaved_relationship_target_model_customizations_manager").add_potentially_unsaved(*target_model_customizations)
            ##########################
            # here ends the icky bit #
            ##########################
    model_customization.property_customizations(manager="allow_unsaved_property_customizations_manager").add_potentially_unsaved(*property_customizations)
    return customizations[model_proxy_key]
def get_existing_customizations(project=None, ontology=None, model_proxy=None, customization_name=None, customization_id=None):
    """Fetch a previously-saved model customization.

    The lookup is by primary key when *customization_id* is given, otherwise
    by (project, proxy, case-insensitive name).

    Raises QModelCustomization.DoesNotExist when no match is found; it is up
    to the calling method to catch that and do something sensible.
    """
    if customization_id:
        model_customization = QModelCustomization.objects.get(pk=customization_id)
        # sanity-check that the pk lookup agrees with the other arguments
        assert model_customization.proxy == model_proxy
        assert model_customization.project == project
    else:
        model_customization = QModelCustomization.objects.get(
            proxy=model_proxy,
            project=project,
            name__iexact=customization_name,
        )
    if customization_name:
        assert model_customization.name.lower() == customization_name.lower()
    return model_customization
def serialize_customizations(current_model_customization, **kwargs):
    """Serialize a (possibly unsaved) customization hierarchy to an OrderedDict.

    need a special fn to cope w/ this
    b/c getting DRF to work w/ potentially infinite recursion is impossible
    it is likely that these customizations will need to be serialized before they have been saved
    therefore the m2m fields will not yet exist in the db
    the workflow goes:
    * get_new_customizations where calls to create are wrapped in "allow_unsaved_fk" & custom "QUnsavedRelatedManager"
    * those customizations get cached in the current session
    * AJAX calls the RESTful API to access those cached customizations
    * which needs to be serialized via this fn and then passed as data to QModelCustomizationSerializer
    :param current_model_customization: root of the hierarchy to serialize
    :return: OrderedDict
    """
    assert current_model_customization.is_new # the only reason to use this fn is w/ cached unsaved models
    previously_serialized_customizations = kwargs.pop("previously_serialized_customizations", {})
    prefix = kwargs.pop("prefix", None)
    # get model customization stuff...
    model_customization_key = current_model_customization.get_fully_qualified_key(prefix=prefix)
    if model_customization_key not in previously_serialized_customizations:
        model_customization_serialization = serialize_model_to_dict(
            current_model_customization,
            include={
                "key": current_model_customization.key,
                "is_document": current_model_customization.is_document,
                "is_meta": current_model_customization.is_meta,
                "proxy_title": str(current_model_customization.proxy),
                "proxy_id": current_model_customization.proxy.cim_id,
                "display_detail": False,
            },
            exclude=["guid", "created", "modified", "synchronization"]
        )
        previously_serialized_customizations[model_customization_key] = model_customization_serialization
    else:
        model_customization_serialization = previously_serialized_customizations[model_customization_key]
    # and the categories stuff...
    category_customization_serializations = []
    for category_customization in current_model_customization.category_customizations(manager="allow_unsaved_category_customizations_manager").all():
        category_customization_key = category_customization.get_fully_qualified_key(prefix=prefix)
        if category_customization_key not in previously_serialized_customizations:
            category_customization_serialization = serialize_model_to_dict(
                category_customization,
                include={
                    "key": category_customization.key,
                    "is_empty": category_customization.is_empty,
                    "is_meta": category_customization.is_meta,
                    "num_properties": category_customization.property_customizations(manager="allow_unsaved_category_customizations_manager").count(),
                    "proxy_title": str(category_customization.proxy),
                    "proxy_id": category_customization.proxy.cim_id,
                    "display_properties": True,
                    "display_detail": False,
                },
                exclude=["guid", "created", "modified"]
            )
            previously_serialized_customizations[category_customization_key] = category_customization_serialization
        else:
            category_customization_serialization = previously_serialized_customizations[category_customization_key]
        category_customization_serializations.append(category_customization_serialization)
    # and the properties stuff...
    property_customization_serializations = []
    for property_customization in current_model_customization.property_customizations(manager="allow_unsaved_property_customizations_manager").all():
        property_customization_key = property_customization.get_fully_qualified_key(prefix=prefix)
        if property_customization_key not in previously_serialized_customizations:
            category_customization = property_customization.category_customization
            property_customization_serialization = serialize_model_to_dict(
                property_customization,
                include={
                    "key": property_customization.key,
                    "category_key": category_customization.key,
                    "cardinality": property_customization.cardinality,
                    "proxy_title": str(property_customization.proxy),
                    "proxy_id": property_customization.proxy.cim_id,
                    "display_detail": False,
                    "use_subforms": property_customization.use_subforms,
                    "is_meta": property_customization.is_meta,
                },
                exclude=["guid", "created", "modified"]
            )
            ############################
            # here begins the icky bit #
            ############################
            subform_customizations_serializations = []
            # as w/ "get_new_customizations" above this if statement would have excluded hierarchical properties that happen to map to CIM documents
            # if property_customization.use_subforms:
            if property_customization.use_subforms or property_customization.relationship_is_hierarchical:
                subform_prefix = property_customization.get_fully_qualified_key() # note I do _not_ pass the prefix kwarg
                for subform_model_customization in property_customization.relationship_target_model_customizations(manager="allow_unsaved_relationship_target_model_customizations_manager").all():
                    subform_model_customization_key = subform_model_customization.get_fully_qualified_key(prefix=subform_prefix)
                    # notice how I add the cim_id in-case this is a specialization...
                    if property_customization.has_specialized_values:
                        subform_model_customization_key = "{0}.{1}".format(subform_model_customization_key, subform_model_customization.proxy.cim_id)
                    if subform_model_customization_key not in previously_serialized_customizations:
                        subform_customizations_serialization = serialize_customizations(
                            subform_model_customization,
                            previously_serialized_customizations=previously_serialized_customizations,
                            prefix=subform_prefix,
                        )
                        previously_serialized_customizations[subform_model_customization_key] = subform_customizations_serialization
                    else:
                        subform_customizations_serialization = previously_serialized_customizations[subform_model_customization_key]
                    subform_customizations_serializations.append(subform_customizations_serialization)
            property_customization_serialization["relationship_target_model_customizations"] = subform_customizations_serializations
            ##########################
            # here ends the icky bit #
            ##########################
        else:
            property_customization_serialization = previously_serialized_customizations[property_customization_key]
        # NOTE(review): unlike models/categories, this property serialization is
        # never stored back into previously_serialized_customizations — confirm
        # that is intentional
        property_customization_serializations.append(property_customization_serialization)
    # and put it all together...
    serialization = OrderedDict(model_customization_serialization)
    serialization["categories"] = category_customization_serializations
    serialization["properties"] = property_customization_serializations
    return serialization
###################
# some helper fns #
###################
def recurse_through_customizations(fn, current_model_customization, customization_types, **kwargs):
    """
    recursively applies fn to all customizations of the given types
    (each recursive property is visited only once, tracked via its key)
    :param fn: fn to call (called for its side effects; return value ignored)
    :param current_model_customization: the model customization from which to begin checking
    :param customization_types: the types of customizations to check
    :return: None
    """
    previously_recursed_customizations = kwargs.pop("previously_recursed_customizations", set())
    if CustomizationTypes.MODEL in customization_types:
        fn(current_model_customization)
    for category_customization in current_model_customization.category_customizations(manager="allow_unsaved_category_customizations_manager").all():
        if CustomizationTypes.CATEGORY in customization_types:
            fn(category_customization)
    for property_customization in current_model_customization.property_customizations(manager="allow_unsaved_property_customizations_manager").all():
        property_customization_key = property_customization.key
        if property_customization_key not in previously_recursed_customizations:
            if CustomizationTypes.PROPERTY in customization_types:
                fn(property_customization)
            # as w/ "get_new_customizations" above this if statement would have excluded hierarchical properties that happen to map to CIM documents
            # if property_customization.use_subforms:
            if property_customization.use_subforms or property_customization.relationship_is_hierarchical:
                target_model_customizations = property_customization.relationship_target_model_customizations(manager="allow_unsaved_relationship_target_model_customizations_manager").all()
                for target_model_customization in target_model_customizations:
                    previously_recursed_customizations.add(property_customization_key) # only tracking property_customizations b/c those are the only recursive things
                    recurse_through_customizations(
                        fn,
                        target_model_customization,
                        customization_types,
                        previously_recursed_customizations=previously_recursed_customizations,
                    )
def get_customization_by_fn(fn, current_model_customization, customization_types, **kwargs):
    """
    just like the above fn, except it returns the first customization for which fn returns true
    :param fn: fn to call
    :param current_model_customization: the model customization from which to begin checking
    :param customization_types: the types of customizations to check
    :return: either QModelCustomization or QCategoryCustomization or QPropertyCustomization or None
    """
    previously_recursed_customizations = kwargs.pop("previously_recursed_customizations", set())
    if CustomizationTypes.MODEL in customization_types:
        if fn(current_model_customization):
            return current_model_customization
    if CustomizationTypes.CATEGORY in customization_types:
        category_customization = find_in_sequence(
            fn,
            current_model_customization.category_customizations(manager="allow_unsaved_category_customizations_manager").all()
        )
        if category_customization:
            return category_customization
    for property_customization in current_model_customization.property_customizations(manager="allow_unsaved_property_customizations_manager").all():
        property_customization_key = property_customization.key
        if property_customization_key not in previously_recursed_customizations:
            if CustomizationTypes.PROPERTY in customization_types and fn(property_customization):
                return property_customization
            # as w/ "get_new_customizations" above, this if statement would have excluded hierarchical properties that happen to map to CIM documents
            # if property_customization.use_subforms:
            if property_customization.use_subforms or property_customization.relationship_is_hierarchical:
                target_model_customizations = property_customization.relationship_target_model_customizations(manager="allow_unsaved_relationship_target_model_customizations_manager").all()
                previously_recursed_customizations.add(property_customization_key) # only tracking property_customizations b/c those are the only recursive things
                for target_model_customization in target_model_customizations:
                    # recurse into the relationship target and propagate any match
                    matching_customization = get_customization_by_fn(
                        fn,
                        target_model_customization,
                        customization_types,
                        previously_recursed_customizations=previously_recursed_customizations,
                    )
                    if matching_customization: # break out of the for loop as soon as I found a match
                        return matching_customization
def get_model_customization_by_key(key, current_model_customization, **kwargs):
    """Return the model customization in the hierarchy whose key equals `key`, or None if no match is found."""
    def _key_matches(customization):
        return customization.key == key
    return get_customization_by_fn(_key_matches, current_model_customization, [CustomizationTypes.MODEL])
def get_category_customization_by_key(key, current_model_customization, **kwargs):
    """Return the category customization in the hierarchy whose key equals `key`, or None if no match is found."""
    def _key_matches(customization):
        return customization.key == key
    return get_customization_by_fn(_key_matches, current_model_customization, [CustomizationTypes.CATEGORY])
def get_property_customization_by_key(key, current_model_customization, **kwargs):
    """Return the property customization in the hierarchy whose key equals `key`, or None if no match is found."""
    def _key_matches(customization):
        return customization.key == key
    return get_customization_by_fn(_key_matches, current_model_customization, [CustomizationTypes.PROPERTY])
def set_name(model_customization, new_name):
    """Rename every customization (model, category and property) in the hierarchy to `new_name`."""
    every_type = [CustomizationTypes.MODEL, CustomizationTypes.CATEGORY, CustomizationTypes.PROPERTY]
    recurse_through_customizations(lambda c: c.set_name(new_name), model_customization, every_type)
def set_owner(model_customization, new_owner):
    """Assign `new_owner` as the owner of every model customization in the hierarchy."""
    def _assign_owner(customization):
        customization.set_owner(new_owner)
    recurse_through_customizations(_assign_owner, model_customization, [CustomizationTypes.MODEL])
def set_shared_owner(model_customization, new_owner):
    """Add `new_owner` as a shared owner of every model customization in the hierarchy."""
    def _add_shared_owner(customization):
        customization.set_shared_owner(new_owner)
    recurse_through_customizations(_add_shared_owner, model_customization, [CustomizationTypes.MODEL])
from collections import deque # not sure I need the full complexity of a deque, but in my head I want a linked-list so this makes sense
def get_customization_path(customization, **kwargs):
    """
    given a customization anywhere in a hierarchy of customizations,
    returns a linked-list describing the path to take from the root customization to get to it
    :param customization: the (property or model) customization to locate within the hierarchy
    :param kwargs: "path" - the deque of QPathNodes accumulated so far (only used internally during recursion)
    :return: a deque of QPathNode instances ordered from the root customization down to `customization`
    """
    path = kwargs.pop("path", deque())
    if isinstance(customization, QPathNode("PROPERTY", ...).__class__ if False else QPropertyCustomization):
        path.appendleft(QPathNode("PROPERTY", customization.guid, customization.proxy))
        return get_customization_path(customization.model, path=path)
    elif isinstance(customization, QModelCustomization):
        # NOTE(review): node types are recorded here as the strings "PROPERTY"/"MODEL" but compared
        # against CustomizationTypes members in "walk_customization_path" below - confirm these compare equal
        path.appendleft(QPathNode("MODEL", customization.guid, customization.proxy))
        # a model reached through a RELATIONSHIP property has a source property; the root model does not...
        parent_property = customization.relationship_source_property_customization
        if parent_property:
            return get_customization_path(parent_property, path=path)
        return path  # reached the root: return the fully-accumulated path
    else:
        # TODO: ADD SUPPORT FOR QCategoryCustomizations
        msg = "I don't know how to find the path for {0}".format(customization)
        raise QError(msg)
def walk_customization_path(customization, path):
    """
    given a root customization, follows a linked-list describing the path to take to get to a specific customization
    (the inverse of "get_customization_path" above)
    :param customization: the root customization to start walking from
    :param path: a deque of QPathNodes, as produced by "get_customization_path"
    :return: the customization found at the end of the path
    NOTE: this consumes (mutates) the passed-in path deque
    """
    node = path.popleft()  # get the root node
    assert customization.proxy.guid == node.proxy.guid  # make sure we're starting at the right place
    # walk along the remaining path moving through the customizations...
    while len(path):
        node = path.popleft()
        if node.type == CustomizationTypes.PROPERTY:
            assert isinstance(customization, QModelCustomization)  # (if I'm looking for a property, I must be at a model)
            customization = customization.property_customizations(manager="allow_unsaved_property_customizations_manager").get(proxy=node.proxy)
        elif node.type == CustomizationTypes.MODEL:
            assert isinstance(customization, QPropertyCustomization)  # (if I'm looking for a model, I must be at a [RELATIONSHIP] property)
            customization = customization.relationship_target_model_customizations(manager="allow_unsaved_relationship_target_model_customizations_manager").get(proxy=node.proxy)
        # TODO: ADD SUPPORT FOR QCategoryCustomization
    return customization
#####################
# the actual models #
#####################
class QCustomization(models.Model):
    """
    Abstract base class shared by all customization models
    (QModelCustomization, QCategoryCustomization, QPropertyCustomization).
    Provides the common guid / timestamp / name fields
    plus key & equality helpers based on the guid.
    """
    class Meta:
        app_label = APP_LABEL
        abstract = True
    # immutable unique identifier; "key" and equality are both based on this
    guid = models.UUIDField(default=uuid4, editable=False)
    created = models.DateTimeField(auto_now_add=True, editable=False)
    modified = models.DateTimeField(auto_now=True, editable=False)
    # all customizations share a name
    # (this makes finding related customizations simple: ".filter(project=parent.project, name=parent.name)" )
    name = models.CharField(
        max_length=LIL_STRING,
        blank=False,
        verbose_name="Customization Name",
        validators=[validate_no_bad_chars, validate_no_spaces, validate_no_reserved_words, validate_no_profanities],
        help_text="A unique name for this customization. Only alphanumeric characters are allowed."
    )
    def __eq__(self, other):
        # customizations are considered equal iff their guids match
        # NOTE(review): defining __eq__ without __hash__ makes instances unhashable on Python 3
        # (it overrides the __hash__ inherited from models.Model) - confirm instances are never stored in sets/dicts
        if isinstance(other, QCustomization):
            return self.guid == other.guid
        return NotImplemented
    def __ne__(self, other):
        # delegate to __eq__ and negate, propagating NotImplemented
        equality_result = self.__eq__(other)
        if equality_result is NotImplemented:
            return equality_result
        return not equality_result
    @property
    def key(self):
        # convert self.guid to str b/c UUID does not play nicely w/ JSON
        return str(self.guid)
    @property
    def is_meta(self):
        # whether the underlying proxy is a "meta" type
        return self.proxy.is_meta is True
    @property
    def is_new(self):
        # True for unsaved instances (no primary key yet)
        return self.pk is None
    @property
    def is_existing(self):
        # True for instances that have already been saved to the db
        return self.pk is not None
    @classmethod
    def get_field(cls, field_name):
        """
        convenience fn for getting the Django Field instance from a model class
        note that this is a classmethod; when called from an instance it will just convert that instance to its class
        returns None if no field w/ that name exists
        """
        # NOTE(review): "_meta.get_field_by_name" was deprecated in Django 1.8 and removed in 1.10 -
        # confirm the Django version this project targets
        try:
            field = cls._meta.get_field_by_name(field_name)
            return field[0]
        except FieldDoesNotExist:
            return None
    def get_unique_together(self):
        """
        'unique_together' validation is only enforced if all the unique_together fields appear in the ModelForm
        this fn returns the fields to check for manual validation
        """
        unique_together = self._meta.unique_together
        return list(unique_together)
    def reset(self, **kwargs):
        # concrete subclasses must re-initialize their customizable fields from their proxy here
        msg = "{0} must define a custom 'reset' method.".format(self.__class__.__name__)
        raise NotImplementedError(msg)
class QModelCustomizationQuerySet(models.QuerySet):
    """
    As of Django 1.7 I can use custom querysets as managers
    to ensure that these custom methods are chainable
    whoo-hoo
    """
    def documents(self):
        # restrict to customizations whose proxy describes a CIM Document
        return self.filter(proxy__is_document=True)
    def owned_documents(self, user):
        # document customizations directly owned by the given user
        return self.documents().filter(owner=user)
    def shared_documents(self, user):
        # document customizations shared w/ the given user (via the m2m "shared_owners" field)
        return self.documents().filter(shared_owners__in=[user.pk])
class QModelCustomization(QCustomization):
    """
    Customization of a single model (a CIM Document or sub-component):
    stores presentation settings (title, description, etc.) for the editing form
    and acts as the root of a hierarchy of category & property customizations.
    """
    class Meta:
        app_label = APP_LABEL
        abstract = False
        ordering = ("order", )
        verbose_name = "_Questionnaire Customization: Model"
        verbose_name_plural = "_Questionnaire Customizations: Models"
    class _QModelCustomizationUnsavedRelatedManager(QUnsavedRelatedManager):
        # manager that copes w/ unsaved instances of the reverse fk
        # "relationship_source_property_customization" defined below
        field_name = "relationship_source_property_customization"
    # custom managers...
    objects = QModelCustomizationQuerySet.as_manager()
    allow_unsaved_relationship_target_model_customizations_manager = _QModelCustomizationUnsavedRelatedManager()
    owner = models.ForeignKey(User, blank=False, null=True, related_name="owned_customizations", on_delete=models.SET_NULL)
    shared_owners = models.ManyToManyField(User, blank=True, related_name="shared_customizations")
    project = models.ForeignKey("QProject", blank=False, related_name="model_customizations")
    proxy = models.ForeignKey("QModelProxy", blank=False, related_name="model_customizations")
    synchronization = models.ManyToManyField("QSynchronization", blank=True)
    order = models.PositiveIntegerField(blank=False)
    documentation = models.TextField(
        blank=True,
        null=True,
        help_text="An explanation of how this customization is intended to be used. This information is for informational purposes only.",
        verbose_name="Customization Description",
    )
    is_default = models.BooleanField(
        default=False,
        help_text="Every CIM Document Type must have one default customization. If this is the first customization you are creating, please ensure this checkbox is selected.",
        verbose_name="Is Default Customization?"
    )
    model_title = models.CharField(
        max_length=BIG_STRING,
        verbose_name="Name that should appear on the Document Form",
        blank=False, null=True
    )
    model_description = models.TextField(
        blank=True,
        null=True,
        help_text="This text will appear as documentation in the editing form. Inline HTML formatting is permitted. The initial documentation comes from the ontology.",
        verbose_name="A description of the document",
    )
    model_hierarchy_title = models.CharField(
        max_length=SMALL_STRING,
        help_text="This text will appear as a label for the tree view widget used to navigate the hierarchy of components",
        verbose_name="Title to use for the component hierarchy tree",
        blank=True, null=True,
    )
    model_show_empty_categories = models.BooleanField(
        default=False,
        verbose_name="Display empty categories?",
        help_text="Include categories in the editing form for which there are no (visible) properties associated with.",
    )
    # this fk is just here to provide the other side of the relationship to property_customization
    # I only ever access "property_customization.relationship_target_model_customizations"
    relationship_source_property_customization = models.ForeignKey("QPropertyCustomization", blank=True, null=True, related_name="relationship_target_model_customizations")
    def __str__(self):
        return pretty_string(self.name)
    def get_fully_qualified_key(self, prefix=None):
        """Return a dotted key made of the proxy's fully-qualified key plus this customization's key (optionally prepended w/ prefix)."""
        fully_qualified_key = "{0}.{1}".format(
            self.proxy.fully_qualified_key,
            self.key,
        )
        if prefix:
            return "{0}.{1}".format(prefix, fully_qualified_key)
        return fully_qualified_key
    @property
    def is_synchronized(self):
        # a customization is synchronized iff no QSynchronization records are attached to it
        return self.synchronization.count() == 0  # checks if qs is empty
    @property
    def is_unsynchronized(self):
        return not self.is_synchronized
    @property
    def is_document(self):
        # whether the underlying proxy describes a CIM Document
        return self.proxy.is_document is True
    def set_name(self, new_name):
        # used w/ "recurse_through_customization" in global fn "set_name" above
        self.name = new_name
    def set_owner(self, new_owner):
        # used w/ "recurse_through_customization" in global fn "set_owner" above
        self.owner = new_owner
    def set_shared_owner(self, new_shared_owner):
        # used w/ "recurse_through_customization" in global fn "set_shared_owner" above
        # NOTE(review): this writes to an m2m field, which requires the instance to already be saved - confirm callers guarantee that
        self.shared_owners.add(new_shared_owner)
    def clean(self, *args, **kwargs):
        """Enforce uniqueness constraints that span project/proxy/name; raises ValidationError on conflicts."""
        other_customizers = QModelCustomization.objects.filter(
            proxy=self.proxy,
            project=self.project,
        ).exclude(pk=self.pk)
        # there can be only 1 "default" customization for each project/proxy/ontology combination
        if self.is_default:
            if other_customizers.filter(is_default=True).count() != 0:
                raise ValidationError({
                    "is_default": _("A default customization for this document_type already exists. There can be only one default customization per project.")
                })
        # customization names must be unique per document_type & project
        if self.proxy.is_document:
            if other_customizers.filter(proxy__is_document=True, name=self.name).count() != 0:
                raise ValidationError({
                    "name": _("A customization for this document_type and project with this name already exists."),
                    "proxy": _("A customization for this document_type and project with this name already exists."),
                    "project": _("A customization for this document_type and project with this name already exists."),
                })
        super(QModelCustomization, self).clean(*args, **kwargs)
    def save(self, *args, **kwargs):
        # force all (custom) "clean" methods to run
        self.full_clean()
        super(QModelCustomization, self).save(*args, **kwargs)
    def reset(self, **kwargs):
        """Re-initialize the customizable fields from the underlying proxy; pass force_save=True to persist the result."""
        force_save = kwargs.pop("force_save", False)
        proxy = self.proxy
        self.order = proxy.order
        self.model_title = pretty_string(proxy.name)
        self.model_description = proxy.documentation
        self.model_show_empty_categories = False
        if self.proxy.has_hierarchical_properties:
            self.model_hierarchy_title = "Component Hierarchy"
        if force_save:
            self.save()
    ##################################################
    # some fns which are called from signal handlers #
    ##################################################
    def updated_ontology(self):
        # called when the underlying ontology changes; not yet implemented
        raise NotImplementedError
##############
# categories #
##############
class QCategoryCustomization(QCustomization):
    """
    Customization of a category (a named grouping of properties):
    stores presentation settings (title, description, visibility, ordering) for the editing form.
    """
    class Meta:
        app_label = APP_LABEL
        abstract = False
        verbose_name = "_Questionnaire Customization: Category"
        verbose_name_plural = "_Questionnaire Customizations: Categories"
        ordering = ("order",)
    class _QCategoryCustomizationUnsavedRelatedManager(QUnsavedRelatedManager):
        # manager that copes w/ unsaved instances related via the "model_customization" fk below
        field_name = "model_customization"
    # custom managers...
    objects = models.Manager()
    allow_unsaved_category_customizations_manager = _QCategoryCustomizationUnsavedRelatedManager()
    project = models.ForeignKey("QProject", blank=False, related_name="category_customizations")
    proxy = models.ForeignKey("QCategoryProxy", blank=False)
    model_customization = models.ForeignKey("QModelCustomization", blank=False, related_name="category_customizations")
    category_title = models.CharField(max_length=TINY_STRING, blank=False, validators=[validate_no_profanities], verbose_name="Title")
    category_description = models.TextField(blank=True, null=True, verbose_name="Description")
    is_hidden = models.BooleanField(default=False, verbose_name="Should this category <u>not</u> be displayed?")
    is_hidden.help_text = _(
        "Note that hiding a category will not hide all of its member properties; "
        "It will simply not render them in a parent tab."
    )
    order = models.PositiveIntegerField(blank=True, null=True, verbose_name="Order")
    order.help_text = _(
        "Do not modify this value directly <em>here</em>. "
        "Instead, drag and drop individual category widgets on the main form."
    )
    def __str__(self):
        return pretty_string(self.category_title)
    @property
    def is_empty(self):
        # a category is "empty" iff it has no visible (non-hidden) properties
        n_displayed_properties = self.property_customizations.filter(is_hidden=False).count()
        return n_displayed_properties == 0
    def get_fully_qualified_key(self, prefix=None):
        """Return a dotted key made of the proxy's fully-qualified key plus this customization's key (optionally prepended w/ prefix)."""
        fully_qualified_key = "{0}.{1}".format(
            self.proxy.fully_qualified_key,
            self.key,
        )
        if prefix:
            return "{0}.{1}".format(prefix, fully_qualified_key)
        return fully_qualified_key
    def has_property(self, property_customization):
        # is the given property customization a member of this category?
        return property_customization in self.property_customizations.all()
    def set_name(self, new_name):
        # used w/ "recurse_through_customization" in global fn "set_name" above
        self.name = new_name
    def reset(self, **kwargs):
        """Re-initialize the customizable fields from the underlying proxy; pass force_save=True to persist the result."""
        # NOTE(review): sibling "reset" methods default force_save to False rather than None;
        # both are falsy so behavior is the same, but consider making this consistent
        force_save = kwargs.pop("force_save", None)
        proxy = self.proxy
        self.category_title = proxy.name
        self.category_description = proxy.documentation
        self.is_hidden = proxy.is_uncategorized
        self.order = proxy.order
        if force_save:
            self.save()
###########################
# property customizations #
###########################
class QPropertyCustomization(QCustomization):
    """
    Customization of a single property (ATOMIC, ENUMERATION, or RELATIONSHIP):
    stores presentation & behaviour settings for how that property is rendered in the editing form.
    """
    class Meta:
        app_label = APP_LABEL
        abstract = False
        verbose_name = "_Questionnaire Customization: Property"
        verbose_name_plural = "_Questionnaire Customizations: Properties"
        ordering = ("order",)
    class _QPropertyCustomizationUnsavedRelatedManager(QUnsavedRelatedManager):
        # manager that copes w/ unsaved instances related via the "model_customization" fk below
        field_name = "model_customization"
    class _QCategoryCustomizationUnsavedRelatedManager(QUnsavedRelatedManager):
        # manager that copes w/ unsaved instances related via the "category_customization" fk below
        field_name = "category_customization"
    # custom managers...
    objects = models.Manager()
    allow_unsaved_property_customizations_manager = _QPropertyCustomizationUnsavedRelatedManager()
    allow_unsaved_category_customizations_manager = _QCategoryCustomizationUnsavedRelatedManager()
    project = models.ForeignKey("QProject", blank=False, related_name="property_customizations")
    proxy = models.ForeignKey("QPropertyProxy", blank=False, null=False)
    model_customization = models.ForeignKey("QModelCustomization", blank=False, related_name="property_customizations")
    category_customization = models.ForeignKey("QCategoryCustomization", blank=True, null=True, related_name="property_customizations")
    # all property types...
    property_title = models.CharField(max_length=LIL_STRING, blank=False, validators=[validate_no_profanities, ])
    is_required = models.BooleanField(default=True, blank=True, verbose_name="Is this property required?")
    is_required.help_text = _(
        "All required properties must be completed prior to publication. "
        "A property that is defined as required <em>in the CIM</em> cannot be made optional."
    )
    is_hidden = models.BooleanField(default=True, blank=True, verbose_name="Should this property <u>not</u> be displayed?")
    is_hidden.help_text = _(
        "A property that is defined as required in an ontology should not be hidden."
    )
    is_editable = models.BooleanField(default=True, verbose_name="Can this property be edited?")
    is_editable.help_text = _(
        "If this field is disabled, this is because a default value was set by the CIM itself "
        "and should not therefore be overridden by the ES-DOC Questionnaire."
    )
    is_nillable = models.BooleanField(default=True, verbose_name="Should <i>nillable</i> options be allowed?")
    is_nillable.help_text = \
        "A nillable property can be intentionally left blank for several reasons: {0}. In general, relationship properties needn't be made nillable, since the relationships can simply be removed.".format(
            ", ".join([nr[0] for nr in NIL_REASONS])
        )
    property_description = models.TextField(
        blank=True,
        null=True,
        verbose_name=_(
            "What is the help text to associate with this property?"
            "<p class='documentation'>Any initial help text comes from the CIM.</p>"
            "<p class='documentation'>Note that basic HTML tags are supported.</p>"
        )
    )
    inline_help = models.BooleanField(default=False, blank=True, verbose_name="Should the help text be displayed inline?")
    order = models.PositiveIntegerField(blank=True, null=True)
    field_type = models.CharField(max_length=BIG_STRING, blank=False, choices=[(ft.get_type(), ft.get_name()) for ft in QPropertyTypes])
    can_inherit = models.BooleanField(default=False, verbose_name="Can this property be inherited by children?")
    can_inherit.help_text = _(
        "Enabling inheritance will allow the corresponding properties of child components to 'inherit' the value of this property. "
        "The editing form will allow users the ability to 'opt-out' of this inheritance."
    )
    default_values = QJSONField(
        blank=True, null=True, schema=get_default_values_schema,
        verbose_name=_(
            "What are the default values for this property?"
            "<p class='documentation'>Please enter a comma-separated list of strings.</p>"
        ),
        help_text=_(
            # (note the trailing space after "itself": without it the adjacent string
            # literals concatenated to "itselfand" in the rendered help text)
            "If this field is disabled, this is because a default value was set by the ontology itself "
            "and should not therefore be overridden by the ES-DOC Questionnaire. "
            "<em>In this case, the property should also not be editable.</em>"
        )
    )
    # ATOMIC property types...
    atomic_type = models.CharField(
        max_length=BIG_STRING,
        blank=False,
        verbose_name="How should this field be rendered?",
        choices=[(ft.get_type(), ft.get_name()) for ft in QAtomicTypes],
        default=QAtomicTypes.DEFAULT.get_type(),
        help_text=_(
            "By default, all fields are rendered as strings. "
            "However, a field can be customized to accept longer snippets of text, dates, email addresses, etc."
        )
    )
    atomic_suggestions = models.TextField(
        blank=True,
        null=True,
        verbose_name="Are there any suggestions you would like to offer as auto-completion options?",
        help_text="Please enter a '|' separated list of words or phrases."
    )
    # ENUMERATION fields...
    enumeration_is_open = models.BooleanField(default=False, verbose_name='Can a user can specify a custom "OTHER" value?')
    # RELATIONSHIP fields...
    # using the reverse of the fk defined on model_customization instead of this field
    # (so that I can use a custom manager to cope w/ unsaved instances)
    # relationship_target_model_customizations = models.ManyToManyField("QModelCustomization", blank=True, related_name="+")
    relationship_show_subforms = models.BooleanField(
        default=False,
        verbose_name=_(
            "Should this property be rendered in its own subform?"
            "<p class='documentation'>Note that a relationship to another CIM Document <u>cannot</u> use subforms, "
            "while a relationship to anything else <u>must</u> use subforms.</p>"
        ),
        help_text=_(
            "Checking this will cause the property to be rendered as a nested subform within the parent form; "
            "All properties of the target model will be available to view and edit in that subform. "
            "Unchecking it will cause the attribute to be rendered as a <em>reference</em> widget. "
            # (the inner quotes around "hierarchical" must be escaped: unescaped, they
            # terminated the string literal and made the whole module a SyntaxError)
            "<br/>(Note that a \"hierarchical\" model can still be customized using this technique even though "
            "the corresponding target models will display as top-level forms rather than subforms.)"
        )
    )
    relationship_is_hierarchical = models.BooleanField(
        default=False,
        verbose_name=_(
            "Should this property be rendered as part of a hierarchy?"
        ),
        help_text=_(
            "Checking this will cause the property to be rendered in a treeview; "
            "All properties of the target model will be available as a pane next to that treeview. "
            "This value is set by the ontology itself. Unless you know what you're doing, <em>don't mess with it</em>."
        )
    )
    def __str__(self):
        return pretty_string(self.proxy.name)
    def get_fully_qualified_key(self, prefix=None):
        """Return a dotted key made of the proxy's fully-qualified key plus this customization's key (optionally prepended w/ prefix)."""
        fully_qualified_key = "{0}.{1}".format(
            self.proxy.fully_qualified_key,
            self.key
        )
        if prefix:
            return "{0}.{1}".format(prefix, fully_qualified_key)
        return fully_qualified_key
    def set_name(self, new_name):
        # used w/ "recurse_through_customization" in global fn "set_name" above
        self.name = new_name
    @property
    def cardinality_min(self):
        # minimum cardinality (always a finite int)
        cardinality_min = self.proxy.cardinality_min
        return int(cardinality_min)
    @property
    def cardinality_max(self):
        # maximum cardinality: either an int or the special CARDINALITY_INFINITE token
        cardinality_max = self.proxy.cardinality_max
        if cardinality_max != CARDINALITY_INFINITE:
            return int(cardinality_max)
        return cardinality_max
    @property
    def cardinality(self):
        # NOTE(review): rendered as "min.max" - confirm the single '.' separator is intentional
        return "{0}.{1}".format(
            self.cardinality_min,
            self.cardinality_max,
        )
    @property
    def is_infinite(self):
        return self.cardinality_max == CARDINALITY_INFINITE
    @property
    def is_multiple(self):
        # can this property have more than one value?
        return self.is_infinite or int(self.cardinality_max) > 1
    @property
    def is_single(self):
        return int(self.cardinality_max) == 1
    @property
    def use_references(self):
        """
        As of v0.14 all RELATIONSHIPS to a CIM Document _must_ use a reference
        :return: Boolean
        """
        if self.field_type == QPropertyTypes.RELATIONSHIP:
            target_models_are_documents = [tm.is_document for tm in self.proxy.relationship_target_models.all()]
            # a single property cannot mix document & non-document targets
            assert len(set(target_models_are_documents)) == 1
            return all(target_models_are_documents)
        return False
    @property
    def use_subforms(self):
        """
        As of v0.14 all RELATIONSHIPS to a CIM Entity (non-Document) _must_ use a subform
        :return: Boolean
        """
        if self.field_type == QPropertyTypes.RELATIONSHIP:
            target_models_are_documents = [tm.is_document for tm in self.proxy.relationship_target_models.all()]
            # a single property cannot mix document & non-document targets
            assert len(set(target_models_are_documents)) == 1
            return not any(target_models_are_documents)
        return False
    @property
    def has_specialized_values(self):
        return bool(self.proxy.values)  # returns False if is None or an empty list
    def reset(self, **kwargs):
        """Re-initialize the customizable fields from the underlying proxy; pass force_save=True to persist the result."""
        force_save = kwargs.pop("force_save", False)
        assert self.category_customization is not None  # even "uncategorized" properties should use the "UncategorizedCategory"
        proxy = self.proxy
        self.field_type = proxy.field_type
        # ATOMIC fields...
        if self.field_type == QPropertyTypes.ATOMIC:
            self.atomic_type = proxy.atomic_type
            self.atomic_suggestions = ""
        # ENUMERATION fields...
        elif self.field_type == QPropertyTypes.ENUMERATION:
            self.enumeration_is_open = proxy.enumeration_is_open
            # TODO: DO I NEED TO DEAL W/ "enumeration_choices" OR "enumeration_default" ?
        # RELATIONSHIP fields...
        else:  # self.field_type == QPropertyTypes.RELATIONSHIP
            self.relationship_show_subforms = self.use_subforms
            self.relationship_is_hierarchical = proxy.is_hierarchical
        # all fields...
        self.property_title = pretty_string(proxy.name)
        self.property_description = proxy.documentation
        self.order = proxy.order
        self.is_required = proxy.is_required
        self.is_hidden = self.cardinality_min == 0 and self.cardinality_max == 0  # False # not proxy.is_required
        self.is_nillable = not proxy.is_required and not self.field_type == QPropertyTypes.RELATIONSHIP
        self.inline_help = False
        self.default_values = proxy.values
        self.is_editable = not self.has_specialized_values  # if the proxy provided default values, then do not allow the customizer to override them
        self.can_inherit = False
        if force_save:
            self.save()
| 45.42844 | 205 | 0.692575 |
acf2b0911ae8e374cc99d0b36971ebf028b5a50f | 72 | py | Python | docker/test.py | geogeorgiev/opencv-toolkit | ff4bd1a0c3b7e80b6e1808e289be908dec3adf4a | [
"MIT"
] | null | null | null | docker/test.py | geogeorgiev/opencv-toolkit | ff4bd1a0c3b7e80b6e1808e289be908dec3adf4a | [
"MIT"
] | null | null | null | docker/test.py | geogeorgiev/opencv-toolkit | ff4bd1a0c3b7e80b6e1808e289be908dec3adf4a | [
"MIT"
] | null | null | null | import picamera
camera=picamera.PiCamera()
camera.capture('image.jpg')
| 14.4 | 27 | 0.791667 |
acf2b0cfb153e169498ae38ae994e371fcaf7511 | 7,241 | py | Python | sdk/lusid_asyncio/models/model_selection.py | finbourne/lusid-sdk-python-asyncio-preview | 290f93590ab5485661216c8622d3de9f7af0ed60 | [
"MIT"
] | null | null | null | sdk/lusid_asyncio/models/model_selection.py | finbourne/lusid-sdk-python-asyncio-preview | 290f93590ab5485661216c8622d3de9f7af0ed60 | [
"MIT"
] | null | null | null | sdk/lusid_asyncio/models/model_selection.py | finbourne/lusid-sdk-python-asyncio-preview | 290f93590ab5485661216c8622d3de9f7af0ed60 | [
"MIT"
] | null | null | null | # coding: utf-8
"""
LUSID API
FINBOURNE Technology # noqa: E501
The version of the OpenAPI document: 0.11.3923
Contact: info@finbourne.com
Generated by: https://openapi-generator.tech
"""
try:
from inspect import getfullargspec
except ImportError:
from inspect import getargspec as getfullargspec
import pprint
import re # noqa: F401
import six
from lusid_asyncio.configuration import Configuration
class ModelSelection(object):
    """NOTE: This class is auto generated by OpenAPI Generator
    (https://openapi-generator.tech); do not edit it manually.

    Attributes:
        openapi_types (dict): attribute name -> attribute type
        attribute_map (dict): attribute name -> json key in the API definition
        required_map (dict): attribute name -> 'required' or 'optional'
    """
    openapi_types = {
        'library': 'str',
        'model': 'str'
    }

    attribute_map = {
        'library': 'library',
        'model': 'model'
    }

    required_map = {
        'library': 'required',
        'model': 'required'
    }

    def __init__(self, library=None, model=None, local_vars_configuration=None):  # noqa: E501
        """ModelSelection - a model defined in OpenAPI"

        :param library: The available values are: Lusid, RefinitivQps, RefinitivTracsWeb, VolMaster, IsdaCds (required)
        :type library: str
        :param model: The available values are: SimpleStatic, Discounting, VendorDefault, BlackScholes, ConstantTimeValueOfMoney, Bachelier, ForwardWithPoints, ForwardWithPointsUndiscounted, ForwardSpecifiedRate, ForwardSpecifiedRateUndiscounted, IndexNav, IndexPrice, InlinedIndex, ForwardFromCurve, ForwardFromCurveUndiscounted (required)
        :type model: str
        """  # noqa: E501
        self.local_vars_configuration = (
            local_vars_configuration
            if local_vars_configuration is not None
            else Configuration.get_default_copy()
        )
        self._library = None
        self._model = None
        self.discriminator = None
        # assign through the property setters so client-side validation runs
        self.library = library
        self.model = model

    @property
    def library(self):
        """The vendor library supplying the pricing model.

        The available values are: Lusid, RefinitivQps, RefinitivTracsWeb, VolMaster, IsdaCds

        :return: The library of this ModelSelection.
        :rtype: str
        """
        return self._library

    @library.setter
    def library(self, library):
        """Validate (when client-side validation is enabled) and store the library."""
        if self.local_vars_configuration.client_side_validation:
            if library is None:  # noqa: E501
                raise ValueError("Invalid value for `library`, must not be `None`")  # noqa: E501
            permitted = ["Lusid", "RefinitivQps", "RefinitivTracsWeb", "VolMaster", "IsdaCds"]  # noqa: E501
            if library not in permitted:  # noqa: E501
                raise ValueError(
                    "Invalid value for `library` ({0}), must be one of {1}"  # noqa: E501
                    .format(library, permitted)
                )
        self._library = library

    @property
    def model(self):
        """The pricing model to use.

        The available values are: SimpleStatic, Discounting, VendorDefault, BlackScholes, ConstantTimeValueOfMoney, Bachelier, ForwardWithPoints, ForwardWithPointsUndiscounted, ForwardSpecifiedRate, ForwardSpecifiedRateUndiscounted, IndexNav, IndexPrice, InlinedIndex, ForwardFromCurve, ForwardFromCurveUndiscounted

        :return: The model of this ModelSelection.
        :rtype: str
        """
        return self._model

    @model.setter
    def model(self, model):
        """Validate (when client-side validation is enabled) and store the model."""
        if self.local_vars_configuration.client_side_validation:
            if model is None:  # noqa: E501
                raise ValueError("Invalid value for `model`, must not be `None`")  # noqa: E501
            permitted = ["SimpleStatic", "Discounting", "VendorDefault", "BlackScholes", "ConstantTimeValueOfMoney", "Bachelier", "ForwardWithPoints", "ForwardWithPointsUndiscounted", "ForwardSpecifiedRate", "ForwardSpecifiedRateUndiscounted", "IndexNav", "IndexPrice", "InlinedIndex", "ForwardFromCurve", "ForwardFromCurveUndiscounted"]  # noqa: E501
            if model not in permitted:  # noqa: E501
                raise ValueError(
                    "Invalid value for `model` ({0}), must be one of {1}"  # noqa: E501
                    .format(model, permitted)
                )
        self._model = model

    def to_dict(self, serialize=False):
        """Returns the model properties as a dict"""
        def _convert(value):
            to_dict = getattr(value, "to_dict", None)
            if to_dict is None:
                return value
            if len(getfullargspec(to_dict).args) == 1:
                return to_dict()
            return to_dict(serialize)

        result = {}
        for attr in self.openapi_types:
            value = getattr(self, attr)
            key = self.attribute_map.get(attr, attr) if serialize else attr
            if isinstance(value, list):
                result[key] = [_convert(item) for item in value]
            elif isinstance(value, dict):
                result[key] = {k: _convert(v) for k, v in value.items()}
            else:
                result[key] = _convert(value)
        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        return isinstance(other, ModelSelection) and self.to_dict() == other.to_dict()

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self.__eq__(other)
| 37.910995 | 352 | 0.633752 |
acf2b1bcdd7f7b5490c954e1419d3c8124cc5258 | 1,558 | py | Python | pngparser/image.py | AlexGuo1998/png-parser | 4d75f17f158acd6e1f86117bde2a0c5bfceede76 | [
"MIT"
] | 58 | 2019-09-22T20:52:14.000Z | 2022-03-13T21:31:56.000Z | pngparser/image.py | AlexGuo1998/png-parser | 4d75f17f158acd6e1f86117bde2a0c5bfceede76 | [
"MIT"
] | 4 | 2020-01-14T11:10:21.000Z | 2021-12-27T15:21:39.000Z | pngparser/image.py | AlexGuo1998/png-parser | 4d75f17f158acd6e1f86117bde2a0c5bfceede76 | [
"MIT"
] | 7 | 2021-02-14T19:39:03.000Z | 2021-12-25T07:39:49.000Z | from PIL import Image as PilImage
from .pixel import Pixel
class Image:
    """A simple in-memory image: a flat, row-major list of Pixel objects.

    `pixel_type` follows the PNG colour-type codes used by `show()`:
    0 = greyscale, 2 = RGB, 4 = greyscale + alpha, 6 = RGB + alpha.
    """

    def __init__(self, pixel_type, width, height) -> None:
        """Create a width x height image filled with default pixels of the given type."""
        self.width = width
        self.height = height
        self.pixel_type = pixel_type
        # One *distinct* Pixel per position: the previous "[Pixel(t)] * n" form
        # aliased a single Pixel instance across every position, so an in-place
        # mutation of any one pixel would silently have changed them all.
        self.data = [Pixel(pixel_type) for _ in range(width * height)]

    def _index(self, position):
        """Validate an (x, y) position and return its flat row-major index.

        Raises IndexError for coordinates outside [0, width) x [0, height).
        (The old checks used "> width" / "> height", an off-by-one that accepted
        x == width / y == height and silently addressed a pixel on the next row.)
        """
        x, y = position
        if x < 0 or x >= self.width:
            raise IndexError('x outside image')
        if y < 0 or y >= self.height:
            raise IndexError('y outside image')
        return x + y * self.width

    def putpixel(self, position, pixel: "Pixel"):
        """Store `pixel` at (x, y); raises IndexError if the position lies outside the image."""
        self.data[self._index(position)] = pixel

    def getpixel(self, position):
        """Return the pixel at (x, y); raises IndexError if the position lies outside the image."""
        return self.data[self._index(position)]

    def show(self) -> None:
        """Display the image via Pillow, mapping pixel_type to the matching PIL mode."""
        # PNG colour type -> PIL mode: 0->'L', 2->'RGB', 4->'LA', 6->'RGBA';
        # anything unrecognised falls back to 'RGB' (same as the original behaviour)
        pil_mode = {0: 'L', 2: 'RGB', 4: 'LA', 6: 'RGBA'}.get(self.pixel_type, 'RGB')
        pil_img = PilImage.new(pil_mode, (self.width, self.height))
        pil_img.putdata([pixel.values for pixel in self.data])
        pil_img.show()
acf2b2a9c51c9d9f01fa20cd6bb2930410dfd08d | 27,472 | py | Python | dqscrapy/youdl/testYoutubeDL.py | dainixiao/DnxScrapy | 7aa743691b73f01d23374287df64cf6a1a3b40a5 | [
"MIT"
] | 1 | 2016-06-18T11:12:14.000Z | 2016-06-18T11:12:14.000Z | dqscrapy/youdl/testYoutubeDL.py | ipetu/DnxScrapy | 7aa743691b73f01d23374287df64cf6a1a3b40a5 | [
"MIT"
] | null | null | null | dqscrapy/youdl/testYoutubeDL.py | ipetu/DnxScrapy | 7aa743691b73f01d23374287df64cf6a1a3b40a5 | [
"MIT"
] | 1 | 2017-01-11T06:22:02.000Z | 2017-01-11T06:22:02.000Z | #! /usr/bin/env python
# -*- coding: UTF-8 -*-
# Created by Liuwf on 16/7/4
from __future__ import unicode_literals
# Allow direct execution
import os
import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import copy
from youdl.helper import FakeYDL, assertRegexpMatches
from youtube_dl import YoutubeDL
from youtube_dl.compat import compat_str, compat_urllib_error
from youtube_dl.extractor import YoutubeIE
from youtube_dl.extractor.common import InfoExtractor
from youtube_dl.postprocessor.common import PostProcessor
from youtube_dl.utils import ExtractorError, match_filter_func
TEST_URL = 'https://www.youtube.com/watch?v=sOGuV_Cl4no'
class YDL(FakeYDL):
    """Test double that records what would be downloaded or printed."""

    def __init__(self, *args, **kwargs):
        super(YDL, self).__init__(*args, **kwargs)
        # Captured instead of performing real work, for later inspection.
        self.downloaded_info_dicts, self.msgs = [], []

    def process_info(self, info_dict):
        """Record the info dict instead of downloading anything."""
        self.downloaded_info_dicts.append(info_dict)

    def to_screen(self, msg):
        """Record the message instead of printing it."""
        self.msgs.append(msg)
def _make_result(formats, **kwargs):
res = {
'formats': formats,
'id': 'testid',
'title': 'testttitle',
'extractor': 'testex',
}
res.update(**kwargs)
return res
class TestFormatSelection(unittest.TestCase):
    """Exercises YoutubeDL's format-selection spec language: preference
    ordering, 'best*/worst*' selectors, '/' fallbacks, ',' multi-downloads,
    '+' merges and '[...]' attribute filters."""

    def test_prefer_free_formats(self):
        """prefer_free_formats breaks resolution ties toward webm, but never beats quality."""
        # Same resolution => download webm
        ydl = YDL()
        ydl.params['prefer_free_formats'] = True
        formats = [
            {'ext': 'webm', 'height': 460, 'url': TEST_URL},
            {'ext': 'mp4', 'height': 460, 'url': TEST_URL},
        ]
        info_dict = _make_result(formats)
        yie = YoutubeIE(ydl)
        yie._sort_formats(info_dict['formats'])
        ydl.process_ie_result(info_dict)
        downloaded = ydl.downloaded_info_dicts[0]
        self.assertEqual(downloaded['ext'], 'webm')
        # Different resolution => download best quality (mp4)
        ydl = YDL()
        ydl.params['prefer_free_formats'] = True
        formats = [
            {'ext': 'webm', 'height': 720, 'url': TEST_URL},
            {'ext': 'mp4', 'height': 1080, 'url': TEST_URL},
        ]
        info_dict['formats'] = formats
        yie = YoutubeIE(ydl)
        yie._sort_formats(info_dict['formats'])
        ydl.process_ie_result(info_dict)
        downloaded = ydl.downloaded_info_dicts[0]
        self.assertEqual(downloaded['ext'], 'mp4')
        # No prefer_free_formats => prefer mp4 and flv for greater compatibility
        ydl = YDL()
        ydl.params['prefer_free_formats'] = False
        formats = [
            {'ext': 'webm', 'height': 720, 'url': TEST_URL},
            {'ext': 'mp4', 'height': 720, 'url': TEST_URL},
            {'ext': 'flv', 'height': 720, 'url': TEST_URL},
        ]
        info_dict['formats'] = formats
        yie = YoutubeIE(ydl)
        yie._sort_formats(info_dict['formats'])
        ydl.process_ie_result(info_dict)
        downloaded = ydl.downloaded_info_dicts[0]
        self.assertEqual(downloaded['ext'], 'mp4')
        ydl = YDL()
        ydl.params['prefer_free_formats'] = False
        formats = [
            {'ext': 'flv', 'height': 720, 'url': TEST_URL},
            {'ext': 'webm', 'height': 720, 'url': TEST_URL},
        ]
        info_dict['formats'] = formats
        yie = YoutubeIE(ydl)
        yie._sort_formats(info_dict['formats'])
        ydl.process_ie_result(info_dict)
        downloaded = ydl.downloaded_info_dicts[0]
        self.assertEqual(downloaded['ext'], 'flv')

    def test_format_selection(self):
        """'/'-separated specs fall back left to right; unknown ids are skipped."""
        formats = [
            {'format_id': '35', 'ext': 'mp4', 'preference': 1, 'url': TEST_URL},
            {'format_id': 'example-with-dashes', 'ext': 'webm', 'preference': 1, 'url': TEST_URL},
            {'format_id': '45', 'ext': 'webm', 'preference': 2, 'url': TEST_URL},
            {'format_id': '47', 'ext': 'webm', 'preference': 3, 'url': TEST_URL},
            {'format_id': '2', 'ext': 'flv', 'preference': 4, 'url': TEST_URL},
        ]
        info_dict = _make_result(formats)
        ydl = YDL({'format': '20/47'})
        ydl.process_ie_result(info_dict.copy())
        downloaded = ydl.downloaded_info_dicts[0]
        self.assertEqual(downloaded['format_id'], '47')
        ydl = YDL({'format': '20/71/worst'})
        ydl.process_ie_result(info_dict.copy())
        downloaded = ydl.downloaded_info_dicts[0]
        self.assertEqual(downloaded['format_id'], '35')
        ydl = YDL()
        ydl.process_ie_result(info_dict.copy())
        downloaded = ydl.downloaded_info_dicts[0]
        self.assertEqual(downloaded['format_id'], '2')
        ydl = YDL({'format': 'webm/mp4'})
        ydl.process_ie_result(info_dict.copy())
        downloaded = ydl.downloaded_info_dicts[0]
        self.assertEqual(downloaded['format_id'], '47')
        ydl = YDL({'format': '3gp/40/mp4'})
        ydl.process_ie_result(info_dict.copy())
        downloaded = ydl.downloaded_info_dicts[0]
        self.assertEqual(downloaded['format_id'], '35')
        ydl = YDL({'format': 'example-with-dashes'})
        ydl.process_ie_result(info_dict.copy())
        downloaded = ydl.downloaded_info_dicts[0]
        self.assertEqual(downloaded['format_id'], 'example-with-dashes')

    def test_format_selection_audio(self):
        """bestaudio/worstaudio pick only vcodec == 'none' entries."""
        formats = [
            {'format_id': 'audio-low', 'ext': 'webm', 'preference': 1, 'vcodec': 'none', 'url': TEST_URL},
            {'format_id': 'audio-mid', 'ext': 'webm', 'preference': 2, 'vcodec': 'none', 'url': TEST_URL},
            {'format_id': 'audio-high', 'ext': 'flv', 'preference': 3, 'vcodec': 'none', 'url': TEST_URL},
            {'format_id': 'vid', 'ext': 'mp4', 'preference': 4, 'url': TEST_URL},
        ]
        info_dict = _make_result(formats)
        ydl = YDL({'format': 'bestaudio'})
        ydl.process_ie_result(info_dict.copy())
        downloaded = ydl.downloaded_info_dicts[0]
        self.assertEqual(downloaded['format_id'], 'audio-high')
        ydl = YDL({'format': 'worstaudio'})
        ydl.process_ie_result(info_dict.copy())
        downloaded = ydl.downloaded_info_dicts[0]
        self.assertEqual(downloaded['format_id'], 'audio-low')
        formats = [
            {'format_id': 'vid-low', 'ext': 'mp4', 'preference': 1, 'url': TEST_URL},
            {'format_id': 'vid-high', 'ext': 'mp4', 'preference': 2, 'url': TEST_URL},
        ]
        info_dict = _make_result(formats)
        ydl = YDL({'format': 'bestaudio/worstaudio/best'})
        ydl.process_ie_result(info_dict.copy())
        downloaded = ydl.downloaded_info_dicts[0]
        self.assertEqual(downloaded['format_id'], 'vid-high')

    def test_format_selection_audio_exts(self):
        """Extension-named specs ('mp3') and prefer_free_formats affect audio choice."""
        formats = [
            {'format_id': 'mp3-64', 'ext': 'mp3', 'abr': 64, 'url': 'http://_', 'vcodec': 'none'},
            {'format_id': 'ogg-64', 'ext': 'ogg', 'abr': 64, 'url': 'http://_', 'vcodec': 'none'},
            {'format_id': 'aac-64', 'ext': 'aac', 'abr': 64, 'url': 'http://_', 'vcodec': 'none'},
            {'format_id': 'mp3-32', 'ext': 'mp3', 'abr': 32, 'url': 'http://_', 'vcodec': 'none'},
            {'format_id': 'aac-32', 'ext': 'aac', 'abr': 32, 'url': 'http://_', 'vcodec': 'none'},
        ]
        info_dict = _make_result(formats)
        ydl = YDL({'format': 'best'})
        ie = YoutubeIE(ydl)
        ie._sort_formats(info_dict['formats'])
        ydl.process_ie_result(copy.deepcopy(info_dict))
        downloaded = ydl.downloaded_info_dicts[0]
        self.assertEqual(downloaded['format_id'], 'aac-64')
        ydl = YDL({'format': 'mp3'})
        ie = YoutubeIE(ydl)
        ie._sort_formats(info_dict['formats'])
        ydl.process_ie_result(copy.deepcopy(info_dict))
        downloaded = ydl.downloaded_info_dicts[0]
        self.assertEqual(downloaded['format_id'], 'mp3-64')
        ydl = YDL({'prefer_free_formats': True})
        ie = YoutubeIE(ydl)
        ie._sort_formats(info_dict['formats'])
        ydl.process_ie_result(copy.deepcopy(info_dict))
        downloaded = ydl.downloaded_info_dicts[0]
        self.assertEqual(downloaded['format_id'], 'ogg-64')

    def test_format_selection_video(self):
        """bestvideo/worstvideo pick only acodec == 'none' entries; [..] filters apply."""
        formats = [
            {'format_id': 'dash-video-low', 'ext': 'mp4', 'preference': 1, 'acodec': 'none', 'url': TEST_URL},
            {'format_id': 'dash-video-high', 'ext': 'mp4', 'preference': 2, 'acodec': 'none', 'url': TEST_URL},
            {'format_id': 'vid', 'ext': 'mp4', 'preference': 3, 'url': TEST_URL},
        ]
        info_dict = _make_result(formats)
        ydl = YDL({'format': 'bestvideo'})
        ydl.process_ie_result(info_dict.copy())
        downloaded = ydl.downloaded_info_dicts[0]
        self.assertEqual(downloaded['format_id'], 'dash-video-high')
        ydl = YDL({'format': 'worstvideo'})
        ydl.process_ie_result(info_dict.copy())
        downloaded = ydl.downloaded_info_dicts[0]
        self.assertEqual(downloaded['format_id'], 'dash-video-low')
        ydl = YDL({'format': 'bestvideo[format_id^=dash][format_id$=low]'})
        ydl.process_ie_result(info_dict.copy())
        downloaded = ydl.downloaded_info_dicts[0]
        self.assertEqual(downloaded['format_id'], 'dash-video-low')
        formats = [
            {'format_id': 'vid-vcodec-dot', 'ext': 'mp4', 'preference': 1, 'vcodec': 'avc1.123456', 'acodec': 'none',
             'url': TEST_URL},
        ]
        info_dict = _make_result(formats)
        ydl = YDL({'format': 'bestvideo[vcodec=avc1.123456]'})
        ydl.process_ie_result(info_dict.copy())
        downloaded = ydl.downloaded_info_dicts[0]
        self.assertEqual(downloaded['format_id'], 'vid-vcodec-dot')
    def test_youtube_format_selection(self):
        """Merging ('+'), grouping and multi-selection against YoutubeIE's real format table."""
        order = [
            '38', '37', '46', '22', '45', '35', '44', '18', '34', '43', '6', '5', '17', '36', '13',
            # Apple HTTP Live Streaming
            '96', '95', '94', '93', '92', '132', '151',
            # 3D
            '85', '84', '102', '83', '101', '82', '100',
            # Dash video
            '137', '248', '136', '247', '135', '246',
            '245', '244', '134', '243', '133', '242', '160',
            # Dash audio
            '141', '172', '140', '171', '139',
        ]

        def format_info(f_id):
            info = YoutubeIE._formats[f_id].copy()
            # XXX: In real cases InfoExtractor._parse_mpd_formats() fills up 'acodec'
            # and 'vcodec', while in tests such information is incomplete since
            # commit a6c2c24479e5f4827ceb06f64d855329c0a6f593
            # test_YoutubeDL.test_youtube_format_selection is broken without
            # this fix
            if 'acodec' in info and 'vcodec' not in info:
                info['vcodec'] = 'none'
            elif 'vcodec' in info and 'acodec' not in info:
                info['acodec'] = 'none'
            info['format_id'] = f_id
            info['url'] = 'url:' + f_id
            return info
        formats_order = [format_info(f_id) for f_id in order]
        info_dict = _make_result(list(formats_order), extractor='youtube')
        ydl = YDL({'format': 'bestvideo+bestaudio'})
        yie = YoutubeIE(ydl)
        yie._sort_formats(info_dict['formats'])
        ydl.process_ie_result(info_dict)
        downloaded = ydl.downloaded_info_dicts[0]
        self.assertEqual(downloaded['format_id'], '137+141')
        self.assertEqual(downloaded['ext'], 'mp4')
        info_dict = _make_result(list(formats_order), extractor='youtube')
        ydl = YDL({'format': 'bestvideo[height>=999999]+bestaudio/best'})
        yie = YoutubeIE(ydl)
        yie._sort_formats(info_dict['formats'])
        ydl.process_ie_result(info_dict)
        downloaded = ydl.downloaded_info_dicts[0]
        self.assertEqual(downloaded['format_id'], '38')
        info_dict = _make_result(list(formats_order), extractor='youtube')
        ydl = YDL({'format': 'bestvideo/best,bestaudio'})
        yie = YoutubeIE(ydl)
        yie._sort_formats(info_dict['formats'])
        ydl.process_ie_result(info_dict)
        downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
        self.assertEqual(downloaded_ids, ['137', '141'])
        info_dict = _make_result(list(formats_order), extractor='youtube')
        ydl = YDL({'format': '(bestvideo[ext=mp4],bestvideo[ext=webm])+bestaudio'})
        yie = YoutubeIE(ydl)
        yie._sort_formats(info_dict['formats'])
        ydl.process_ie_result(info_dict)
        downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
        self.assertEqual(downloaded_ids, ['137+141', '248+141'])
        info_dict = _make_result(list(formats_order), extractor='youtube')
        ydl = YDL({'format': '(bestvideo[ext=mp4],bestvideo[ext=webm])[height<=720]+bestaudio'})
        yie = YoutubeIE(ydl)
        yie._sort_formats(info_dict['formats'])
        ydl.process_ie_result(info_dict)
        downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
        self.assertEqual(downloaded_ids, ['136+141', '247+141'])
        info_dict = _make_result(list(formats_order), extractor='youtube')
        ydl = YDL({'format': '(bestvideo[ext=none]/bestvideo[ext=webm])+bestaudio'})
        yie = YoutubeIE(ydl)
        yie._sort_formats(info_dict['formats'])
        ydl.process_ie_result(info_dict)
        downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
        self.assertEqual(downloaded_ids, ['248+141'])
        # 'best/bestvideo' must respect the declared ordering regardless of input order
        for f1, f2 in zip(formats_order, formats_order[1:]):
            info_dict = _make_result([f1, f2], extractor='youtube')
            ydl = YDL({'format': 'best/bestvideo'})
            yie = YoutubeIE(ydl)
            yie._sort_formats(info_dict['formats'])
            ydl.process_ie_result(info_dict)
            downloaded = ydl.downloaded_info_dicts[0]
            self.assertEqual(downloaded['format_id'], f1['format_id'])
            info_dict = _make_result([f2, f1], extractor='youtube')
            ydl = YDL({'format': 'best/bestvideo'})
            yie = YoutubeIE(ydl)
            yie._sort_formats(info_dict['formats'])
            ydl.process_ie_result(info_dict)
            downloaded = ydl.downloaded_info_dicts[0]
            self.assertEqual(downloaded['format_id'], f1['format_id'])

    def test_invalid_format_specs(self):
        """Malformed format specs must raise SyntaxError."""
        def assert_syntax_error(format_spec):
            ydl = YDL({'format': format_spec})
            info_dict = _make_result([{'format_id': 'foo', 'url': TEST_URL}])
            self.assertRaises(SyntaxError, ydl.process_ie_result, info_dict)
        assert_syntax_error('bestvideo,,best')
        assert_syntax_error('+bestaudio')
        assert_syntax_error('bestvideo+')
        assert_syntax_error('/')

    def test_format_filtering(self):
        """'[attr<op>value]' filters: comparison operators, '?' for missing attrs, unit suffixes."""
        formats = [
            {'format_id': 'A', 'filesize': 500, 'width': 1000},
            {'format_id': 'B', 'filesize': 1000, 'width': 500},
            {'format_id': 'C', 'filesize': 1000, 'width': 400},
            {'format_id': 'D', 'filesize': 2000, 'width': 600},
            {'format_id': 'E', 'filesize': 3000},
            {'format_id': 'F'},
            {'format_id': 'G', 'filesize': 1000000},
        ]
        for f in formats:
            f['url'] = 'http://_/'
            f['ext'] = 'unknown'
        info_dict = _make_result(formats)
        ydl = YDL({'format': 'best[filesize<3000]'})
        ydl.process_ie_result(info_dict)
        downloaded = ydl.downloaded_info_dicts[0]
        self.assertEqual(downloaded['format_id'], 'D')
        ydl = YDL({'format': 'best[filesize<=3000]'})
        ydl.process_ie_result(info_dict)
        downloaded = ydl.downloaded_info_dicts[0]
        self.assertEqual(downloaded['format_id'], 'E')
        ydl = YDL({'format': 'best[filesize <= ? 3000]'})
        ydl.process_ie_result(info_dict)
        downloaded = ydl.downloaded_info_dicts[0]
        self.assertEqual(downloaded['format_id'], 'F')
        ydl = YDL({'format': 'best [filesize = 1000] [width>450]'})
        ydl.process_ie_result(info_dict)
        downloaded = ydl.downloaded_info_dicts[0]
        self.assertEqual(downloaded['format_id'], 'B')
        ydl = YDL({'format': 'best [filesize = 1000] [width!=450]'})
        ydl.process_ie_result(info_dict)
        downloaded = ydl.downloaded_info_dicts[0]
        self.assertEqual(downloaded['format_id'], 'C')
        ydl = YDL({'format': '[filesize>?1]'})
        ydl.process_ie_result(info_dict)
        downloaded = ydl.downloaded_info_dicts[0]
        self.assertEqual(downloaded['format_id'], 'G')
        ydl = YDL({'format': '[filesize<1M]'})
        ydl.process_ie_result(info_dict)
        downloaded = ydl.downloaded_info_dicts[0]
        self.assertEqual(downloaded['format_id'], 'E')
        ydl = YDL({'format': '[filesize<1MiB]'})
        ydl.process_ie_result(info_dict)
        downloaded = ydl.downloaded_info_dicts[0]
        self.assertEqual(downloaded['format_id'], 'G')
        ydl = YDL({'format': 'all[width>=400][width<=600]'})
        ydl.process_ie_result(info_dict)
        downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
        self.assertEqual(downloaded_ids, ['B', 'C', 'D'])
        ydl = YDL({'format': 'best[height<40]'})
        try:
            ydl.process_ie_result(info_dict)
        except ExtractorError:
            pass
        self.assertEqual(ydl.downloaded_info_dicts, [])
class TestYoutubeDL(unittest.TestCase):
    """Non-format-selection behaviours of YoutubeDL: subtitle selection,
    filename templates, postprocessors, match filters, playlist slicing and
    URL-transparent extractor chaining."""

    def test_subtitles(self):
        """Subtitle/auto-caption selection across langs, formats and flags."""
        def s_formats(lang, autocaption=False):
            return [{
                'ext': ext,
                'url': 'http://localhost/video.%s.%s' % (lang, ext),
                '_auto': autocaption,
            } for ext in ['vtt', 'srt', 'ass']]
        subtitles = dict((l, s_formats(l)) for l in ['en', 'fr', 'es'])
        auto_captions = dict((l, s_formats(l, True)) for l in ['it', 'pt', 'es'])
        info_dict = {
            'id': 'test',
            'title': 'Test',
            'url': 'http://localhost/video.mp4',
            'subtitles': subtitles,
            'automatic_captions': auto_captions,
            'extractor': 'TEST',
        }

        def get_info(params=None):
            # FIX: the original declared `params={}` — a mutable default —
            # and then mutated it via setdefault, so state leaked between
            # calls that relied on the default.
            params = {} if params is None else params
            params.setdefault('simulate', True)
            ydl = YDL(params)
            ydl.report_warning = lambda *args, **kargs: None
            return ydl.process_video_result(info_dict, download=False)

        result = get_info()
        self.assertFalse(result.get('requested_subtitles'))
        self.assertEqual(result['subtitles'], subtitles)
        self.assertEqual(result['automatic_captions'], auto_captions)
        result = get_info({'writesubtitles': True})
        subs = result['requested_subtitles']
        self.assertTrue(subs)
        self.assertEqual(set(subs.keys()), set(['en']))
        self.assertTrue(subs['en'].get('data') is None)
        self.assertEqual(subs['en']['ext'], 'ass')
        result = get_info({'writesubtitles': True, 'subtitlesformat': 'foo/srt'})
        subs = result['requested_subtitles']
        self.assertEqual(subs['en']['ext'], 'srt')
        result = get_info({'writesubtitles': True, 'subtitleslangs': ['es', 'fr', 'it']})
        subs = result['requested_subtitles']
        self.assertTrue(subs)
        self.assertEqual(set(subs.keys()), set(['es', 'fr']))
        result = get_info({'writesubtitles': True, 'writeautomaticsub': True, 'subtitleslangs': ['es', 'pt']})
        subs = result['requested_subtitles']
        self.assertTrue(subs)
        self.assertEqual(set(subs.keys()), set(['es', 'pt']))
        self.assertFalse(subs['es']['_auto'])
        self.assertTrue(subs['pt']['_auto'])
        result = get_info({'writeautomaticsub': True, 'subtitleslangs': ['es', 'pt']})
        subs = result['requested_subtitles']
        self.assertTrue(subs)
        self.assertEqual(set(subs.keys()), set(['es', 'pt']))
        self.assertTrue(subs['es']['_auto'])
        self.assertTrue(subs['pt']['_auto'])

    def test_add_extra_info(self):
        """add_extra_info must not clobber keys already present."""
        test_dict = {
            'extractor': 'Foo',
        }
        extra_info = {
            'extractor': 'Bar',
            'playlist': 'funny videos',
        }
        YDL.add_extra_info(test_dict, extra_info)
        self.assertEqual(test_dict['extractor'], 'Foo')
        self.assertEqual(test_dict['playlist'], 'funny videos')

    def test_prepare_filename(self):
        """Output templates substitute fields; missing/None fields become 'NA'."""
        info = {
            'id': '1234',
            'ext': 'mp4',
            'width': None,
        }

        def fname(templ):
            ydl = YoutubeDL({'outtmpl': templ})
            return ydl.prepare_filename(info)
        self.assertEqual(fname('%(id)s.%(ext)s'), '1234.mp4')
        self.assertEqual(fname('%(id)s-%(width)s.%(ext)s'), '1234-NA.mp4')
        # Replace missing fields with 'NA'
        self.assertEqual(fname('%(uploader_date)s-%(id)s.%(ext)s'), 'NA-1234.mp4')

    def test_format_note(self):
        """_format_note renders bitrate and fps hints."""
        ydl = YoutubeDL()
        self.assertEqual(ydl._format_note({}), '')
        # FIX: regex patterns are raw strings now; '\s' in a plain string is
        # an invalid escape (DeprecationWarning, SyntaxError in the future).
        assertRegexpMatches(self, ydl._format_note({
            'vbr': 10,
        }), r'^\s*10k$')
        assertRegexpMatches(self, ydl._format_note({
            'fps': 30,
        }), r'^30fps$')

    def test_postprocessors(self):
        """keepvideo controls whether the PP input file survives; PP output always does."""
        filename = 'post-processor-testfile.mp4'
        audiofile = filename + '.mp3'

        class SimplePP(PostProcessor):
            def run(self, info):
                with open(audiofile, 'wt') as f:
                    f.write('EXAMPLE')
                return [info['filepath']], info

        def run_pp(params, PP):
            with open(filename, 'wt') as f:
                f.write('EXAMPLE')
            ydl = YoutubeDL(params)
            ydl.add_post_processor(PP())
            ydl.post_process(filename, {'filepath': filename})

        run_pp({'keepvideo': True}, SimplePP)
        self.assertTrue(os.path.exists(filename), '%s doesn\'t exist' % filename)
        self.assertTrue(os.path.exists(audiofile), '%s doesn\'t exist' % audiofile)
        os.unlink(filename)
        os.unlink(audiofile)
        run_pp({'keepvideo': False}, SimplePP)
        self.assertFalse(os.path.exists(filename), '%s exists' % filename)
        self.assertTrue(os.path.exists(audiofile), '%s doesn\'t exist' % audiofile)
        os.unlink(audiofile)

        class ModifierPP(PostProcessor):
            def run(self, info):
                with open(info['filepath'], 'wt') as f:
                    f.write('MODIFIED')
                return [], info

        run_pp({'keepvideo': False}, ModifierPP)
        self.assertTrue(os.path.exists(filename), '%s doesn\'t exist' % filename)
        os.unlink(filename)

    def test_match_filter(self):
        """match_filter callables and match_filter_func expressions gate downloads."""
        class FilterYDL(YDL):
            def __init__(self, *args, **kwargs):
                super(FilterYDL, self).__init__(*args, **kwargs)
                self.params['simulate'] = True

            def process_info(self, info_dict):
                # Deliberately skips YDL's recording override (super(YDL, ...)):
                # here the recording happens in _match_entry instead.
                super(YDL, self).process_info(info_dict)

            def _match_entry(self, info_dict, incomplete):
                res = super(FilterYDL, self)._match_entry(info_dict, incomplete)
                if res is None:
                    self.downloaded_info_dicts.append(info_dict)
                return res

        first = {
            'id': '1',
            'url': TEST_URL,
            'title': 'one',
            'extractor': 'TEST',
            'duration': 30,
            'filesize': 10 * 1024,
        }
        second = {
            'id': '2',
            'url': TEST_URL,
            'title': 'two',
            'extractor': 'TEST',
            'duration': 10,
            'description': 'foo',
            'filesize': 5 * 1024,
        }
        videos = [first, second]

        def get_videos(filter_=None):
            ydl = FilterYDL({'match_filter': filter_})
            for v in videos:
                ydl.process_ie_result(v, download=True)
            return [v['id'] for v in ydl.downloaded_info_dicts]

        res = get_videos()
        self.assertEqual(res, ['1', '2'])

        def f(v):
            if v['id'] == '1':
                return None
            else:
                return 'Video id is not 1'
        res = get_videos(f)
        self.assertEqual(res, ['1'])
        f = match_filter_func('duration < 30')
        res = get_videos(f)
        self.assertEqual(res, ['2'])
        f = match_filter_func('description = foo')
        res = get_videos(f)
        self.assertEqual(res, ['2'])
        f = match_filter_func('description =? foo')
        res = get_videos(f)
        self.assertEqual(res, ['1', '2'])
        f = match_filter_func('filesize > 5KiB')
        res = get_videos(f)
        self.assertEqual(res, ['1'])

    def test_playlist_items_selection(self):
        """playliststart/playlistend/playlist_items slice a playlist's entries."""
        entries = [{
            'id': compat_str(i),
            'title': compat_str(i),
            'url': TEST_URL,
        } for i in range(1, 5)]
        playlist = {
            '_type': 'playlist',
            'id': 'test',
            'entries': entries,
            'extractor': 'test:playlist',
            'extractor_key': 'test:playlist',
            'webpage_url': 'http://example.com',
        }

        def get_ids(params):
            ydl = YDL(params)
            # make a copy because the dictionary can be modified
            ydl.process_ie_result(playlist.copy())
            return [int(v['id']) for v in ydl.downloaded_info_dicts]

        result = get_ids({})
        self.assertEqual(result, [1, 2, 3, 4])
        result = get_ids({'playlistend': 10})
        self.assertEqual(result, [1, 2, 3, 4])
        result = get_ids({'playlistend': 2})
        self.assertEqual(result, [1, 2])
        result = get_ids({'playliststart': 10})
        self.assertEqual(result, [])
        result = get_ids({'playliststart': 2})
        self.assertEqual(result, [2, 3, 4])
        result = get_ids({'playlist_items': '2-4'})
        self.assertEqual(result, [2, 3, 4])
        result = get_ids({'playlist_items': '2,4'})
        self.assertEqual(result, [2, 4])
        result = get_ids({'playlist_items': '10'})
        self.assertEqual(result, [])

    def test_urlopen_no_file_protocol(self):
        # see https://github.com/rg3/youtube-dl/issues/8227
        ydl = YDL()
        self.assertRaises(compat_urllib_error.URLError, ydl.urlopen, 'file:///etc/passwd')

    def test_do_not_override_ie_key_in_url_transparent(self):
        """An 'ie_key' returned by a url_transparent result must be honoured down the chain."""
        ydl = YDL()

        class Foo1IE(InfoExtractor):
            _VALID_URL = r'foo1:'

            def _real_extract(self, url):
                return {
                    '_type': 'url_transparent',
                    'url': 'foo2:',
                    'ie_key': 'Foo2',
                }

        class Foo2IE(InfoExtractor):
            _VALID_URL = r'foo2:'

            def _real_extract(self, url):
                return {
                    '_type': 'url',
                    'url': 'foo3:',
                    'ie_key': 'Foo3',
                }

        class Foo3IE(InfoExtractor):
            _VALID_URL = r'foo3:'

            def _real_extract(self, url):
                return _make_result([{'url': TEST_URL}])

        ydl.add_info_extractor(Foo1IE(ydl))
        ydl.add_info_extractor(Foo2IE(ydl))
        ydl.add_info_extractor(Foo3IE(ydl))
        ydl.extract_info('foo1:')
        downloaded = ydl.downloaded_info_dicts[0]
        self.assertEqual(downloaded['url'], TEST_URL)
# Allow running this test module directly: `python test_YoutubeDL.py`.
if __name__ == '__main__':
    unittest.main()
# TestYoutubeDL() | 38.422378 | 117 | 0.571782 |
acf2b32db9532afa0e255daf95a2869357698caf | 1,232 | py | Python | DPGAnalysis/Skims/python/dcsonly_json_2012.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 852 | 2015-01-11T21:03:51.000Z | 2022-03-25T21:14:00.000Z | DPGAnalysis/Skims/python/dcsonly_json_2012.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 30,371 | 2015-01-02T00:14:40.000Z | 2022-03-31T23:26:05.000Z | DPGAnalysis/Skims/python/dcsonly_json_2012.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 3,240 | 2015-01-02T05:53:18.000Z | 2022-03-31T17:24:21.000Z | import FWCore.ParameterSet.Config as cms
from FWCore.PythonUtilities.LumiList import LumiList
from os import environ
from os.path import exists, join
def findFileInPath(theFile):
    """Return the first existing copy of `theFile` found under the
    colon-separated directories of $CMSSW_SEARCH_PATH, or None."""
    search_dirs = environ["CMSSW_SEARCH_PATH"].split(":")
    for directory in search_dirs:
        candidate = join(directory, theFile)
        if exists(candidate):
            return candidate
    return None
#--------------------------------------------------
# Select a set of events defined by a set of
# run:luminositySection pairs taken from a JSON file.
#--------------------------------------------------
dcsonly_json_2012_pickEvents = cms.EDFilter(
    "PickEvents",
    # Choose between the two selection definitions PickEvents supports:
    #   run/lumiSection-based, input from a JSON file (what THIS config does)
    #   run/event-based, input from a list file (the historical PickEvents)
    IsRunLsBased = cms.bool(True),
    # the file listrunev is unused in this run/lumiSection-based configuration
    RunEventList = cms.untracked.string('DPGAnalysis/Skims/data/listrunev'),
    # DCSONLY JSON resolved via $CMSSW_SEARCH_PATH (see findFileInPath above)
    LuminositySectionsBlockRange = LumiList(findFileInPath("DPGAnalysis/Skims/data/json_DCSONLY.txt")).getVLuminosityBlockRange()
    )
dcsonly_json_2012 = cms.Sequence( dcsonly_json_2012_pickEvents )
acf2b375435f30b5a31a90c85d91d38759ff54d8 | 17,455 | py | Python | GimmonixSimulation.py | matabares/NaxcaServer | be3fd1df1d015f5099e1684d6b5b6309e3aeb45d | [
"Apache-2.0"
] | null | null | null | GimmonixSimulation.py | matabares/NaxcaServer | be3fd1df1d015f5099e1684d6b5b6309e3aeb45d | [
"Apache-2.0"
] | null | null | null | GimmonixSimulation.py | matabares/NaxcaServer | be3fd1df1d015f5099e1684d6b5b6309e3aeb45d | [
"Apache-2.0"
] | null | null | null | class GimmonixSimulation:
def GimmonixResponse(self, info):
info.send_response(200)
info.send_header('Content-Type', 'text/xml;charset=UTF-8')
info.end_headers()
contentLen = int(info.headers['Content-Length'])
postBody = info.rfile.read(contentLen)
body = str(postBody, "utf-8")
if "HotelsServiceSearchRequest" in body:
if "<CheckIn>2020-01-01T00:00:00</CheckIn>" in body:
file = open("providersimulation/gimmonix/hotelSearch_3DayStay1Room1Adt.xml","r", encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "<CheckIn>2020-01-02T00:00:00</CheckIn>" in body:
file = open("providersimulation/gimmonix/hotelSearch_3DayStay1Room2Adt1Chd.xml","r", encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "<CheckIn>2020-01-03T00:00:00</CheckIn>" in body:
file = open("providersimulation/gimmonix/hotelSearch_3DayStay1Room2Adt1Inf.xml","r", encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "<CheckIn>2020-01-04T00:00:00</CheckIn>" in body:
file = open("providersimulation/gimmonix/hotelSearch_3DayStay1Room2Adt_2Room2Adt.xml","r", encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "<CheckIn>2020-01-05T00:00:00</CheckIn>" in body:
file = open("providersimulation/gimmonix/hotelSearch_3DayStay1Room1Adt_2Room2Adt1Chd.xml","r", encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "<CheckIn>2020-02-01T00:00:00</CheckIn>" in body:
file = open("providersimulation/gimmonix/hotelSearch_Refundable_3DayStay1Room1Adt.xml","r", encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "<CheckIn>2020-02-02T00:00:00</CheckIn>" in body:
file = open("providersimulation/gimmonix/hotelSearch_Refundable_3DayStay1Room2Adt1Chd.xml", "r",
encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "<CheckIn>2020-02-03T00:00:00</CheckIn>" in body:
file = open("providersimulation/gimmonix/hotelSearch_Refundable_3DayStay1Room2Adt1Inf.xml","r", encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "<CheckIn>2020-02-04T00:00:00</CheckIn>" in body:
file = open("providersimulation/gimmonix/hotelSearch_Refundable_3DayStay1Room2Adt_2Room2Adt.xml","r", encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "<CheckIn>2020-03-04T00:00:00</CheckIn>" in body:
file = open("providersimulation/gimmonix/hotelSearch_3DayStay1Room2Adt_2Room2Adt-Copy.xml","r", encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "<CheckIn>2020-02-05T00:00:00</CheckIn>" in body:
file = open("providersimulation/gimmonix/hotelSearch_Refundable_3DayStay1Room1Adt_2Room2Adt1Chd.xml","r", encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "<CheckIn>2020-03-05T00:00:00</CheckIn>" in body:
file = open("providersimulation/gimmonix/hotelSearch_3DayStay1Room1Adt_2Room2Adt1Chd_1.xml","r", encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "<CheckIn>2020-04-05T00:00:00</CheckIn>" in body:
file = open("providersimulation/gimmonix/search_1r1adt_2r2Adt1Chd_refundable.xml","r", encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "<CheckIn>2020-05-05T00:00:00</CheckIn>" in body:
file = open("providersimulation/gimmonix/search_1r1adt_2r2Adt1Chd_Nonrefundable.xml","r", encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "<CheckIn>2020-03-01T00:00:00</CheckIn>" in body:
file = open("providersimulation/gimmonix/hotelSearch_AvailableRoomTest.xml","r", encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "HotelsSupplierDetailsRequest" in body:
if "/110/127631/D20181212T214444/40a3513074ee49c6af2c25b4ae77968f" in body:
file = open("providersimulation/gimmonix/hotelSupplierDetails_1Room1Adt.xml", "r",
encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "/110/127631/D20181212T214843/83f0532f6c42451f98f87f51dd206d6e" in body:
file = open("providersimulation/gimmonix/hotelSupplierDetails_1Room2Adt1Chd.xml", "r",
encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "/110/127631/D20181212T215245/792f22a832624fbd95372744156eb9d4" in body:
file = open("providersimulation/gimmonix/hotelSupplierDetails_1Room2Adt1Inf.xml", "r",
encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "/110/127631/D20181212T221715/c92583fe35d34159b927ad4afe08ab2d" in body:
file = open("providersimulation/gimmonix/hotelSupplierDetails_1Room2Adt_2Room2Adt.xml", "r",
encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "/110/127631/D20181212T221715/c92583fe35d34159b927ad4afe08ab2f" in body:
file = open("providersimulation/gimmonix/hotelSupplierDetails_1Room2Adt_2Room2Adt-Copy.xml", "r",
encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "/110/127631/D20181212T222007/527af9e839434fdeb78e6d5f44f80e08" in body:
file = open("providersimulation/gimmonix/hotelSupplierDetails_1Room1Adt_2Room2Adt1Chd.xml", "r",
encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info/110/127631
if "/D20181212T221715/c92583fe35d34159b927ad4afe08abss" in body:
file = open("providersimulation/gimmonix/hotelSupplierDetails_1Room1Adt_2Room2Adt1Chd_1.xml", "r",
encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "/110/127631/D20190201T214218/b03ac91508fe41eab79ad63d0e819ca3" in body:
file = open("providersimulation/gimmonix/hotelsupplier_1r1Adt_2r2Adt1Chd.xml", "r",
encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "/110/127631/D20190201T214218/b03ac91508fe41eab79ad63d0e819cb4" in body:
file = open("providersimulation/gimmonix/hotelsupplier_1r1Adt_2r2Adt1Chd.xml", "r",
encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "HotelPaymentPreferencesRequest" in body:
if "<PackageID>8a2977d8-2d65-4dfd-a69a-aa566407e52c</PackageID>" in body:
file = open("providersimulation/gimmonix/cancelPolicies_3DayStay1Room1Adt.xml", "r",
encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "<PackageID>0f67d4ec-06b4-4946-81a2-a86c127c7817</PackageID>" in body:
file = open("providersimulation/gimmonix/cancelPolicies_NonRefundable_3DayStay1Room1Adt.xml", "r",
encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
#
if "<PackageID>093baf26-c7fe-4f4b-bfc8-13125492bb17</PackageID>" in body:
file = open("providersimulation/gimmonix/cancelPolicies_3DayStay1Room2Adt1Chd.xml", "r",
encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "<PackageID>2b69f8cb-2a8a-48dc-a0c4-404f0c179a9d</PackageID>" in body:
file = open("providersimulation/gimmonix/cancelPolicies_3DayStay1Room2Adt1Inf.xml", "r",
encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "<PackageID>8614c0d5-498e-456d-98a7-c298443ebfd4</PackageID>" in body:
file = open("providersimulation/gimmonix/cancelPolicies_3DayStay1Room2Adt_2Room2Adt.xml", "r",
encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "<PackageID>8614c0d5-498e-456d-98a7-c298443ebfd4</PackageID>" in body:
file = open("providersimulation/gimmonix/cancelPolicies_3DayStay1Room2Adt_2Room2Adt.xml", "r",
encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "<PackageID>8614c0d5-498e-456d-98a7-c298443ebfd9</PackageID>" in body:
file = open("providersimulation/gimmonix/cancelPolicies_3DayStay1Room2Adt_2Room2Adt.xml", "r",
encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "<PackageID>8614c0d5-498e-456d-98a7-c298443ebfd5</PackageID>" in body:
file = open("providersimulation/gimmonix/cancelPolicies_3DayStay1Room2Adt_2Room2Adt.xml", "r",
encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "<PackageID>c6e43dbf-3ba1-4194-8a86-dac5f7d23345</PackageID>" in body:
file = open("providersimulation/gimmonix/cancelPolicies_3DayStay1Room2Adt_2Room2Adt.xml", "r",
encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "<PackageID>663d2e7d-c9eb-4c8e-b99c-23c5e0f494f3</PackageID>" in body:
file = open("providersimulation/gimmonix/cancelPolicies_3DayStay1Room1Adt_2Room2Adt1Chd.xml", "r",
encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "<PackageID>43e5a508-76a5-4f5f-a3e7-980703828228</PackageID>" in body:
file = open("providersimulation/gimmonix/cancelPolicies_3DayStay1Room1Adt_2Room2Adt1Chd.xml", "r",
encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "<PackageID>43e5a508-76a5-4f5f-a3e7-980703828259</PackageID>" in body:
file = open("providersimulation/gimmonix/cancelPolicies_3DayStay1Room1Adt_2Room2Adt1Chd.xml", "r",
encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "<PackageID>49106ffc-4adf-4bce-ae01-5c4ae75783c0</PackageID>" in body:
file = open("providersimulation/gimmonix/cancelPolicies_1r1Adt_2r2Adt1Chd.xml", "r",
encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "HotelBookRequest" in body:
if "/110/127631/D20181212T214444/40a3513074ee49c6af2c25b4ae77968f" in body:
file = open("providersimulation/gimmonix/successBooking_3DayStay1Room1Adt.xml", "r",
encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "093baf26-c7fe-4f4b-bfc8-13125492bb17" in body:
file = open("providersimulation/gimmonix/successBooking_3DayStay1Room2Adt1Chd.xml", "r",
encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "/110/127631/D20181212T215245/792f22a832624fbd95372744156eb9d4" in body:
file = open("providersimulation/gimmonix/successBooking_3DayStay1Room2Adt1Inf.xml", "r",
encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
#
#if "c6e43dbf-3ba1-4194-8a86-dac5f7d23345" in body:
if "8614c0d5-498e-456d-98a7-c298443ebfd4" in body:
file = open("providersimulation/gimmonix/successBooking_3DayStay1Room2Adt_2Room2Adt.xml", "r",
encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "663d2e7d-c9eb-4c8e-b99c-23c5e0f494f3" in body:
file = open("providersimulation/gimmonix/successBooking_3DayStay1Room1Adt_2Room2Adt1Chd.xml", "r",
encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "00000000-0000-0000-0000-000000000000" in body:
file = open("providersimulation/gimmonix/errorBooking.xml", "r",
encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "HotelBookCancelRequest" in body:
if "3898604" in body:
file = open("providersimulation/gimmonix/successBookingCancel.xml", "r",
encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "3898735" in body:
file = open("providersimulation/gimmonix/successMultipleBookingCancel.xml", "r",
encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
if "111111" in body:
file = open("providersimulation/gimmonix/onErrorBookingCancel.xml", "r",
encoding='utf8')
data = file.read()
file.close()
info.wfile.write(bytes(data, 'UTF-8'))
return info
| 48.351801 | 138 | 0.535606 |
acf2b7578e87d31a128d9dde1313eb37279d12f2 | 2,202 | py | Python | tests/mcmc_test.py | wearelumenai/distclus4py | 9684eb8b692ef823e2b7326dfb29c60fb6d3020e | [
"MIT"
] | 1 | 2020-04-21T13:10:49.000Z | 2020-04-21T13:10:49.000Z | tests/mcmc_test.py | wearelumenai/distclus4py | 9684eb8b692ef823e2b7326dfb29c60fb6d3020e | [
"MIT"
] | 10 | 2019-07-25T10:30:53.000Z | 2021-07-27T18:09:10.000Z | tests/mcmc_test.py | wearelumenai/distclus4py | 9684eb8b692ef823e2b7326dfb29c60fb6d3020e | [
"MIT"
] | 3 | 2019-08-21T11:20:05.000Z | 2022-01-11T21:19:31.000Z | import time
import unittest
from distclus import MCMC
from tests.util import sample, rmse, nan
import numpy as np
class TestMCMC(unittest.TestCase):
def setUp(self):
self.data = sample(10, 2)
def test_mcmc(self):
algo = MCMC(init_k=2)
self.assertTrue(algo.descr >= 1)
def test_push_run_centroids_predict(self):
algo = MCMC(
init_k=2, b=500, amp=1, seed=654126513379
)
algo.push(self.data[:5])
algo.play()
algo.push(self.data[5:])
time.sleep(.3)
self.check_online(algo)
algo.stop()
def test_context(self):
algo = MCMC(
init_k=2, b=500, amp=0.01, seed=654126513379
)
algo.push(self.data[:5])
algo.play()
algo.push(self.data[5:])
time.sleep(.3)
self.check_online(algo)
algo.stop()
def test_fit_predict(self):
algo = MCMC(init_k=2, b=500, amp=0.1)
algo.fit(self.data)
self.check_static(algo)
def test_iterations(self):
algo = MCMC(init_k=2, b=500, amp=0.1, mcmc_iter=5)
algo.fit(self.data)
self.assertEqual(5, algo.iterations)
def test_acceptations(self):
algo = MCMC(init_k=16, b=500, amp=0.1, mcmc_iter=5)
algo.fit(self.data)
self.assertLessEqual(1, algo.acceptations)
def test_cosinus(self):
algo = MCMC(space="cosinus", init_k=2, b=.5, amp=1)
algo.fit(self.data)
centroids = algo.centroids
self.assertGreater(len(centroids), 1)
def test_nan(self):
data = nan()
self.assertRaises(ValueError, MCMC, data=data)
algo = MCMC()
self.assertRaises(ValueError, algo.fit, data=data)
self.assertRaises(ValueError, algo.predict, data=data)
self.assertRaises(ValueError, algo.push, data=data)
def check_static(self, algo):
_, labels = algo.predict(self.data)
centroids = algo.centroids
self.assertLessEqual(rmse(self.data, centroids, labels), 1.)
def check_online(self, algo):
centroids, labels = algo.predict(self.data)
self.assertLessEqual(rmse(self.data, centroids, labels), 1.)
| 26.214286 | 68 | 0.605359 |
acf2b8724fde5d857e665defda7fe4635ce5ab6c | 4,268 | py | Python | pymatgen/analysis/tests/test_ewald.py | ctoher/pymatgen | 54df358f61fbe60417e90850811b75c1a9e2e230 | [
"MIT"
] | null | null | null | pymatgen/analysis/tests/test_ewald.py | ctoher/pymatgen | 54df358f61fbe60417e90850811b75c1a9e2e230 | [
"MIT"
] | null | null | null | pymatgen/analysis/tests/test_ewald.py | ctoher/pymatgen | 54df358f61fbe60417e90850811b75c1a9e2e230 | [
"MIT"
] | null | null | null | # coding: utf-8
from __future__ import unicode_literals
import unittest
import os
from pymatgen.analysis.ewald import EwaldSummation, EwaldMinimizer
from pymatgen.io.vaspio.vasp_input import Poscar
import numpy as np
test_dir = os.path.join(os.path.dirname(__file__), "..", "..", "..",
'test_files')
class EwaldSummationTest(unittest.TestCase):
def test_init(self):
filepath = os.path.join(test_dir, 'POSCAR')
p = Poscar.from_file(filepath)
original_s = p.structure
s = original_s.copy()
s.add_oxidation_state_by_element({"Li": 1, "Fe": 2,
"P": 5, "O": -2})
ham = EwaldSummation(s)
self.assertAlmostEqual(ham.real_space_energy, -354.91294268, 4,
"Real space energy incorrect!")
self.assertAlmostEqual(ham.reciprocal_space_energy, 25.475754801, 4)
self.assertAlmostEqual(ham.point_energy, -790.463835033, 4,
"Point space energy incorrect!")
self.assertAlmostEqual(ham.total_energy, -1119.90102291, 2,
"Total space energy incorrect!")
self.assertAlmostEqual(ham.forces[0,0], -1.98818620e-01, 4,
"Forces incorrect")
self.assertAlmostEqual(sum(sum(abs(ham.forces))), 915.925354346, 4,
"Forces incorrect")
self.assertAlmostEqual(sum(sum(ham.real_space_energy_matrix)),
- 354.91294268, 4,
"Real space energy matrix incorrect!")
self.assertAlmostEqual(sum(sum(ham.reciprocal_space_energy_matrix)),
25.475754801, 4,
"Reciprocal space energy matrix incorrect!")
self.assertAlmostEqual(sum(ham.point_energy_matrix), -790.463835033,
4, "Point space energy matrix incorrect!")
self.assertAlmostEqual(sum(sum(ham.total_energy_matrix)),
- 1119.90102291, 2,
"Total space energy matrix incorrect!")
#note that forces are not individually tested, but should work fine.
self.assertRaises(ValueError, EwaldSummation, original_s)
#try sites with charge.
charges = []
for site in original_s:
if site.specie.symbol == "Li":
charges.append(1)
elif site.specie.symbol == "Fe":
charges.append(2)
elif site.specie.symbol == "P":
charges.append(5)
else:
charges.append(-2)
original_s.add_site_property('charge', charges)
ham2 = EwaldSummation(original_s)
self.assertAlmostEqual(ham2.real_space_energy, -354.91294268, 4,
"Real space energy incorrect!")
class EwaldMinimizerTest(unittest.TestCase):
def test_init(self):
matrix = np.array([[-3., 3., 4., -0., 3., 3., 1., 14., 9., -4.],
[1., -3., -3., 12., -4., -1., 5., 11., 1., 12.],
[14., 7., 13., 15., 13., 5., -5., 10., 14., -2.],
[9., 13., 4., 1., 3., -4., 7., 0., 6., -4.],
[4., -4., 6., 1., 12., -4., -2., 13., 0., 6.],
[13., 7., -4., 12., -2., 9., 8., -5., 3., 1.],
[8., 1., 10., -4., -2., 4., 13., 12., -3., 13.],
[2., 11., 8., 1., -1., 5., -3., 4., 5., 0.],
[-0., 14., 4., 3., -1., -5., 7., -1., -1., 3.],
[2., -2., 10., 1., 6., -5., -3., 12., 0., 13.]])
m_list = [[.9, 4, [1, 2, 3, 4, 8], 'a'], [-1, 2, [5, 6, 7], 'b']]
e_min = EwaldMinimizer(matrix, m_list, 50)
self.assertEqual(len(e_min.output_lists), 15,
"Wrong number of permutations returned")
self.assertAlmostEqual(e_min.minimized_sum, 111.63, 3,
"Returned wrong minimum value")
self.assertEqual(len(e_min.best_m_list), 6,
"Returned wrong number of permutations")
if __name__ == "__main__":
unittest.main()
| 44.926316 | 76 | 0.500703 |
acf2b9ed948e5b02826f882f3671040a89502e7a | 1,192 | py | Python | setup.py | mixilchenko/cronosparser | db62195528ff7bfb9ba428ce7429eea9f01aed76 | [
"MIT"
] | 40 | 2016-02-12T13:47:15.000Z | 2022-03-28T14:08:01.000Z | setup.py | mixilchenko/cronosparser | db62195528ff7bfb9ba428ce7429eea9f01aed76 | [
"MIT"
] | 5 | 2016-07-15T14:56:52.000Z | 2021-01-13T10:03:05.000Z | setup.py | mixilchenko/cronosparser | db62195528ff7bfb9ba428ce7429eea9f01aed76 | [
"MIT"
] | 13 | 2016-04-25T04:39:50.000Z | 2021-04-01T10:44:34.000Z | from setuptools import setup, find_packages
setup(
name='cronosparser',
version='1.0',
description="Parser for CronosPro / CronosPlus database files.",
long_description="",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
],
keywords='files walk index survey',
author='OCCRP',
author_email='tech@occrp.org',
url='http://github.com/occrp/cronosparser',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'test']),
namespace_packages=[],
package_data={},
include_package_data=True,
zip_safe=False,
test_suite='nose.collector',
install_requires=[
'six',
'click',
],
tests_require=[
'nose',
'coverage',
],
entry_points={
'console_scripts': [
'cronos2csv = cronos.cli:main'
]
}
)
| 27.090909 | 69 | 0.589765 |
acf2bad047ab9811502c35d9916c5e39725ff6f6 | 14,452 | py | Python | src/cct.py | simonlevine/Compact-Transformers | aa2124c3742c875e1d3bdfc122f1291aee1db937 | [
"Apache-2.0"
] | 281 | 2021-04-13T01:17:28.000Z | 2022-03-23T15:18:24.000Z | src/cct.py | miaolin/Compact-Transformers | aa2124c3742c875e1d3bdfc122f1291aee1db937 | [
"Apache-2.0"
] | 49 | 2021-04-16T12:59:55.000Z | 2022-03-18T18:25:27.000Z | src/cct.py | miaolin/Compact-Transformers | aa2124c3742c875e1d3bdfc122f1291aee1db937 | [
"Apache-2.0"
] | 42 | 2021-04-13T01:53:04.000Z | 2022-03-13T06:31:57.000Z | from torch.hub import load_state_dict_from_url
import torch.nn as nn
from .utils.transformers import TransformerClassifier
from .utils.tokenizer import Tokenizer
from .utils.helpers import pe_check, fc_check
try:
from timm.models.registry import register_model
except ImportError:
from .registry import register_model
model_urls = {
'cct_7_3x1_32':
'http://ix.cs.uoregon.edu/~alih/compact-transformers/checkpoints/pretrained/cct_7_3x1_32_cifar10_300epochs.pth',
'cct_7_3x1_32_sine':
'http://ix.cs.uoregon.edu/~alih/compact-transformers/checkpoints/pretrained/cct_7_3x1_32_sine_cifar10_5000epochs.pth',
'cct_7_3x1_32_c100':
'http://ix.cs.uoregon.edu/~alih/compact-transformers/checkpoints/pretrained/cct_7_3x1_32_cifar100_300epochs.pth',
'cct_7_3x1_32_sine_c100':
'http://ix.cs.uoregon.edu/~alih/compact-transformers/checkpoints/pretrained/cct_7_3x1_32_sine_cifar100_5000epochs.pth',
'cct_7_7x2_224_sine':
'http://ix.cs.uoregon.edu/~alih/compact-transformers/checkpoints/pretrained/cct_7_7x2_224_flowers102.pth',
'cct_14_7x2_224':
'http://ix.cs.uoregon.edu/~alih/compact-transformers/checkpoints/pretrained/cct_14_7x2_224_imagenet.pth',
'cct_14_7x2_384':
'http://ix.cs.uoregon.edu/~alih/compact-transformers/checkpoints/finetuned/cct_14_7x2_384_imagenet.pth',
'cct_14_7x2_384_fl':
'http://ix.cs.uoregon.edu/~alih/compact-transformers/checkpoints/finetuned/cct_14_7x2_384_flowers102.pth',
}
class CCT(nn.Module):
def __init__(self,
img_size=224,
embedding_dim=768,
n_input_channels=3,
n_conv_layers=1,
kernel_size=7,
stride=2,
padding=3,
pooling_kernel_size=3,
pooling_stride=2,
pooling_padding=1,
dropout=0.,
attention_dropout=0.1,
stochastic_depth=0.1,
num_layers=14,
num_heads=6,
mlp_ratio=4.0,
num_classes=1000,
positional_embedding='learnable',
*args, **kwargs):
super(CCT, self).__init__()
self.tokenizer = Tokenizer(n_input_channels=n_input_channels,
n_output_channels=embedding_dim,
kernel_size=kernel_size,
stride=stride,
padding=padding,
pooling_kernel_size=pooling_kernel_size,
pooling_stride=pooling_stride,
pooling_padding=pooling_padding,
max_pool=True,
activation=nn.ReLU,
n_conv_layers=n_conv_layers,
conv_bias=False)
self.classifier = TransformerClassifier(
sequence_length=self.tokenizer.sequence_length(n_channels=n_input_channels,
height=img_size,
width=img_size),
embedding_dim=embedding_dim,
seq_pool=True,
dropout=dropout,
attention_dropout=attention_dropout,
stochastic_depth=stochastic_depth,
num_layers=num_layers,
num_heads=num_heads,
mlp_ratio=mlp_ratio,
num_classes=num_classes,
positional_embedding=positional_embedding
)
def forward(self, x):
x = self.tokenizer(x)
return self.classifier(x)
def _cct(arch, pretrained, progress,
num_layers, num_heads, mlp_ratio, embedding_dim,
kernel_size=3, stride=None, padding=None,
*args, **kwargs):
stride = stride if stride is not None else max(1, (kernel_size // 2) - 1)
padding = padding if padding is not None else max(1, (kernel_size // 2))
model = CCT(num_layers=num_layers,
num_heads=num_heads,
mlp_ratio=mlp_ratio,
embedding_dim=embedding_dim,
kernel_size=kernel_size,
stride=stride,
padding=padding,
*args, **kwargs)
if pretrained:
if arch in model_urls:
state_dict = load_state_dict_from_url(model_urls[arch],
progress=progress)
state_dict = pe_check(model, state_dict)
state_dict = fc_check(model, state_dict)
model.load_state_dict(state_dict)
else:
raise RuntimeError(f'Variant {arch} does not yet have pretrained weights.')
return model
def cct_2(arch, pretrained, progress, *args, **kwargs):
return _cct(arch, pretrained, progress, num_layers=2, num_heads=2, mlp_ratio=1, embedding_dim=128,
*args, **kwargs)
def cct_4(arch, pretrained, progress, *args, **kwargs):
return _cct(arch, pretrained, progress, num_layers=4, num_heads=2, mlp_ratio=1, embedding_dim=128,
*args, **kwargs)
def cct_6(arch, pretrained, progress, *args, **kwargs):
return _cct(arch, pretrained, progress, num_layers=6, num_heads=4, mlp_ratio=2, embedding_dim=256,
*args, **kwargs)
def cct_7(arch, pretrained, progress, *args, **kwargs):
return _cct(arch, pretrained, progress, num_layers=7, num_heads=4, mlp_ratio=2, embedding_dim=256,
*args, **kwargs)
def cct_14(arch, pretrained, progress, *args, **kwargs):
return _cct(arch, pretrained, progress, num_layers=14, num_heads=6, mlp_ratio=3, embedding_dim=384,
*args, **kwargs)
@register_model
def cct_2_3x2_32(pretrained=False, progress=False,
img_size=32, positional_embedding='learnable', num_classes=10,
*args, **kwargs):
return cct_2('cct_2_3x2_32', pretrained, progress,
kernel_size=3, n_conv_layers=2,
img_size=img_size, positional_embedding=positional_embedding,
num_classes=num_classes,
*args, **kwargs)
@register_model
def cct_2_3x2_32_sine(pretrained=False, progress=False,
img_size=32, positional_embedding='sine', num_classes=10,
*args, **kwargs):
return cct_2('cct_2_3x2_32_sine', pretrained, progress,
kernel_size=3, n_conv_layers=2,
img_size=img_size, positional_embedding=positional_embedding,
num_classes=num_classes,
*args, **kwargs)
@register_model
def cct_4_3x2_32(pretrained=False, progress=False,
img_size=32, positional_embedding='learnable', num_classes=10,
*args, **kwargs):
return cct_4('cct_4_3x2_32', pretrained, progress,
kernel_size=3, n_conv_layers=2,
img_size=img_size, positional_embedding=positional_embedding,
num_classes=num_classes,
*args, **kwargs)
@register_model
def cct_4_3x2_32_sine(pretrained=False, progress=False,
img_size=32, positional_embedding='sine', num_classes=10,
*args, **kwargs):
return cct_4('cct_4_3x2_32_sine', pretrained, progress,
kernel_size=3, n_conv_layers=2,
img_size=img_size, positional_embedding=positional_embedding,
num_classes=num_classes,
*args, **kwargs)
@register_model
def cct_6_3x1_32(pretrained=False, progress=False,
img_size=32, positional_embedding='learnable', num_classes=10,
*args, **kwargs):
return cct_6('cct_6_3x1_32', pretrained, progress,
kernel_size=3, n_conv_layers=1,
img_size=img_size, positional_embedding=positional_embedding,
num_classes=num_classes,
*args, **kwargs)
@register_model
def cct_6_3x1_32_sine(pretrained=False, progress=False,
img_size=32, positional_embedding='sine', num_classes=10,
*args, **kwargs):
return cct_6('cct_6_3x1_32_sine', pretrained, progress,
kernel_size=3, n_conv_layers=1,
img_size=img_size, positional_embedding=positional_embedding,
num_classes=num_classes,
*args, **kwargs)
@register_model
def cct_6_3x2_32(pretrained=False, progress=False,
img_size=32, positional_embedding='learnable', num_classes=10,
*args, **kwargs):
return cct_6('cct_6_3x2_32', pretrained, progress,
kernel_size=3, n_conv_layers=2,
img_size=img_size, positional_embedding=positional_embedding,
num_classes=num_classes,
*args, **kwargs)
@register_model
def cct_6_3x2_32_sine(pretrained=False, progress=False,
img_size=32, positional_embedding='sine', num_classes=10,
*args, **kwargs):
return cct_6('cct_6_3x2_32_sine', pretrained, progress,
kernel_size=3, n_conv_layers=2,
img_size=img_size, positional_embedding=positional_embedding,
num_classes=num_classes,
*args, **kwargs)
@register_model
def cct_7_3x1_32(pretrained=False, progress=False,
img_size=32, positional_embedding='learnable', num_classes=10,
*args, **kwargs):
return cct_7('cct_7_3x1_32', pretrained, progress,
kernel_size=3, n_conv_layers=1,
img_size=img_size, positional_embedding=positional_embedding,
num_classes=num_classes,
*args, **kwargs)
@register_model
def cct_7_3x1_32_sine(pretrained=False, progress=False,
img_size=32, positional_embedding='sine', num_classes=10,
*args, **kwargs):
return cct_7('cct_7_3x1_32_sine', pretrained, progress,
kernel_size=3, n_conv_layers=1,
img_size=img_size, positional_embedding=positional_embedding,
num_classes=num_classes,
*args, **kwargs)
@register_model
def cct_7_3x1_32_c100(pretrained=False, progress=False,
img_size=32, positional_embedding='learnable', num_classes=100,
*args, **kwargs):
return cct_7('cct_7_3x1_32_c100', pretrained, progress,
kernel_size=3, n_conv_layers=1,
img_size=img_size, positional_embedding=positional_embedding,
num_classes=num_classes,
*args, **kwargs)
@register_model
def cct_7_3x1_32_sine_c100(pretrained=False, progress=False,
img_size=32, positional_embedding='sine', num_classes=100,
*args, **kwargs):
return cct_7('cct_7_3x1_32_sine_c100', pretrained, progress,
kernel_size=3, n_conv_layers=1,
img_size=img_size, positional_embedding=positional_embedding,
num_classes=num_classes,
*args, **kwargs)
@register_model
def cct_7_3x2_32(pretrained=False, progress=False,
img_size=32, positional_embedding='learnable', num_classes=10,
*args, **kwargs):
return cct_7('cct_7_3x2_32', pretrained, progress,
kernel_size=3, n_conv_layers=2,
img_size=img_size, positional_embedding=positional_embedding,
num_classes=num_classes,
*args, **kwargs)
@register_model
def cct_7_3x2_32_sine(pretrained=False, progress=False,
img_size=32, positional_embedding='sine', num_classes=10,
*args, **kwargs):
return cct_7('cct_7_3x2_32_sine', pretrained, progress,
kernel_size=3, n_conv_layers=2,
img_size=img_size, positional_embedding=positional_embedding,
num_classes=num_classes,
*args, **kwargs)
@register_model
def cct_7_7x2_224(pretrained=False, progress=False,
img_size=224, positional_embedding='learnable', num_classes=102,
*args, **kwargs):
return cct_7('cct_7_7x2_224', pretrained, progress,
kernel_size=7, n_conv_layers=2,
img_size=img_size, positional_embedding=positional_embedding,
num_classes=num_classes,
*args, **kwargs)
@register_model
def cct_7_7x2_224_sine(pretrained=False, progress=False,
img_size=224, positional_embedding='sine', num_classes=102,
*args, **kwargs):
return cct_7('cct_7_7x2_224_sine', pretrained, progress,
kernel_size=7, n_conv_layers=2,
img_size=img_size, positional_embedding=positional_embedding,
num_classes=num_classes,
*args, **kwargs)
@register_model
def cct_14_7x2_224(pretrained=False, progress=False,
img_size=224, positional_embedding='learnable', num_classes=1000,
*args, **kwargs):
return cct_14('cct_14_7x2_224', pretrained, progress,
kernel_size=7, n_conv_layers=2,
img_size=img_size, positional_embedding=positional_embedding,
num_classes=num_classes,
*args, **kwargs)
@register_model
def cct_14_7x2_384(pretrained=False, progress=False,
img_size=384, positional_embedding='learnable', num_classes=1000,
*args, **kwargs):
return cct_14('cct_14_7x2_384', pretrained, progress,
kernel_size=7, n_conv_layers=2,
img_size=img_size, positional_embedding=positional_embedding,
num_classes=num_classes,
*args, **kwargs)
@register_model
def cct_14_7x2_384_fl(pretrained=False, progress=False,
img_size=384, positional_embedding='learnable', num_classes=102,
*args, **kwargs):
return cct_14('cct_14_7x2_384_fl', pretrained, progress,
kernel_size=7, n_conv_layers=2,
img_size=img_size, positional_embedding=positional_embedding,
num_classes=num_classes,
*args, **kwargs)
| 41.528736 | 127 | 0.607044 |
acf2bb1ee3e4ffee979b3e93c3c21a71490c6c7b | 1,092 | py | Python | mycroft/util/lang/parse_en.py | j1nx/mycroft-core | 8fb0167662b97a5ed5d8b5133afbb0aca004f5e3 | [
"Apache-2.0"
] | null | null | null | mycroft/util/lang/parse_en.py | j1nx/mycroft-core | 8fb0167662b97a5ed5d8b5133afbb0aca004f5e3 | [
"Apache-2.0"
] | 5 | 2020-07-16T13:17:57.000Z | 2022-03-12T00:39:37.000Z | mycroft/util/lang/parse_en.py | j1nx/mycroft-core | 8fb0167662b97a5ed5d8b5133afbb0aca004f5e3 | [
"Apache-2.0"
] | null | null | null | #
# Copyright 2017 Mycroft AI Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Backwards compatibility with mycroft-core tests to verify functionality."""
from lingua_franca.lang.parse_en import *
from lingua_franca.lang.parse_en import _extract_whole_number_with_text_en
from lingua_franca.lang.parse_en import _extract_decimal_with_text_en
from lingua_franca.lang.parse_common import ReplaceableNumber
from lingua_franca.lang.parse_common import tokenize as _tokenize
from lingua_franca.lang.parse_common import Token as _Token
class _ReplaceableNumber(ReplaceableNumber):
pass
| 40.444444 | 78 | 0.807692 |
acf2bb494418ae440422a5f253631ccc258708fc | 13,427 | py | Python | model_utils.py | markWJJ/xlnet1 | 0b642d14dd8aec7f1e1ecbf7d6942d5faa6be1f0 | [
"Apache-2.0"
] | 1 | 2019-07-01T01:43:42.000Z | 2019-07-01T01:43:42.000Z | model_utils.py | punkyBella/xlnet | be17c294651b6fb2a3edff1383833b0be72e91c9 | [
"Apache-2.0"
] | null | null | null | model_utils.py | punkyBella/xlnet | be17c294651b6fb2a3edff1383833b0be72e91c9 | [
"Apache-2.0"
] | 1 | 2019-11-20T18:47:53.000Z | 2019-11-20T18:47:53.000Z | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import re
import numpy as np
import six
from os.path import join
from six.moves import zip
from absl import flags
import tensorflow as tf
def configure_tpu(FLAGS):
if FLAGS.use_tpu:
tpu_cluster = tf.contrib.cluster_resolver.TPUClusterResolver(
FLAGS.tpu, zone=FLAGS.tpu_zone, project=FLAGS.gcp_project)
master = tpu_cluster.get_master()
else:
tpu_cluster = None
master = FLAGS.master
session_config = tf.ConfigProto(allow_soft_placement=True)
# Uncomment the following line if you hope to monitor GPU RAM growth
# session_config.gpu_options.allow_growth = True
if FLAGS.use_tpu:
strategy = None
tf.logging.info('Use TPU without distribute strategy.')
elif FLAGS.num_core_per_host == 1:
strategy = None
tf.logging.info('Single device mode.')
else:
strategy = tf.contrib.distribute.MirroredStrategy(
num_gpus=FLAGS.num_core_per_host)
tf.logging.info('Use MirroredStrategy with %d devices.',
strategy.num_replicas_in_sync)
per_host_input = tf.contrib.tpu.InputPipelineConfig.PER_HOST_V2
run_config = tf.contrib.tpu.RunConfig(
master=master,
model_dir=FLAGS.model_dir,
session_config=session_config,
tpu_config=tf.contrib.tpu.TPUConfig(
iterations_per_loop=FLAGS.iterations,
num_shards=FLAGS.num_hosts * FLAGS.num_core_per_host,
per_host_input_for_training=per_host_input),
keep_checkpoint_max=FLAGS.max_save,
save_checkpoints_secs=None,
save_checkpoints_steps=FLAGS.save_steps,
train_distribute=strategy
)
return run_config
def init_from_checkpoint(FLAGS, global_vars=False):
tvars = tf.global_variables() if global_vars else tf.trainable_variables()
initialized_variable_names = {}
scaffold_fn = None
if FLAGS.init_checkpoint is not None:
if FLAGS.init_checkpoint.endswith("latest"):
ckpt_dir = os.path.dirname(FLAGS.init_checkpoint)
init_checkpoint = tf.train.latest_checkpoint(ckpt_dir)
else:
init_checkpoint = FLAGS.init_checkpoint
tf.logging.info("Initialize from the ckpt {}".format(init_checkpoint))
(assignment_map, initialized_variable_names
) = get_assignment_map_from_checkpoint(tvars, init_checkpoint)
if FLAGS.use_tpu:
def tpu_scaffold():
tf.train.init_from_checkpoint(init_checkpoint, assignment_map)
return tf.train.Scaffold()
scaffold_fn = tpu_scaffold
else:
tf.train.init_from_checkpoint(init_checkpoint, assignment_map)
# Log customized initialization
tf.logging.info("**** Global Variables ****")
for var in tvars:
init_string = ""
if var.name in initialized_variable_names:
init_string = ", *INIT_FROM_CKPT*"
tf.logging.info(" name = %s, shape = %s%s", var.name, var.shape,
init_string)
return scaffold_fn
def get_train_op(FLAGS, total_loss, grads_and_vars=None):
global_step = tf.train.get_or_create_global_step()
# increase the learning rate linearly
if FLAGS.warmup_steps > 0:
warmup_lr = (tf.cast(global_step, tf.float32)
/ tf.cast(FLAGS.warmup_steps, tf.float32)
* FLAGS.learning_rate)
else:
warmup_lr = 0.0
# decay the learning rate
if FLAGS.decay_method == "poly":
decay_lr = tf.train.polynomial_decay(
FLAGS.learning_rate,
global_step=global_step - FLAGS.warmup_steps,
decay_steps=FLAGS.train_steps - FLAGS.warmup_steps,
end_learning_rate=FLAGS.learning_rate * FLAGS.min_lr_ratio)
elif FLAGS.decay_method == "cos":
decay_lr = tf.train.cosine_decay(
FLAGS.learning_rate,
global_step=global_step - FLAGS.warmup_steps,
decay_steps=FLAGS.train_steps - FLAGS.warmup_steps,
alpha=FLAGS.min_lr_ratio)
else:
raise ValueError(FLAGS.decay_method)
learning_rate = tf.where(global_step < FLAGS.warmup_steps,
warmup_lr, decay_lr)
if FLAGS.weight_decay == 0:
optimizer = tf.train.AdamOptimizer(
learning_rate=learning_rate,
epsilon=FLAGS.adam_epsilon)
elif FLAGS.weight_decay > 0 and FLAGS.num_core_per_host == 1:
optimizer = AdamWeightDecayOptimizer(
learning_rate=learning_rate,
epsilon=FLAGS.adam_epsilon,
exclude_from_weight_decay=["LayerNorm", "layer_norm", "bias"],
weight_decay_rate=FLAGS.weight_decay)
else:
raise ValueError("Do not support `weight_decay > 0` with multi-gpu "
"training so far.")
if FLAGS.use_tpu:
optimizer = tf.contrib.tpu.CrossShardOptimizer(optimizer)
if grads_and_vars is None:
grads_and_vars = optimizer.compute_gradients(total_loss)
gradients, variables = zip(*grads_and_vars)
clipped, gnorm = tf.clip_by_global_norm(gradients, FLAGS.clip)
train_op = optimizer.apply_gradients(
zip(clipped, variables), global_step=global_step)
# Manually increment `global_step` for AdamWeightDecayOptimizer
if isinstance(optimizer, AdamWeightDecayOptimizer):
new_global_step = global_step + 1
train_op = tf.group(train_op, [global_step.assign(new_global_step)])
return train_op, learning_rate, gnorm
def clean_ckpt(_):
input_ckpt = FLAGS.clean_input_ckpt
output_model_dir = FLAGS.clean_output_model_dir
tf.reset_default_graph()
var_list = tf.contrib.framework.list_variables(input_ckpt)
var_values, var_dtypes = {}, {}
for (name, shape) in var_list:
if not name.startswith("global_step") and "adam" not in name.lower():
var_values[name] = None
tf.logging.info("Include {}".format(name))
else:
tf.logging.info("Exclude {}".format(name))
tf.logging.info("Loading from {}".format(input_ckpt))
reader = tf.contrib.framework.load_checkpoint(input_ckpt)
for name in var_values:
tensor = reader.get_tensor(name)
var_dtypes[name] = tensor.dtype
var_values[name] = tensor
with tf.variable_scope(tf.get_variable_scope(), reuse=tf.AUTO_REUSE):
tf_vars = [
tf.get_variable(v, shape=var_values[v].shape, dtype=var_dtypes[v])
for v in var_values
]
placeholders = [tf.placeholder(v.dtype, shape=v.shape) for v in tf_vars]
assign_ops = [tf.assign(v, p) for (v, p) in zip(tf_vars, placeholders)]
global_step = tf.Variable(
0, name="global_step", trainable=False, dtype=tf.int64)
saver = tf.train.Saver(tf.all_variables())
if not tf.gfile.Exists(output_model_dir):
tf.gfile.MakeDirs(output_model_dir)
# Build a model consisting only of variables, set them to the average values.
with tf.Session() as sess:
sess.run(tf.initialize_all_variables())
for p, assign_op, (name, value) in zip(placeholders, assign_ops,
six.iteritems(var_values)):
sess.run(assign_op, {p: value})
# Use the built saver to save the averaged checkpoint.
saver.save(sess, join(output_model_dir, "model.ckpt"),
global_step=global_step)
def avg_checkpoints(model_dir, output_model_dir, last_k):
tf.reset_default_graph()
checkpoint_state = tf.train.get_checkpoint_state(model_dir)
checkpoints = checkpoint_state.all_model_checkpoint_paths[- last_k:]
var_list = tf.contrib.framework.list_variables(checkpoints[0])
var_values, var_dtypes = {}, {}
for (name, shape) in var_list:
if not name.startswith("global_step"):
var_values[name] = np.zeros(shape)
for checkpoint in checkpoints:
reader = tf.contrib.framework.load_checkpoint(checkpoint)
for name in var_values:
tensor = reader.get_tensor(name)
var_dtypes[name] = tensor.dtype
var_values[name] += tensor
tf.logging.info("Read from checkpoint %s", checkpoint)
for name in var_values: # Average.
var_values[name] /= len(checkpoints)
with tf.variable_scope(tf.get_variable_scope(), reuse=tf.AUTO_REUSE):
tf_vars = [
tf.get_variable(v, shape=var_values[v].shape, dtype=var_dtypes[v])
for v in var_values
]
placeholders = [tf.placeholder(v.dtype, shape=v.shape) for v in tf_vars]
assign_ops = [tf.assign(v, p) for (v, p) in zip(tf_vars, placeholders)]
global_step = tf.Variable(
0, name="global_step", trainable=False, dtype=tf.int64)
saver = tf.train.Saver(tf.all_variables())
# Build a model consisting only of variables, set them to the average values.
with tf.Session() as sess:
sess.run(tf.initialize_all_variables())
for p, assign_op, (name, value) in zip(placeholders, assign_ops,
six.iteritems(var_values)):
sess.run(assign_op, {p: value})
# Use the built saver to save the averaged checkpoint.
saver.save(sess, join(output_model_dir, "model.ckpt"),
global_step=global_step)
def get_assignment_map_from_checkpoint(tvars, init_checkpoint):
"""Compute the union of the current variables and checkpoint variables."""
assignment_map = {}
initialized_variable_names = {}
name_to_variable = collections.OrderedDict()
for var in tvars:
name = var.name
m = re.match("^(.*):\\d+$", name)
if m is not None:
name = m.group(1)
name_to_variable[name] = var
init_vars = tf.train.list_variables(init_checkpoint)
assignment_map = collections.OrderedDict()
for x in init_vars:
(name, var) = (x[0], x[1])
# tf.logging.info('original name: %s', name)
if name not in name_to_variable:
continue
# assignment_map[name] = name
assignment_map[name] = name_to_variable[name]
initialized_variable_names[name] = 1
initialized_variable_names[name + ":0"] = 1
return (assignment_map, initialized_variable_names)
class AdamWeightDecayOptimizer(tf.train.Optimizer):
  """A basic Adam optimizer that includes "correct" L2 weight decay.

  Weight decay is applied directly to the parameters (decoupled from the
  Adam m/v accumulators) rather than by adding an L2 term to the loss.
  """

  def __init__(self,
               learning_rate,
               weight_decay_rate=0.0,
               beta_1=0.9,
               beta_2=0.999,
               epsilon=1e-6,
               exclude_from_weight_decay=None,
               include_in_weight_decay=["r_s_bias", "r_r_bias", "r_w_bias"],
               name="AdamWeightDecayOptimizer"):
    """Constructs a AdamWeightDecayOptimizer.

    Args:
      learning_rate: scalar or tensor learning rate multiplying each update.
      weight_decay_rate: decoupled L2 decay coefficient; 0.0 disables decay
        (see `_do_use_weight_decay`, which short-circuits on a falsy rate).
      beta_1: exponential decay rate for the first-moment estimate `m`.
      beta_2: exponential decay rate for the second-moment estimate `v`.
      epsilon: small constant added to sqrt(v) for numerical stability.
      exclude_from_weight_decay: optional list of regex patterns; parameters
        whose names match are exempt from weight decay.
      include_in_weight_decay: regex patterns checked *before* the exclude
        list; a match here forces decay even if an exclude pattern also
        matches. NOTE(review): mutable default list is shared across
        instances — harmless as long as callers never mutate it.
      name: name scope for the optimizer ops.
    """
    super(AdamWeightDecayOptimizer, self).__init__(False, name)
    self.learning_rate = learning_rate
    self.weight_decay_rate = weight_decay_rate
    self.beta_1 = beta_1
    self.beta_2 = beta_2
    self.epsilon = epsilon
    self.exclude_from_weight_decay = exclude_from_weight_decay
    self.include_in_weight_decay = include_in_weight_decay

  def apply_gradients(self, grads_and_vars, global_step=None, name=None):
    """See base class.

    NOTE(review): `global_step` is accepted but never read or incremented
    here — callers must advance it separately if they rely on it.
    """
    assignments = []
    for (grad, param) in grads_and_vars:
      if grad is None or param is None:
        continue
      param_name = self._get_variable_name(param.name)
      # Per-parameter Adam accumulator slots, created as non-trainable
      # variables alongside the parameter.
      m = tf.get_variable(
          name=param_name + "/adam_m",
          shape=param.shape.as_list(),
          dtype=tf.float32,
          trainable=False,
          initializer=tf.zeros_initializer())
      v = tf.get_variable(
          name=param_name + "/adam_v",
          shape=param.shape.as_list(),
          dtype=tf.float32,
          trainable=False,
          initializer=tf.zeros_initializer())
      # Standard Adam update (no bias correction on m/v).
      next_m = (
          tf.multiply(self.beta_1, m) + tf.multiply(1.0 - self.beta_1, grad))
      next_v = (
          tf.multiply(self.beta_2, v) + tf.multiply(1.0 - self.beta_2,
                                                    tf.square(grad)))
      update = next_m / (tf.sqrt(next_v) + self.epsilon)
      # Just adding the square of the weights to the loss function is *not*
      # the correct way of using L2 regularization/weight decay with Adam,
      # since that will interact with the m and v parameters in strange ways.
      #
      # Instead we want to decay the weights in a manner that doesn't interact
      # with the m/v parameters. This is equivalent to adding the square
      # of the weights to the loss with plain (non-momentum) SGD.
      if self._do_use_weight_decay(param_name):
        update += self.weight_decay_rate * param
      update_with_lr = self.learning_rate * update
      next_param = param - update_with_lr
      assignments.extend(
          [param.assign(next_param),
           m.assign(next_m),
           v.assign(next_v)])
    # One grouped op applying every parameter and accumulator assignment.
    return tf.group(*assignments, name=name)

  def _do_use_weight_decay(self, param_name):
    """Whether to use L2 weight decay for `param_name`.

    Precedence: falsy rate -> False; include pattern match -> True;
    exclude pattern match -> False; otherwise True.
    """
    if not self.weight_decay_rate:
      return False
    for r in self.include_in_weight_decay:
      if re.search(r, param_name) is not None:
        return True
    if self.exclude_from_weight_decay:
      for r in self.exclude_from_weight_decay:
        if re.search(r, param_name) is not None:
          tf.logging.info('Adam WD excludes {}'.format(param_name))
          return False
    return True

  def _get_variable_name(self, param_name):
    """Get the variable name from the tensor name (strip trailing ":<n>")."""
    m = re.match("^(.*):\\d+$", param_name)
    if m is not None:
      param_name = m.group(1)
    return param_name
if __name__ == "__main__":
  # Register the CLI flags consumed by clean_ckpt(), then hand control to
  # tf.app.run, which parses the flags and invokes clean_ckpt as main().
  flags.DEFINE_string("clean_input_ckpt", "", "input ckpt for cleaning")
  flags.DEFINE_string("clean_output_model_dir", "", "output dir for cleaned ckpt")
  FLAGS = flags.FLAGS
  tf.app.run(clean_ckpt)
| 35.334211 | 82 | 0.688761 |
acf2bb643db6bf3876cd79b34d8974476e508c1b | 239 | py | Python | lambda.py | tensult/bucket-antivirus-function | 9c8d4d359c55cffcfc0518d55e3fe92742f90c36 | [
"Apache-2.0"
] | 1 | 2021-09-13T05:45:07.000Z | 2021-09-13T05:45:07.000Z | lambda.py | tensult/bucket-antivirus-function | 9c8d4d359c55cffcfc0518d55e3fe92742f90c36 | [
"Apache-2.0"
] | null | null | null | lambda.py | tensult/bucket-antivirus-function | 9c8d4d359c55cffcfc0518d55e3fe92742f90c36 | [
"Apache-2.0"
] | null | null | null | import update
import scan
def lambda_handler(event, context):
    """AWS Lambda entry point dispatching to the scan or update handler.

    Events whose "type" is "SCAN" (also the default when the key is absent)
    are routed to ``scan.lambda_handler``; a None event or any other type
    triggers ``update.lambda_handler``.
    """
    # `is not None` rather than `!= None`: identity comparison is the
    # idiomatic (and __eq__-proof) way to test for None.
    if event is not None and event.get("type", "SCAN") == "SCAN":
        return scan.lambda_handler(event, context)
    return update.lambda_handler(event, context)
| 26.555556 | 61 | 0.682008 |
acf2bb96f614210282a6d9db10f0ff764be7b34d | 2,207 | py | Python | my/goodreads.py | aluhrs13/HPI | e750666e30e8987f3a4c46755857dc85dd64446c | [
"MIT"
] | 1,026 | 2020-03-16T16:53:29.000Z | 2022-03-29T16:03:38.000Z | my/goodreads.py | aluhrs13/HPI | e750666e30e8987f3a4c46755857dc85dd64446c | [
"MIT"
] | 102 | 2020-03-18T22:53:29.000Z | 2022-03-22T00:34:46.000Z | my/goodreads.py | aluhrs13/HPI | e750666e30e8987f3a4c46755857dc85dd64446c | [
"MIT"
] | 50 | 2020-03-17T21:00:34.000Z | 2022-03-28T08:37:13.000Z | """
[[https://www.goodreads.com][Goodreads]] statistics
"""
# Extra third-party dependency providing the export DAL (installed via pip).
REQUIRES = [
    'git+https://github.com/karlicoss/goodrexport',
]
from dataclasses import dataclass
from my.core import Paths
from my.config import goodreads as user_config
@dataclass
class goodreads(user_config):
    """User config schema for the Goodreads module."""
    # path(s)/glob pointing to the exported JSON data files
    export_path: Paths
from my.core.cfg import make_config, Attrs
def _migration(attrs: Attrs) -> Attrs:
    """Migrate legacy user config in place: 'export_dir' -> 'export_path'."""
    export_dir = 'export_dir'
    if export_dir not in attrs:
        return attrs
    # Legacy attribute present: mirror it under the new name and warn.
    attrs['export_path'] = attrs[export_dir]
    from my.core.warnings import high
    high(f'"{export_dir}" is deprecated! Please use "export_path" instead."')
    return attrs
config = make_config(goodreads, migration=_migration)
#############################3
from my.core import get_files
from typing import Sequence, Iterator
from pathlib import Path
def inputs() -> Sequence[Path]:
    """Return the export files resolved from config.export_path."""
    return get_files(config.export_path)
from datetime import datetime
import pytz
from goodrexport import dal
def _dal() -> dal.DAL:
    """Construct a goodrexport data-access layer over all export files."""
    return dal.DAL(inputs())
def reviews() -> Iterator[dal.Review]:
    """Yield every review found in the export data."""
    return _dal().reviews()
# todo should be in DAL?
def books() -> Iterator[dal.Book]:
    """Yield the book attached to each review."""
    yield from (review.book for review in reviews())
#######
# todo ok, not sure these really belong here...
from my.core.common import datetime_aware
@dataclass
class Event:
    """A single timeline event derived from the Goodreads export."""
    dt: datetime_aware  # timezone-aware timestamp of the event
    summary: str        # human-readable description
    eid: str            # stable event id (events() uses the book id)
def events() -> Iterator[Event]:
    """Emit one "added book" event per book, timestamped by date_added."""
    for book in books():
        summary = f'Added book "{book.title}"'  # todo shelf?
        yield Event(dt=book.date_added, summary=summary, eid=book.id)
    # todo finished? other updates?
def print_read_history() -> None:
    """Print every book, sorted by the date reading started (earliest first)."""
    # Books that were never started sort before everything else via the epoch.
    epoch = datetime.fromtimestamp(0, pytz.utc)

    def sort_key(book):
        started = book.date_started
        return epoch if started is None else started

    def fmtdt(dt):
        # None passes through; aware datetimes are rendered in London time.
        if dt is None:
            return dt
        return dt.astimezone(pytz.timezone('Europe/London'))

    for b in sorted(books(), key=sort_key):
        print(f"""
{b.title} by {', '.join(b.authors)}
    started : {fmtdt(b.date_started)}
    finished: {fmtdt(b.date_read)}
""")
| 21.427184 | 81 | 0.632986 |
acf2bcf4adc62643c8a86f5b959f551527e57eca | 396 | py | Python | exponentiation.py | priyadarshan1995/python-algorithms | 5d458e07f734b337c3d1152757b0a5c48bf5d4e0 | [
"MIT"
] | 5 | 2020-08-20T17:06:50.000Z | 2020-11-07T09:11:30.000Z | exponentiation.py | freelancing-solutions/python-algorithms | 34bea3245497f3e0586d6dfb43438b6498d7a245 | [
"MIT"
] | 14 | 2020-10-16T16:31:33.000Z | 2020-11-10T01:54:27.000Z | exponentiation.py | freelancing-solutions/python-algorithms | 34bea3245497f3e0586d6dfb43438b6498d7a245 | [
"MIT"
] | 17 | 2020-10-16T17:49:23.000Z | 2020-11-03T09:58:27.000Z | '''Efficient approach'''
def bin_pow(a, n):
    """Return a**n using binary (fast) exponentiation.

    Runs in O(log n) multiplications by squaring the base and consuming the
    exponent bit by bit.

    Args:
        a: the base (any value supporting * with itself).
        n: non-negative integer exponent; n == 0 (and any n < 0, which skips
           the loop) yields 1.

    Returns:
        a raised to the power n.
    """
    res = 1
    while n > 0:
        if n & 1:           # lowest bit set: include this power of a
            res *= a
        a *= a              # square the base for the next bit
        n >>= 1
    return res
print(bin_pow(2101010,32)) #20782597351780148360705447543438026166282198412508937706220746225693908693563681788663888827908012942663696008670876015403914749631306435949260373147394419808970340368320100000000000000000000000000000000
''' Time complexity : O(log n) '''
| 28.285714 | 231 | 0.777778 |
acf2be15daf166cb13b18017236bf7cb4305e342 | 3,646 | py | Python | tools/build_usd.py | JasonWeiseUnreal/BlenderUSDHydraAddon | 029dec5586b4780bdcb461f0b82a27708ded1bc8 | [
"Apache-2.0"
] | null | null | null | tools/build_usd.py | JasonWeiseUnreal/BlenderUSDHydraAddon | 029dec5586b4780bdcb461f0b82a27708ded1bc8 | [
"Apache-2.0"
] | null | null | null | tools/build_usd.py | JasonWeiseUnreal/BlenderUSDHydraAddon | 029dec5586b4780bdcb461f0b82a27708ded1bc8 | [
"Apache-2.0"
] | null | null | null | #**********************************************************************
# Copyright 2020 Advanced Micro Devices, Inc
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#********************************************************************
import sys
import os
from pathlib import Path
from build import rm_dir, check_call, OS
def main(bin_dir, clean, build_var, *args):
    """Build Pixar USD (with MaterialX, OpenVDB and Python bindings) into
    <bin_dir>/USD using USD's own build_scripts/build_usd.py.

    Side effects: patches the deps/USD git checkout, appends to its
    pxr/usdImaging/CMakeLists.txt, runs the build, then reverts the repo.

    Args:
        bin_dir: Path to the output root; build artifacts go under bin_dir/USD.
        clean: if truthy, remove bin_dir/USD before building.
        build_var: build variant string passed to --build-variant; the value
            'relwithdebuginfo' on Windows additionally disables optimization.
        *args: extra arguments forwarded to USD's build_usd.py; a single
            "--help"/"-h" prints usage and returns without building.
    """
    if len(args) == 1 and args[0] in ("--help", "-h"):
        print("""
Usage
build_usd.py <bin-dir> [<args-for-USD/build_scripts/build_usd.py>...]
Specify arguments for build_scripts/build_usd.py script
in USD repository.
""")
        return
    repo_dir = Path(__file__).parent.parent
    usd_dir = repo_dir / "deps/USD"
    if clean:
        rm_dir(bin_dir / "USD")
    # Work inside the USD submodule; restored in the outer finally below.
    cur_dir = os.getcwd()
    os.chdir(str(usd_dir))
    try:
        # applying patch data/USD_MaterialX.patch
        # Temporary implements https://github.com/PixarAnimationStudios/USD/pull/1610
        # TODO: remove this after up USD to >= 2203 and implement their own fix
        # https://github.com/PixarAnimationStudios/USD/commit/adfc04eea92b91965b0da68503539b079a5d30d9
        check_call('git', 'apply', '--whitespace=nowarn', str(repo_dir / "tools/data/USD_MaterialX.patch"))
        # applying patch data/USD_deps.patch
        # fixes issues with building USD on python 3.10
        check_call('git', 'apply', str(repo_dir / "tools/data/USD_deps.patch"))
        # modifying pxr/usdImaging/CMakeLists.txt: append our out-of-tree
        # usdImagingLite subdirectory so it builds as part of USD.
        usd_imaging_lite_path = repo_dir / "deps/UsdImagingLite/pxr/usdImaging/usdImagingLite"
        usd_imaging_cmake = usd_dir / "pxr/usdImaging/CMakeLists.txt"
        print("Modifying:", usd_imaging_cmake)
        cmake_txt = usd_imaging_cmake.read_text()
        usd_imaging_cmake.write_text(cmake_txt + f"""
add_subdirectory("{usd_imaging_lite_path.absolute().as_posix()}" usdImagingLite)
""")
        bin_usd_dir = bin_dir / "USD"
        # Per-dependency build flags ("<DEP>,<flags>" format of build_usd.py).
        build_args = [f'MATERIALX,-DMATERIALX_BUILD_PYTHON=ON -DMATERIALX_INSTALL_PYTHON=OFF '
                      f'-DMATERIALX_PYTHON_EXECUTABLE="{sys.executable}"']
        if build_var == 'relwithdebuginfo' and OS == 'Windows':
            # disabling optimization for debug purposes
            build_args.append(f'USD,-DCMAKE_CXX_FLAGS_RELWITHDEBINFO="/Od"')
        call_args = (sys.executable, str(usd_dir / "build_scripts/build_usd.py"),
                     '--verbose',
                     '--build', str(bin_usd_dir / "build"),
                     '--src', str(bin_usd_dir / "deps"),
                     '--materialx',
                     '--openvdb',
                     '--build-args', *build_args,
                     '--python',
                     '--force', "OpenSubDiv",
                     '--build-variant', build_var,
                     str(bin_usd_dir / "install"),
                     *args)
        try:
            check_call(*call_args)
        finally:
            # Always drop the temporary patches/CMake edit, even on failure.
            print("Reverting USD repo")
            check_call('git', 'checkout', '--', '*')
            check_call('git', 'clean', '-f')
    finally:
        os.chdir(cur_dir)
if __name__ == "__main__":
    # CLI entry point: forward all command-line arguments to main().
    main(*sys.argv[1:])
| 37.204082 | 107 | 0.60203 |
acf2be2ddf29880e900f6b85dbd10c4e089be341 | 145 | py | Python | Python/Program to print half pyramid with stars.py | ShaileshKumar007/Basic-Programming-Quesions | 5fb8b9a3cd2af7a77ff18d6d064fced518733d12 | [
"MIT"
] | 1 | 2021-09-12T15:50:05.000Z | 2021-09-12T15:50:05.000Z | Python/Program to print half pyramid with stars.py | ShaileshKumar007/Basic-Programming-Quesions | 5fb8b9a3cd2af7a77ff18d6d064fced518733d12 | [
"MIT"
] | 2 | 2021-09-11T19:32:41.000Z | 2021-10-02T07:59:21.000Z | Python/Program to print half pyramid with stars.py | ShaileshKumar007/Basic-Programming-Quesions | 5fb8b9a3cd2af7a77ff18d6d064fced518733d12 | [
"MIT"
] | 2 | 2021-09-11T16:42:44.000Z | 2021-09-12T17:23:34.000Z | NumOfRows = int(input("Enter number of rows: "))
for i in range(NumOfRows):
for j in range(i+1):
print("* ", end="")
print("\n") | 24.166667 | 48 | 0.558621 |
acf2bfb12651c71c40b28c92befe02d2cd2c33fd | 2,227 | py | Python | physiossl/criterion/simclr_infonce_loss.py | larryshaw0079/PhysioLearn | 6438924a1b2a0c2ce4c238f504654f9a7f993d9e | [
"MIT"
] | 2 | 2021-12-11T15:17:47.000Z | 2021-12-27T07:39:31.000Z | physiossl/criterion/simclr_infonce_loss.py | larryshaw0079/PhysioSSL | 6438924a1b2a0c2ce4c238f504654f9a7f993d9e | [
"MIT"
] | null | null | null | physiossl/criterion/simclr_infonce_loss.py | larryshaw0079/PhysioSSL | 6438924a1b2a0c2ce4c238f504654f9a7f993d9e | [
"MIT"
] | null | null | null | """
@Time : 2021/10/13 16:07
@File : simclr_infonce_loss.py
@Software: PyCharm
@Desc :
"""
import itertools
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from physiossl.dist.utils import gather_tensor_sync, is_distributed_enabled
class SimCLRInfoNCELoss(nn.Module):
    """SimCLR-style InfoNCE contrastive loss.

    ``z1`` and ``z2`` hold paired representations (positives at matching
    positions); every non-matching pair in the (optionally all-gathered)
    batch acts as a negative. Logits are L2-normalized dot products
    (cosine similarities), optionally scaled by temperature ``T``.
    """

    def __init__(self, T: float = 1.0):
        super(SimCLRInfoNCELoss, self).__init__()
        # Temperature for logit scaling; None leaves logits unscaled.
        self.T = T
        self.criterion = nn.CrossEntropyLoss()
        # Lazily built negative-pair mask, cached between forward passes.
        self.mask = None

    def forward(self, z1: torch.Tensor, z2: torch.Tensor):
        """
        Args:
            z1 (torch.Tensor): with shape (B, *, F)
            z2 (torch.Tensor): with shape (B, *, F)

        Returns:
            Scalar cross-entropy (InfoNCE) loss.
        """
        assert z1.shape == z2.shape
        if is_distributed_enabled():
            # Gather representations from all ranks to enlarge the negative pool.
            z1 = gather_tensor_sync(z1)
            z2 = gather_tensor_sync(z2)
        B, *_ = z1.shape  ## (batch, *, feature_dim)
        feature_dims = z1.shape[:-1]  ## without the representation dim
        dims_prod = np.prod(feature_dims)
        z1 = F.normalize(z1, p=2, dim=-1)
        z2 = F.normalize(z2, p=2, dim=-1)
        # Positive logits: cosine similarity of each pair (z1_i, z2_i).
        pos_logits = (z1 * z2).sum(-1).unsqueeze(-1)
        # All-pairs similarities between z1 and z2 (equivalent to the
        # commented einsum 'ijk,mnk->ijnm' in the original).
        neg_logits = torch.tensordot(z1, z2.T, dims=1)
        mask_shape = (*feature_dims, *feature_dims[::-1])
        # BUGFIX: the mask used to be built once and reused forever, so a
        # later batch with a different shape (e.g. a smaller final batch) or
        # device would hit a stale, wrong-sized mask. Rebuild it whenever the
        # required shape or device differs from the cached one.
        if (self.mask is None
                or tuple(self.mask.shape) != mask_shape
                or self.mask.device != z1.device):
            mask = torch.ones(*mask_shape, dtype=torch.bool, device=z1.device)
            for idx_tuple in itertools.product(*(range(s) for s in feature_dims)):
                # Zero out self-similarity entries so masked_select keeps
                # only negatives.
                mask[idx_tuple + idx_tuple[::-1]] = False
            self.mask = mask
        neg_logits = neg_logits.masked_select(self.mask).view(*feature_dims, dims_prod - 1)  ## mask out selves
        logits = torch.cat([pos_logits, neg_logits], dim=-1)
        logits = logits.view(dims_prod, dims_prod)
        if self.T is not None:
            logits /= self.T
        # The positive logit occupies column 0 of every row, hence label 0.
        label = torch.zeros(dims_prod, dtype=torch.long, device=z1.device)
        loss = self.criterion(logits, label)
        return loss
| 31.814286 | 135 | 0.59991 |
acf2c06348ab081973acde679beedd36ceb68335 | 109,946 | py | Python | LocationList.py | Triaphlax/OoT-Randomizer | d6755f8e4b1c6b5d5612e583f8f91fa303f1669b | [
"MIT"
] | null | null | null | LocationList.py | Triaphlax/OoT-Randomizer | d6755f8e4b1c6b5d5612e583f8f91fa303f1669b | [
"MIT"
] | null | null | null | LocationList.py | Triaphlax/OoT-Randomizer | d6755f8e4b1c6b5d5612e583f8f91fa303f1669b | [
"MIT"
] | null | null | null | from collections import OrderedDict
def shop_address(shop_id, shelf_id):
    """Return the ROM address of a shop shelf slot.

    The shop table starts at 0xC71ED0; each shop occupies 0x40 bytes and
    each shelf entry within a shop occupies 0x08 bytes.
    """
    SHOP_TABLE_BASE = 0xC71ED0
    SHOP_STRIDE = 0x40
    SHELF_STRIDE = 0x08
    return SHOP_TABLE_BASE + SHOP_STRIDE * shop_id + SHELF_STRIDE * shelf_id
# Abbreviations
# DMC Death Mountain Crater
# DMT Death Mountain Trail
# GC Goron City
# GF Gerudo Fortress
# GS Gold Skulltula
# GV Gerudo Valley
# HC Hyrule Castle
# HF Hyrule Field
# KF Kokiri Forest
# LH Lake Hylia
# LLR Lon Lon Ranch
# LW Lost Woods
# OGC Outside Ganon's Castle
# SFM Sacred Forest Meadow
# ToT Temple of Time
# ZD Zora's Domain
# ZF Zora's Fountain
# ZR Zora's River
# The order of this table is reflected in the spoiler's list of locations (except Hints aren't included).
# Within a section, the order of types is: gifts/freestanding/chests, Deku Scrubs, Cows, Gold Skulltulas, Shops.
# NPC Scrubs are on the overworld, while GrottoNPC is a special handler for Grottos
# Grottos scrubs are the same scene and actor, so we use a unique grotto ID for the scene
# Note that the scene for skulltulas is not the actual scene the token appears in
# Rather, it is the index of the grouping used when storing skulltula collection
# For example, zora river, zora's domain, and zora fountain are all a single 'scene' for skulltulas
# Location: Type Scene Default Addresses Categories
location_table = OrderedDict([
## Dungeon Rewards
("Links Pocket", ("Boss", None, None, None, None)),
("Queen Gohma", ("Boss", None, 0x6C, (0x0CA315F, 0x2079571), None)),
("King Dodongo", ("Boss", None, 0x6D, (0x0CA30DF, 0x2223309), None)),
("Barinade", ("Boss", None, 0x6E, (0x0CA36EB, 0x2113C19), None)),
("Phantom Ganon", ("Boss", None, 0x66, (0x0CA3D07, 0x0D4ED79), None)),
("Volvagia", ("Boss", None, 0x67, (0x0CA3D93, 0x0D10135), None)),
("Morpha", ("Boss", None, 0x68, (0x0CA3E1F, 0x0D5A3A9), None)),
("Bongo Bongo", ("Boss", None, 0x6A, (0x0CA3F43, 0x0D13E19), None)),
("Twinrova", ("Boss", None, 0x69, (0x0CA3EB3, 0x0D39FF1), None)),
("Ganon", ("Event", None, None, None, None)),
## Songs
("Song from Impa", ("Song", 0xFF, 0x26, (0x2E8E925, 0x2E8E925), ("Hyrule Castle", "Market", "Songs"))),
("Song from Malon", ("Song", 0xFF, 0x27, (0x0D7EB53, 0x0D7EBCF), ("Lon Lon Ranch", "Songs",))),
("Song from Saria", ("Song", 0xFF, 0x28, (0x20B1DB1, 0x20B1DB1), ("Sacred Forest Meadow", "Forest", "Songs"))),
("Song from Composers Grave", ("Song", 0xFF, 0x29, (0x332A871, 0x332A871), ("the Graveyard", "Kakariko", "Songs"))),
("Song from Ocarina of Time", ("Song", 0xFF, 0x2A, (0x252FC89, 0x252FC89), ("Hyrule Field", "Songs", "Need Spiritual Stones"))),
("Song from Windmill", ("Song", 0xFF, 0x2B, (0x0E42C07, 0x0E42B8B), ("Kakariko Village", "Kakariko", "Songs"))),
("Sheik in Forest", ("Song", 0xFF, 0x20, (0x20B0809, 0x20B0809), ("Sacred Forest Meadow", "Forest", "Songs"))),
("Sheik in Crater", ("Song", 0xFF, 0x21, (0x224D7F1, 0x224D7F1), ("Death Mountain Crater", "Death Mountain", "Songs"))),
("Sheik in Ice Cavern", ("Song", 0xFF, 0x22, (0x2BEC889, 0x2BEC889), ("Ice Cavern", "Songs",))),
("Sheik at Colossus", ("Song", 0xFF, 0x23, (0x218C57D, 0x218C57D), ("Desert Colossus", "Songs",))),
("Sheik in Kakariko", ("Song", 0xFF, 0x24, (0x2000FE1, 0x2000FE1), ("Kakariko Village", "Kakariko", "Songs"))),
("Sheik at Temple", ("Song", 0xFF, 0x25, (0x2531329, 0x2531329), ("Temple of Time", "Market", "Songs"))),
## Overworld
# Kokiri Forest
("KF Midos Top Left Chest", ("Chest", 0x28, 0x00, None, ("Kokiri Forest", "Forest",))),
("KF Midos Top Right Chest", ("Chest", 0x28, 0x01, None, ("Kokiri Forest", "Forest",))),
("KF Midos Bottom Left Chest", ("Chest", 0x28, 0x02, None, ("Kokiri Forest", "Forest",))),
("KF Midos Bottom Right Chest", ("Chest", 0x28, 0x03, None, ("Kokiri Forest", "Forest",))),
("KF Kokiri Sword Chest", ("Chest", 0x55, 0x00, None, ("Kokiri Forest", "Forest",))),
("KF Storms Grotto Chest", ("Chest", 0x3E, 0x0C, None, ("Kokiri Forest", "Forest", "Grottos"))),
("KF Links House Cow", ("NPC", 0x34, 0x15, None, ("KF Links House", "Forest", "Cow", "Minigames"))),
("KF GS Know It All House", ("GS Token", 0x0C, 0x02, None, ("Kokiri Forest", "Skulltulas",))),
("KF GS Bean Patch", ("GS Token", 0x0C, 0x01, None, ("Kokiri Forest", "Skulltulas",))),
("KF GS House of Twins", ("GS Token", 0x0C, 0x04, None, ("Kokiri Forest", "Skulltulas",))),
("KF Shop Item 1", ("Shop", 0x2D, 0x30, (shop_address(0, 0), None), ("Kokiri Forest", "Forest", "Shops"))),
("KF Shop Item 2", ("Shop", 0x2D, 0x31, (shop_address(0, 1), None), ("Kokiri Forest", "Forest", "Shops"))),
("KF Shop Item 3", ("Shop", 0x2D, 0x32, (shop_address(0, 2), None), ("Kokiri Forest", "Forest", "Shops"))),
("KF Shop Item 4", ("Shop", 0x2D, 0x33, (shop_address(0, 3), None), ("Kokiri Forest", "Forest", "Shops"))),
("KF Shop Item 5", ("Shop", 0x2D, 0x34, (shop_address(0, 4), None), ("Kokiri Forest", "Forest", "Shops"))),
("KF Shop Item 6", ("Shop", 0x2D, 0x35, (shop_address(0, 5), None), ("Kokiri Forest", "Forest", "Shops"))),
("KF Shop Item 7", ("Shop", 0x2D, 0x36, (shop_address(0, 6), None), ("Kokiri Forest", "Forest", "Shops"))),
("KF Shop Item 8", ("Shop", 0x2D, 0x37, (shop_address(0, 7), None), ("Kokiri Forest", "Forest", "Shops"))),
# Lost Woods
("LW Gift from Saria", ("Cutscene", 0xFF, 0x02, None, ("the Lost Woods", "Forest",))),
("LW Ocarina Memory Game", ("NPC", 0x5B, 0x76, None, ("the Lost Woods", "Forest", "Minigames"))),
("LW Target in Woods", ("NPC", 0x5B, 0x60, None, ("the Lost Woods", "Forest",))),
("LW Near Shortcuts Grotto Chest", ("Chest", 0x3E, 0x14, None, ("the Lost Woods", "Forest", "Grottos"))),
("Deku Theater Skull Mask", ("NPC", 0x3E, 0x77, None, ("the Lost Woods", "Forest", "Grottos"))),
("Deku Theater Mask of Truth", ("NPC", 0x3E, 0x7A, None, ("the Lost Woods", "Forest", "Need Spiritual Stones", "Grottos"))),
("LW Skull Kid", ("NPC", 0x5B, 0x3E, None, ("the Lost Woods", "Forest",))),
("LW Deku Scrub Near Bridge", ("NPC", 0x5B, 0x77, None, ("the Lost Woods", "Forest", "Deku Scrub", "Deku Scrub Upgrades"))),
("LW Deku Scrub Near Deku Theater Left", ("NPC", 0x5B, 0x31, None, ("the Lost Woods", "Forest", "Deku Scrub"))),
("LW Deku Scrub Near Deku Theater Right", ("NPC", 0x5B, 0x30, None, ("the Lost Woods", "Forest", "Deku Scrub"))),
("LW Deku Scrub Grotto Front", ("GrottoNPC", 0xF5, 0x79, None, ("the Lost Woods", "Forest", "Deku Scrub", "Deku Scrub Upgrades", "Grottos"))),
("LW Deku Scrub Grotto Rear", ("GrottoNPC", 0xF5, 0x33, None, ("the Lost Woods", "Forest", "Deku Scrub", "Grottos"))),
("LW GS Bean Patch Near Bridge", ("GS Token", 0x0D, 0x01, None, ("the Lost Woods", "Skulltulas",))),
("LW GS Bean Patch Near Theater", ("GS Token", 0x0D, 0x02, None, ("the Lost Woods", "Skulltulas",))),
("LW GS Above Theater", ("GS Token", 0x0D, 0x04, None, ("the Lost Woods", "Skulltulas",))),
# Sacred Forest Meadow
("SFM Wolfos Grotto Chest", ("Chest", 0x3E, 0x11, None, ("Sacred Forest Meadow", "Forest", "Grottos"))),
("SFM Deku Scrub Grotto Front", ("GrottoNPC", 0xEE, 0x3A, None, ("Sacred Forest Meadow", "Forest", "Deku Scrub", "Grottos"))),
("SFM Deku Scrub Grotto Rear", ("GrottoNPC", 0xEE, 0x39, None, ("Sacred Forest Meadow", "Forest", "Deku Scrub", "Grottos"))),
("SFM GS", ("GS Token", 0x0D, 0x08, None, ("Sacred Forest Meadow", "Skulltulas",))),
# Hyrule Field
("HF Ocarina of Time Item", ("NPC", 0x51, 0x0C, None, ("Hyrule Field", "Need Spiritual Stones",))),
("HF Near Market Grotto Chest", ("Chest", 0x3E, 0x00, None, ("Hyrule Field", "Grottos",))),
("HF Tektite Grotto Freestanding PoH", ("Collectable", 0x3E, 0x01, None, ("Hyrule Field", "Grottos",))),
("HF Southeast Grotto Chest", ("Chest", 0x3E, 0x02, None, ("Hyrule Field", "Grottos",))),
("HF Open Grotto Chest", ("Chest", 0x3E, 0x03, None, ("Hyrule Field", "Grottos",))),
("HF Deku Scrub Grotto", ("GrottoNPC", 0xE6, 0x3E, None, ("Hyrule Field", "Deku Scrub", "Deku Scrub Upgrades", "Grottos"))),
("HF Cow Grotto Cow", ("NPC", 0x3E, 0x16, None, ("Hyrule Field", "Cow", "Grottos"))),
("HF GS Cow Grotto", ("GS Token", 0x0A, 0x01, None, ("Hyrule Field", "Skulltulas", "Grottos"))),
("HF GS Near Kak Grotto", ("GS Token", 0x0A, 0x02, None, ("Hyrule Field", "Skulltulas", "Grottos"))),
# Market
("Market Shooting Gallery Reward", ("NPC", 0x42, 0x60, None, ("the Market", "Market", "Minigames"))),
("Market Bombchu Bowling First Prize", ("NPC", 0x4B, 0x34, None, ("the Market", "Market", "Minigames"))),
("Market Bombchu Bowling Second Prize", ("NPC", 0x4B, 0x3E, None, ("the Market", "Market", "Minigames"))),
("Market Bombchu Bowling Bombchus", ("NPC", 0x4B, None, None, ("the Market", "Market", "Minigames"))),
("Market Lost Dog", ("NPC", 0x35, 0x3E, None, ("the Market", "Market",))),
("Market Treasure Chest Game Reward", ("Chest", 0x10, 0x0A, None, ("the Market", "Market", "Minigames"))),
("Market 10 Big Poes", ("NPC", 0x4D, 0x0F, None, ("the Market", "Hyrule Castle",))),
("Market GS Guard House", ("GS Token", 0x0E, 0x08, None, ("the Market", "Skulltulas",))),
("Market Bazaar Item 1", ("Shop", 0x2C, 0x30, (shop_address(4, 0), None), ("the Market", "Market", "Shops"))),
("Market Bazaar Item 2", ("Shop", 0x2C, 0x31, (shop_address(4, 1), None), ("the Market", "Market", "Shops"))),
("Market Bazaar Item 3", ("Shop", 0x2C, 0x32, (shop_address(4, 2), None), ("the Market", "Market", "Shops"))),
("Market Bazaar Item 4", ("Shop", 0x2C, 0x33, (shop_address(4, 3), None), ("the Market", "Market", "Shops"))),
("Market Bazaar Item 5", ("Shop", 0x2C, 0x34, (shop_address(4, 4), None), ("the Market", "Market", "Shops"))),
("Market Bazaar Item 6", ("Shop", 0x2C, 0x35, (shop_address(4, 5), None), ("the Market", "Market", "Shops"))),
("Market Bazaar Item 7", ("Shop", 0x2C, 0x36, (shop_address(4, 6), None), ("the Market", "Market", "Shops"))),
("Market Bazaar Item 8", ("Shop", 0x2C, 0x37, (shop_address(4, 7), None), ("the Market", "Market", "Shops"))),
("Market Potion Shop Item 1", ("Shop", 0x31, 0x30, (shop_address(3, 0), None), ("the Market", "Market", "Shops"))),
("Market Potion Shop Item 2", ("Shop", 0x31, 0x31, (shop_address(3, 1), None), ("the Market", "Market", "Shops"))),
("Market Potion Shop Item 3", ("Shop", 0x31, 0x32, (shop_address(3, 2), None), ("the Market", "Market", "Shops"))),
("Market Potion Shop Item 4", ("Shop", 0x31, 0x33, (shop_address(3, 3), None), ("the Market", "Market", "Shops"))),
("Market Potion Shop Item 5", ("Shop", 0x31, 0x34, (shop_address(3, 4), None), ("the Market", "Market", "Shops"))),
("Market Potion Shop Item 6", ("Shop", 0x31, 0x35, (shop_address(3, 5), None), ("the Market", "Market", "Shops"))),
("Market Potion Shop Item 7", ("Shop", 0x31, 0x36, (shop_address(3, 6), None), ("the Market", "Market", "Shops"))),
("Market Potion Shop Item 8", ("Shop", 0x31, 0x37, (shop_address(3, 7), None), ("the Market", "Market", "Shops"))),
("Market Bombchu Shop Item 1", ("Shop", 0x32, 0x30, (shop_address(2, 0), None), ("the Market", "Market", "Shops"))),
("Market Bombchu Shop Item 2", ("Shop", 0x32, 0x31, (shop_address(2, 1), None), ("the Market", "Market", "Shops"))),
("Market Bombchu Shop Item 3", ("Shop", 0x32, 0x32, (shop_address(2, 2), None), ("the Market", "Market", "Shops"))),
("Market Bombchu Shop Item 4", ("Shop", 0x32, 0x33, (shop_address(2, 3), None), ("the Market", "Market", "Shops"))),
("Market Bombchu Shop Item 5", ("Shop", 0x32, 0x34, (shop_address(2, 4), None), ("the Market", "Market", "Shops"))),
("Market Bombchu Shop Item 6", ("Shop", 0x32, 0x35, (shop_address(2, 5), None), ("the Market", "Market", "Shops"))),
("Market Bombchu Shop Item 7", ("Shop", 0x32, 0x36, (shop_address(2, 6), None), ("the Market", "Market", "Shops"))),
("Market Bombchu Shop Item 8", ("Shop", 0x32, 0x37, (shop_address(2, 7), None), ("the Market", "Market", "Shops"))),
("ToT Light Arrows Cutscene", ("Cutscene", 0xFF, 0x01, None, ("Temple of Time", "Market",))),
# Hyrule Castle
("HC Malon Egg", ("NPC", 0x5F, 0x47, None, ("Hyrule Castle", "Market",))),
("HC Zeldas Letter", ("NPC", 0x4A, 0x0B, None, ("Hyrule Castle", "Market",))),
("HC Great Fairy Reward", ("Cutscene", 0xFF, 0x11, None, ("Hyrule Castle", "Market", "Fairies"))),
("HC GS Tree", ("GS Token", 0x0E, 0x04, None, ("Hyrule Castle", "Skulltulas",))),
("HC GS Storms Grotto", ("GS Token", 0x0E, 0x02, None, ("Hyrule Castle", "Skulltulas", "Grottos"))),
# Lon Lon Ranch
("LLR Talons Chickens", ("NPC", 0x4C, 0x14, None, ("Lon Lon Ranch", "Kakariko", "Minigames"))),
("LLR Freestanding PoH", ("Collectable", 0x4C, 0x01, None, ("Lon Lon Ranch",))),
("LLR Deku Scrub Grotto Left", ("GrottoNPC", 0xFC, 0x30, None, ("Lon Lon Ranch", "Deku Scrub", "Grottos"))),
("LLR Deku Scrub Grotto Center", ("GrottoNPC", 0xFC, 0x33, None, ("Lon Lon Ranch", "Deku Scrub", "Grottos"))),
("LLR Deku Scrub Grotto Right", ("GrottoNPC", 0xFC, 0x37, None, ("Lon Lon Ranch", "Deku Scrub", "Grottos"))),
("LLR Stables Left Cow", ("NPC", 0x36, 0x15, None, ("Lon Lon Ranch", "Cow",))),
("LLR Stables Right Cow", ("NPC", 0x36, 0x16, None, ("Lon Lon Ranch", "Cow",))),
("LLR Tower Left Cow", ("NPC", 0x4C, 0x16, None, ("Lon Lon Ranch", "Cow",))),
("LLR Tower Right Cow", ("NPC", 0x4C, 0x15, None, ("Lon Lon Ranch", "Cow",))),
("LLR GS House Window", ("GS Token", 0x0B, 0x04, None, ("Lon Lon Ranch", "Skulltulas",))),
("LLR GS Tree", ("GS Token", 0x0B, 0x08, None, ("Lon Lon Ranch", "Skulltulas",))),
("LLR GS Rain Shed", ("GS Token", 0x0B, 0x02, None, ("Lon Lon Ranch", "Skulltulas",))),
("LLR GS Back Wall", ("GS Token", 0x0B, 0x01, None, ("Lon Lon Ranch", "Skulltulas",))),
# Kakariko
("Kak Anju as Child", ("NPC", 0x52, 0x0F, None, ("Kakariko Village", "Kakariko", "Minigames"))),
("Kak Anju as Adult", ("NPC", 0x52, 0x1D, None, ("Kakariko Village", "Kakariko",))),
("Kak Impas House Freestanding PoH", ("Collectable", 0x37, 0x01, None, ("Kakariko Village", "Kakariko",))),
("Kak Windmill Freestanding PoH", ("Collectable", 0x48, 0x01, None, ("Kakariko Village", "Kakariko",))),
("Kak Man on Roof", ("NPC", 0x52, 0x3E, None, ("Kakariko Village", "Kakariko",))),
("Kak Open Grotto Chest", ("Chest", 0x3E, 0x08, None, ("Kakariko Village", "Kakariko", "Grottos"))),
("Kak Redead Grotto Chest", ("Chest", 0x3E, 0x0A, None, ("Kakariko Village", "Kakariko", "Grottos"))),
("Kak Shooting Gallery Reward", ("NPC", 0x42, 0x30, None, ("Kakariko Village", "Kakariko", "Minigames"))),
("Kak 10 Gold Skulltula Reward", ("NPC", 0x50, 0x45, None, ("Kakariko Village", "Kakariko", "Skulltula House"))),
("Kak 20 Gold Skulltula Reward", ("NPC", 0x50, 0x39, None, ("Kakariko Village", "Kakariko", "Skulltula House"))),
("Kak 30 Gold Skulltula Reward", ("NPC", 0x50, 0x46, None, ("Kakariko Village", "Kakariko", "Skulltula House"))),
("Kak 40 Gold Skulltula Reward", ("NPC", 0x50, 0x03, None, ("Kakariko Village", "Kakariko", "Skulltula House"))),
("Kak 50 Gold Skulltula Reward", ("NPC", 0x50, 0x3E, None, ("Kakariko Village", "Kakariko", "Skulltula House"))),
("Kak Impas House Cow", ("NPC", 0x37, 0x15, None, ("Kakariko Village", "Kakariko", "Cow"))),
("Kak GS Tree", ("GS Token", 0x10, 0x20, None, ("Kakariko Village", "Skulltulas",))),
("Kak GS Guards House", ("GS Token", 0x10, 0x02, None, ("Kakariko Village", "Skulltulas",))),
("Kak GS Watchtower", ("GS Token", 0x10, 0x04, None, ("Kakariko Village", "Skulltulas",))),
("Kak GS Skulltula House", ("GS Token", 0x10, 0x10, None, ("Kakariko Village", "Skulltulas",))),
("Kak GS House Under Construction", ("GS Token", 0x10, 0x08, None, ("Kakariko Village", "Skulltulas",))),
("Kak GS Above Impas House", ("GS Token", 0x10, 0x40, None, ("Kakariko Village", "Skulltulas",))),
("Kak Bazaar Item 1", ("Shop", 0x2C, 0x38, (shop_address(5, 0), None), ("Kakariko Village", "Kakariko", "Shops"))),
("Kak Bazaar Item 2", ("Shop", 0x2C, 0x39, (shop_address(5, 1), None), ("Kakariko Village", "Kakariko", "Shops"))),
("Kak Bazaar Item 3", ("Shop", 0x2C, 0x3A, (shop_address(5, 2), None), ("Kakariko Village", "Kakariko", "Shops"))),
("Kak Bazaar Item 4", ("Shop", 0x2C, 0x3B, (shop_address(5, 3), None), ("Kakariko Village", "Kakariko", "Shops"))),
("Kak Bazaar Item 5", ("Shop", 0x2C, 0x3D, (shop_address(5, 4), None), ("Kakariko Village", "Kakariko", "Shops"))),
("Kak Bazaar Item 6", ("Shop", 0x2C, 0x3E, (shop_address(5, 5), None), ("Kakariko Village", "Kakariko", "Shops"))),
("Kak Bazaar Item 7", ("Shop", 0x2C, 0x3F, (shop_address(5, 6), None), ("Kakariko Village", "Kakariko", "Shops"))),
("Kak Bazaar Item 8", ("Shop", 0x2C, 0x40, (shop_address(5, 7), None), ("Kakariko Village", "Kakariko", "Shops"))),
("Kak Potion Shop Item 1", ("Shop", 0x30, 0x30, (shop_address(1, 0), None), ("Kakariko Village", "Kakariko", "Shops"))),
("Kak Potion Shop Item 2", ("Shop", 0x30, 0x31, (shop_address(1, 1), None), ("Kakariko Village", "Kakariko", "Shops"))),
("Kak Potion Shop Item 3", ("Shop", 0x30, 0x32, (shop_address(1, 2), None), ("Kakariko Village", "Kakariko", "Shops"))),
("Kak Potion Shop Item 4", ("Shop", 0x30, 0x33, (shop_address(1, 3), None), ("Kakariko Village", "Kakariko", "Shops"))),
("Kak Potion Shop Item 5", ("Shop", 0x30, 0x34, (shop_address(1, 4), None), ("Kakariko Village", "Kakariko", "Shops"))),
("Kak Potion Shop Item 6", ("Shop", 0x30, 0x35, (shop_address(1, 5), None), ("Kakariko Village", "Kakariko", "Shops"))),
("Kak Potion Shop Item 7", ("Shop", 0x30, 0x36, (shop_address(1, 6), None), ("Kakariko Village", "Kakariko", "Shops"))),
("Kak Potion Shop Item 8", ("Shop", 0x30, 0x37, (shop_address(1, 7), None), ("Kakariko Village", "Kakariko", "Shops"))),
# Graveyard
("Graveyard Shield Grave Chest", ("Chest", 0x40, 0x00, None, ("the Graveyard", "Kakariko",))),
("Graveyard Heart Piece Grave Chest", ("Chest", 0x3F, 0x00, None, ("the Graveyard", "Kakariko",))),
("Graveyard Composers Grave Chest", ("Chest", 0x41, 0x00, None, ("the Graveyard", "Kakariko",))),
("Graveyard Freestanding PoH", ("Collectable", 0x53, 0x04, None, ("the Graveyard", "Kakariko",))),
("Graveyard Dampe Gravedigging Tour", ("Collectable", 0x53, 0x08, None, ("the Graveyard", "Kakariko",))),
("Graveyard Hookshot Chest", ("Chest", 0x48, 0x00, None, ("the Graveyard", "Kakariko",))),
("Graveyard Dampe Race Freestanding PoH", ("Collectable", 0x48, 0x07, None, ("the Graveyard", "Kakariko", "Minigames"))),
("Graveyard GS Bean Patch", ("GS Token", 0x10, 0x01, None, ("the Graveyard", "Skulltulas",))),
("Graveyard GS Wall", ("GS Token", 0x10, 0x80, None, ("the Graveyard", "Skulltulas",))),
# Death Mountain Trail
("DMT Freestanding PoH", ("Collectable", 0x60, 0x1E, None, ("Death Mountain Trail", "Death Mountain",))),
("DMT Chest", ("Chest", 0x60, 0x01, None, ("Death Mountain Trail", "Death Mountain",))),
("DMT Storms Grotto Chest", ("Chest", 0x3E, 0x17, None, ("Death Mountain Trail", "Death Mountain", "Grottos"))),
("DMT Great Fairy Reward", ("Cutscene", 0xFF, 0x13, None, ("Death Mountain Trail", "Death Mountain", "Fairies"))),
("DMT Biggoron", ("NPC", 0x60, 0x57, None, ("Death Mountain Trail", "Death Mountain",))),
("DMT Cow Grotto Cow", ("NPC", 0x3E, 0x15, None, ("Death Mountain Trail", "Death Mountain", "Cow", "Grottos"))),
("DMT GS Near Kak", ("GS Token", 0x0F, 0x04, None, ("Death Mountain Trail", "Skulltulas",))),
("DMT GS Bean Patch", ("GS Token", 0x0F, 0x02, None, ("Death Mountain Trail", "Skulltulas",))),
("DMT GS Above Dodongos Cavern", ("GS Token", 0x0F, 0x08, None, ("Death Mountain Trail", "Skulltulas",))),
("DMT GS Falling Rocks Path", ("GS Token", 0x0F, 0x10, None, ("Death Mountain Trail", "Skulltulas",))),
# Goron City
("GC Darunias Joy", ("NPC", 0x62, 0x54, None, ("Goron City",))),
("GC Pot Freestanding PoH", ("Collectable", 0x62, 0x1F, None, ("Goron City", "Goron City",))),
("GC Rolling Goron as Child", ("NPC", 0x62, 0x34, None, ("Goron City",))),
("GC Rolling Goron as Adult", ("NPC", 0x62, 0x2C, None, ("Goron City",))),
("GC Medigoron", ("NPC", 0x62, 0x28, None, ("Goron City",))),
("GC Maze Left Chest", ("Chest", 0x62, 0x00, None, ("Goron City",))),
("GC Maze Right Chest", ("Chest", 0x62, 0x01, None, ("Goron City",))),
("GC Maze Center Chest", ("Chest", 0x62, 0x02, None, ("Goron City",))),
("GC Deku Scrub Grotto Left", ("GrottoNPC", 0xFB, 0x30, None, ("Goron City", "Deku Scrub", "Grottos"))),
("GC Deku Scrub Grotto Center", ("GrottoNPC", 0xFB, 0x33, None, ("Goron City", "Deku Scrub", "Grottos"))),
("GC Deku Scrub Grotto Right", ("GrottoNPC", 0xFB, 0x37, None, ("Goron City", "Deku Scrub", "Grottos"))),
("GC GS Center Platform", ("GS Token", 0x0F, 0x20, None, ("Goron City", "Skulltulas",))),
("GC GS Boulder Maze", ("GS Token", 0x0F, 0x40, None, ("Goron City", "Skulltulas",))),
("GC Shop Item 1", ("Shop", 0x2E, 0x30, (shop_address(8, 0), None), ("Goron City", "Shops",))),
("GC Shop Item 2", ("Shop", 0x2E, 0x31, (shop_address(8, 1), None), ("Goron City", "Shops",))),
("GC Shop Item 3", ("Shop", 0x2E, 0x32, (shop_address(8, 2), None), ("Goron City", "Shops",))),
("GC Shop Item 4", ("Shop", 0x2E, 0x33, (shop_address(8, 3), None), ("Goron City", "Shops",))),
("GC Shop Item 5", ("Shop", 0x2E, 0x34, (shop_address(8, 4), None), ("Goron City", "Shops",))),
("GC Shop Item 6", ("Shop", 0x2E, 0x35, (shop_address(8, 5), None), ("Goron City", "Shops",))),
("GC Shop Item 7", ("Shop", 0x2E, 0x36, (shop_address(8, 6), None), ("Goron City", "Shops",))),
("GC Shop Item 8", ("Shop", 0x2E, 0x37, (shop_address(8, 7), None), ("Goron City", "Shops",))),
# Death Mountain Crater
("DMC Volcano Freestanding PoH", ("Collectable", 0x61, 0x08, None, ("Death Mountain Crater", "Death Mountain",))),
("DMC Wall Freestanding PoH", ("Collectable", 0x61, 0x02, None, ("Death Mountain Crater", "Death Mountain",))),
("DMC Upper Grotto Chest", ("Chest", 0x3E, 0x1A, None, ("Death Mountain Crater", "Death Mountain", "Grottos"))),
("DMC Great Fairy Reward", ("Cutscene", 0xFF, 0x14, None, ("Death Mountain Crater", "Death Mountain", "Fairies",))),
("DMC Deku Scrub", ("NPC", 0x61, 0x37, None, ("Death Mountain Crater", "Death Mountain", "Deku Scrub"))),
("DMC Deku Scrub Grotto Left", ("GrottoNPC", 0xF9, 0x30, None, ("Death Mountain Crater", "Death Mountain", "Deku Scrub", "Grottos"))),
("DMC Deku Scrub Grotto Center", ("GrottoNPC", 0xF9, 0x33, None, ("Death Mountain Crater", "Death Mountain", "Deku Scrub", "Grottos"))),
("DMC Deku Scrub Grotto Right", ("GrottoNPC", 0xF9, 0x37, None, ("Death Mountain Crater", "Death Mountain", "Deku Scrub", "Grottos"))),
("DMC GS Crate", ("GS Token", 0x0F, 0x80, None, ("Death Mountain Crater", "Skulltulas",))),
("DMC GS Bean Patch", ("GS Token", 0x0F, 0x01, None, ("Death Mountain Crater", "Skulltulas",))),
# Zora's River
("ZR Magic Bean Salesman", ("NPC", 0x54, 0x16, None, ("Zora's River",))),
("ZR Open Grotto Chest", ("Chest", 0x3E, 0x09, None, ("Zora's River", "Grottos",))),
("ZR Frogs in the Rain", ("NPC", 0x54, 0x3E, None, ("Zora's River", "Minigames",))),
("ZR Frogs Ocarina Game", ("NPC", 0x54, 0x76, None, ("Zora's River",))),
("ZR Near Open Grotto Freestanding PoH", ("Collectable", 0x54, 0x04, None, ("Zora's River",))),
("ZR Near Domain Freestanding PoH", ("Collectable", 0x54, 0x0B, None, ("Zora's River",))),
("ZR Deku Scrub Grotto Front", ("GrottoNPC", 0xEB, 0x3A, None, ("Zora's River", "Deku Scrub", "Grottos"))),
("ZR Deku Scrub Grotto Rear", ("GrottoNPC", 0xEB, 0x39, None, ("Zora's River", "Deku Scrub", "Grottos"))),
("ZR GS Tree", ("GS Token", 0x11, 0x02, None, ("Zora's River", "Skulltulas",))),
("ZR GS Ladder", ("GS Token", 0x11, 0x01, None, ("Zora's River", "Skulltulas",))),
("ZR GS Near Raised Grottos", ("GS Token", 0x11, 0x10, None, ("Zora's River", "Skulltulas",))),
("ZR GS Above Bridge", ("GS Token", 0x11, 0x08, None, ("Zora's River", "Skulltulas",))),
# Zora's Domain
("ZD Diving Minigame", ("NPC", 0x58, 0x37, None, ("Zora's Domain", "Minigames",))),
("ZD Chest", ("Chest", 0x58, 0x00, None, ("Zora's Domain", ))),
("ZD King Zora Thawed", ("NPC", 0x58, 0x2D, None, ("Zora's Domain",))),
("ZD GS Frozen Waterfall", ("GS Token", 0x11, 0x40, None, ("Zora's Domain", "Skulltulas",))),
("ZD Shop Item 1", ("Shop", 0x2F, 0x30, (shop_address(7, 0), None), ("Zora's Domain", "Shops",))),
("ZD Shop Item 2", ("Shop", 0x2F, 0x31, (shop_address(7, 1), None), ("Zora's Domain", "Shops",))),
("ZD Shop Item 3", ("Shop", 0x2F, 0x32, (shop_address(7, 2), None), ("Zora's Domain", "Shops",))),
("ZD Shop Item 4", ("Shop", 0x2F, 0x33, (shop_address(7, 3), None), ("Zora's Domain", "Shops",))),
("ZD Shop Item 5", ("Shop", 0x2F, 0x34, (shop_address(7, 4), None), ("Zora's Domain", "Shops",))),
("ZD Shop Item 6", ("Shop", 0x2F, 0x35, (shop_address(7, 5), None), ("Zora's Domain", "Shops",))),
("ZD Shop Item 7", ("Shop", 0x2F, 0x36, (shop_address(7, 6), None), ("Zora's Domain", "Shops",))),
("ZD Shop Item 8", ("Shop", 0x2F, 0x37, (shop_address(7, 7), None), ("Zora's Domain", "Shops",))),
# Zora's Fountain
("ZF Great Fairy Reward", ("Cutscene", 0xFF, 0x10, None, ("Zora's Fountain", "Fairies",))),
("ZF Iceberg Freestanding PoH", ("Collectable", 0x59, 0x01, None, ("Zora's Fountain",))),
("ZF Bottom Freestanding PoH", ("Collectable", 0x59, 0x14, None, ("Zora's Fountain",))),
("ZF GS Above the Log", ("GS Token", 0x11, 0x04, None, ("Zora's Fountain", "Skulltulas",))),
("ZF GS Tree", ("GS Token", 0x11, 0x80, None, ("Zora's Fountain", "Skulltulas",))),
("ZF GS Hidden Cave", ("GS Token", 0x11, 0x20, None, ("Zora's Fountain", "Skulltulas",))),
# Lake Hylia
("LH Underwater Item", ("NPC", 0x57, 0x15, None, ("Lake Hylia",))),
("LH Child Fishing", ("NPC", 0x49, 0x3E, None, ("Lake Hylia", "Minigames",))),
("LH Adult Fishing", ("NPC", 0x49, 0x38, None, ("Lake Hylia", "Minigames",))),
("LH Lab Dive", ("NPC", 0x38, 0x3E, None, ("Lake Hylia",))),
("LH Freestanding PoH", ("Collectable", 0x57, 0x1E, None, ("Lake Hylia",))),
("LH Sun", ("NPC", 0x57, 0x58, None, ("Lake Hylia",))),
("LH Deku Scrub Grotto Left", ("GrottoNPC", 0xEF, 0x30, None, ("Lake Hylia", "Deku Scrub", "Grottos"))),
("LH Deku Scrub Grotto Center", ("GrottoNPC", 0xEF, 0x33, None, ("Lake Hylia", "Deku Scrub", "Grottos"))),
("LH Deku Scrub Grotto Right", ("GrottoNPC", 0xEF, 0x37, None, ("Lake Hylia", "Deku Scrub", "Grottos"))),
("LH GS Bean Patch", ("GS Token", 0x12, 0x01, None, ("Lake Hylia", "Skulltulas",))),
("LH GS Lab Wall", ("GS Token", 0x12, 0x04, None, ("Lake Hylia", "Skulltulas",))),
("LH GS Small Island", ("GS Token", 0x12, 0x02, None, ("Lake Hylia", "Skulltulas",))),
("LH GS Lab Crate", ("GS Token", 0x12, 0x08, None, ("Lake Hylia", "Skulltulas",))),
("LH GS Tree", ("GS Token", 0x12, 0x10, None, ("Lake Hylia", "Skulltulas",))),
# Gerudo Valley
("GV Crate Freestanding PoH", ("Collectable", 0x5A, 0x02, None, ("Gerudo Valley", "Gerudo",))),
("GV Waterfall Freestanding PoH", ("Collectable", 0x5A, 0x01, None, ("Gerudo Valley", "Gerudo",))),
("GV Chest", ("Chest", 0x5A, 0x00, None, ("Gerudo Valley", "Gerudo",))),
("GV Deku Scrub Grotto Front", ("GrottoNPC", 0xF0, 0x3A, None, ("Gerudo Valley", "Gerudo", "Deku Scrub", "Grottos"))),
("GV Deku Scrub Grotto Rear", ("GrottoNPC", 0xF0, 0x39, None, ("Gerudo Valley", "Gerudo", "Deku Scrub", "Grottos"))),
("GV Cow", ("NPC", 0x5A, 0x15, None, ("Gerudo Valley", "Gerudo", "Cow"))),
("GV GS Small Bridge", ("GS Token", 0x13, 0x02, None, ("Gerudo Valley", "Skulltulas",))),
("GV GS Bean Patch", ("GS Token", 0x13, 0x01, None, ("Gerudo Valley", "Skulltulas",))),
("GV GS Behind Tent", ("GS Token", 0x13, 0x08, None, ("Gerudo Valley", "Skulltulas",))),
("GV GS Pillar", ("GS Token", 0x13, 0x04, None, ("Gerudo Valley", "Skulltulas",))),
# Gerudo's Fortress
("GF North F1 Carpenter", ("Collectable", 0x0C, 0x0C, None, ("Gerudo's Fortress", "Gerudo",))),
("GF North F2 Carpenter", ("Collectable", 0x0C, 0x0A, None, ("Gerudo's Fortress", "Gerudo",))),
("GF South F1 Carpenter", ("Collectable", 0x0C, 0x0E, None, ("Gerudo's Fortress", "Gerudo",))),
("GF South F2 Carpenter", ("Collectable", 0x0C, 0x0F, None, ("Gerudo's Fortress", "Gerudo",))),
("GF Gerudo Membership Card", ("NPC", 0x0C, 0x3A, None, ("Gerudo's Fortress", "Gerudo",))),
("GF Chest", ("Chest", 0x5D, 0x00, None, ("Gerudo's Fortress", "Gerudo",))),
("GF HBA 1000 Points", ("NPC", 0x5D, 0x3E, None, ("Gerudo's Fortress", "Gerudo", "Minigames"))),
("GF HBA 1500 Points", ("NPC", 0x5D, 0x30, None, ("Gerudo's Fortress", "Gerudo", "Minigames"))),
("GF GS Top Floor", ("GS Token", 0x14, 0x02, None, ("Gerudo's Fortress", "Skulltulas",))),
("GF GS Archery Range", ("GS Token", 0x14, 0x01, None, ("Gerudo's Fortress", "Skulltulas",))),
# Wasteland
("Wasteland Bombchu Salesman", ("NPC", 0x5E, 0x03, None, ("Haunted Wasteland",))),
("Wasteland Chest", ("Chest", 0x5E, 0x00, None, ("Haunted Wasteland",))),
("Wasteland GS", ("GS Token", 0x15, 0x02, None, ("Haunted Wasteland", "Skulltulas",))),
# Colossus
("Colossus Great Fairy Reward", ("Cutscene", 0xFF, 0x12, None, ("Desert Colossus", "Fairies",))),
("Colossus Freestanding PoH", ("Collectable", 0x5C, 0x0D, None, ("Desert Colossus",))),
("Colossus Deku Scrub Grotto Front", ("GrottoNPC", 0xFD, 0x3A, None, ("Desert Colossus", "Deku Scrub", "Grottos"))),
("Colossus Deku Scrub Grotto Rear", ("GrottoNPC", 0xFD, 0x39, None, ("Desert Colossus", "Deku Scrub", "Grottos"))),
("Colossus GS Bean Patch", ("GS Token", 0x15, 0x01, None, ("Desert Colossus", "Skulltulas",))),
("Colossus GS Tree", ("GS Token", 0x15, 0x08, None, ("Desert Colossus", "Skulltulas",))),
("Colossus GS Hill", ("GS Token", 0x15, 0x04, None, ("Desert Colossus", "Skulltulas",))),
# Outside Ganon's Castle
("OGC Great Fairy Reward", ("Cutscene", 0xFF, 0x15, None, ("outside Ganon's Castle", "Market", "Fairies"))),
("OGC GS", ("GS Token", 0x0E, 0x01, None, ("outside Ganon's Castle", "Skulltulas",))),
## Dungeons
# Deku Tree vanilla
("Deku Tree Map Chest", ("Chest", 0x00, 0x03, None, ("Deku Tree",))),
("Deku Tree Slingshot Room Side Chest", ("Chest", 0x00, 0x05, None, ("Deku Tree",))),
("Deku Tree Slingshot Chest", ("Chest", 0x00, 0x01, None, ("Deku Tree",))),
("Deku Tree Compass Chest", ("Chest", 0x00, 0x02, None, ("Deku Tree",))),
("Deku Tree Compass Room Side Chest", ("Chest", 0x00, 0x06, None, ("Deku Tree",))),
("Deku Tree Basement Chest", ("Chest", 0x00, 0x04, None, ("Deku Tree",))),
("Deku Tree GS Compass Room", ("GS Token", 0x00, 0x08, None, ("Deku Tree", "Skulltulas",))),
("Deku Tree GS Basement Vines", ("GS Token", 0x00, 0x04, None, ("Deku Tree", "Skulltulas",))),
("Deku Tree GS Basement Gate", ("GS Token", 0x00, 0x02, None, ("Deku Tree", "Skulltulas",))),
("Deku Tree GS Basement Back Room", ("GS Token", 0x00, 0x01, None, ("Deku Tree", "Skulltulas",))),
# Deku Tree MQ
("Deku Tree MQ Map Chest", ("Chest", 0x00, 0x03, None, ("Deku Tree",))),
("Deku Tree MQ Slingshot Chest", ("Chest", 0x00, 0x06, None, ("Deku Tree",))),
("Deku Tree MQ Slingshot Room Back Chest", ("Chest", 0x00, 0x02, None, ("Deku Tree",))),
("Deku Tree MQ Compass Chest", ("Chest", 0x00, 0x01, None, ("Deku Tree",))),
("Deku Tree MQ Basement Chest", ("Chest", 0x00, 0x04, None, ("Deku Tree",))),
("Deku Tree MQ Before Spinning Log Chest", ("Chest", 0x00, 0x05, None, ("Deku Tree",))),
("Deku Tree MQ After Spinning Log Chest", ("Chest", 0x00, 0x00, None, ("Deku Tree",))),
("Deku Tree MQ Deku Scrub", ("NPC", 0x00, 0x34, None, ("Deku Tree", "Deku Scrub",))),
("Deku Tree MQ GS Lobby", ("GS Token", 0x00, 0x02, None, ("Deku Tree", "Skulltulas",))),
("Deku Tree MQ GS Compass Room", ("GS Token", 0x00, 0x08, None, ("Deku Tree", "Skulltulas",))),
("Deku Tree MQ GS Basement Graves Room", ("GS Token", 0x00, 0x04, None, ("Deku Tree", "Skulltulas",))),
("Deku Tree MQ GS Basement Back Room", ("GS Token", 0x00, 0x01, None, ("Deku Tree", "Skulltulas",))),
# Deku Tree shared
("Deku Tree Queen Gohma Heart", ("BossHeart", 0x11, 0x4F, None, ("Deku Tree",))),
# Dodongo's Cavern vanilla
("Dodongos Cavern Map Chest", ("Chest", 0x01, 0x08, None, ("Dodongo's Cavern",))),
("Dodongos Cavern Compass Chest", ("Chest", 0x01, 0x05, None, ("Dodongo's Cavern",))),
("Dodongos Cavern Bomb Flower Platform Chest", ("Chest", 0x01, 0x06, None, ("Dodongo's Cavern",))),
("Dodongos Cavern Bomb Bag Chest", ("Chest", 0x01, 0x04, None, ("Dodongo's Cavern",))),
("Dodongos Cavern End of Bridge Chest", ("Chest", 0x01, 0x0A, None, ("Dodongo's Cavern",))),
("Dodongos Cavern Deku Scrub Side Room Near Dodongos", ("NPC", 0x01, 0x31, None, ("Dodongo's Cavern", "Deku Scrub",))),
("Dodongos Cavern Deku Scrub Lobby", ("NPC", 0x01, 0x34, None, ("Dodongo's Cavern", "Deku Scrub",))),
("Dodongos Cavern Deku Scrub Near Bomb Bag Left", ("NPC", 0x01, 0x30, None, ("Dodongo's Cavern", "Deku Scrub",))),
("Dodongos Cavern Deku Scrub Near Bomb Bag Right", ("NPC", 0x01, 0x33, None, ("Dodongo's Cavern", "Deku Scrub",))),
("Dodongos Cavern GS Side Room Near Lower Lizalfos", ("GS Token", 0x01, 0x10, None, ("Dodongo's Cavern", "Skulltulas",))),
("Dodongos Cavern GS Scarecrow", ("GS Token", 0x01, 0x02, None, ("Dodongo's Cavern", "Skulltulas",))),
("Dodongos Cavern GS Alcove Above Stairs", ("GS Token", 0x01, 0x04, None, ("Dodongo's Cavern", "Skulltulas",))),
("Dodongos Cavern GS Vines Above Stairs", ("GS Token", 0x01, 0x01, None, ("Dodongo's Cavern", "Skulltulas",))),
("Dodongos Cavern GS Back Room", ("GS Token", 0x01, 0x08, None, ("Dodongo's Cavern", "Skulltulas",))),
# Dodongo's Cavern MQ
("Dodongos Cavern MQ Map Chest", ("Chest", 0x01, 0x00, None, ("Dodongo's Cavern",))),
("Dodongos Cavern MQ Bomb Bag Chest", ("Chest", 0x01, 0x04, None, ("Dodongo's Cavern",))),
("Dodongos Cavern MQ Torch Puzzle Room Chest", ("Chest", 0x01, 0x03, None, ("Dodongo's Cavern",))),
("Dodongos Cavern MQ Larvae Room Chest", ("Chest", 0x01, 0x02, None, ("Dodongo's Cavern",))),
("Dodongos Cavern MQ Compass Chest", ("Chest", 0x01, 0x05, None, ("Dodongo's Cavern",))),
("Dodongos Cavern MQ Under Grave Chest", ("Chest", 0x01, 0x01, None, ("Dodongo's Cavern",))),
("Dodongos Cavern MQ Deku Scrub Lobby Front", ("NPC", 0x01, 0x33, None, ("Dodongo's Cavern", "Deku Scrub",))),
("Dodongos Cavern MQ Deku Scrub Lobby Rear", ("NPC", 0x01, 0x31, None, ("Dodongo's Cavern", "Deku Scrub",))),
("Dodongos Cavern MQ Deku Scrub Side Room Near Lower Lizalfos", ("NPC", 0x01, 0x39, None, ("Dodongo's Cavern", "Deku Scrub",))),
("Dodongos Cavern MQ Deku Scrub Staircase", ("NPC", 0x01, 0x34, None, ("Dodongo's Cavern", "Deku Scrub",))),
("Dodongos Cavern MQ GS Scrub Room", ("GS Token", 0x01, 0x02, None, ("Dodongo's Cavern", "Skulltulas",))),
("Dodongos Cavern MQ GS Larvae Room", ("GS Token", 0x01, 0x10, None, ("Dodongo's Cavern", "Skulltulas",))),
("Dodongos Cavern MQ GS Lizalfos Room", ("GS Token", 0x01, 0x04, None, ("Dodongo's Cavern", "Skulltulas",))),
("Dodongos Cavern MQ GS Song of Time Block Room", ("GS Token", 0x01, 0x08, None, ("Dodongo's Cavern", "Skulltulas",))),
("Dodongos Cavern MQ GS Back Area", ("GS Token", 0x01, 0x01, None, ("Dodongo's Cavern", "Skulltulas",))),
# Dodongo's Cavern shared
("Dodongos Cavern Boss Room Chest", ("Chest", 0x12, 0x00, None, ("Dodongo's Cavern",))),
("Dodongos Cavern King Dodongo Heart", ("BossHeart", 0x12, 0x4F, None, ("Dodongo's Cavern",))),
# Jabu Jabu's Belly vanilla
("Jabu Jabus Belly Boomerang Chest", ("Chest", 0x02, 0x01, None, ("Jabu Jabu's Belly",))),
("Jabu Jabus Belly Map Chest", ("Chest", 0x02, 0x02, None, ("Jabu Jabu's Belly",))),
("Jabu Jabus Belly Compass Chest", ("Chest", 0x02, 0x04, None, ("Jabu Jabu's Belly",))),
("Jabu Jabus Belly Deku Scrub", ("NPC", 0x02, 0x30, None, ("Jabu Jabu's Belly", "Deku Scrub",))),
("Jabu Jabus Belly GS Water Switch Room", ("GS Token", 0x02, 0x08, None, ("Jabu Jabu's Belly", "Skulltulas",))),
("Jabu Jabus Belly GS Lobby Basement Lower", ("GS Token", 0x02, 0x01, None, ("Jabu Jabu's Belly", "Skulltulas",))),
("Jabu Jabus Belly GS Lobby Basement Upper", ("GS Token", 0x02, 0x02, None, ("Jabu Jabu's Belly", "Skulltulas",))),
("Jabu Jabus Belly GS Near Boss", ("GS Token", 0x02, 0x04, None, ("Jabu Jabu's Belly", "Skulltulas",))),
# Jabu Jabu's Belly MQ
("Jabu Jabus Belly MQ Map Chest", ("Chest", 0x02, 0x03, None, ("Jabu Jabu's Belly",))),
("Jabu Jabus Belly MQ First Room Side Chest", ("Chest", 0x02, 0x05, None, ("Jabu Jabu's Belly",))),
("Jabu Jabus Belly MQ Second Room Lower Chest", ("Chest", 0x02, 0x02, None, ("Jabu Jabu's Belly",))),
("Jabu Jabus Belly MQ Compass Chest", ("Chest", 0x02, 0x00, None, ("Jabu Jabu's Belly",))),
("Jabu Jabus Belly MQ Basement Near Switches Chest", ("Chest", 0x02, 0x08, None, ("Jabu Jabu's Belly",))),
("Jabu Jabus Belly MQ Basement Near Vines Chest", ("Chest", 0x02, 0x04, None, ("Jabu Jabu's Belly",))),
("Jabu Jabus Belly MQ Boomerang Room Small Chest", ("Chest", 0x02, 0x01, None, ("Jabu Jabu's Belly",))),
("Jabu Jabus Belly MQ Boomerang Chest", ("Chest", 0x02, 0x06, None, ("Jabu Jabu's Belly",))),
("Jabu Jabus Belly MQ Falling Like Like Room Chest", ("Chest", 0x02, 0x09, None, ("Jabu Jabu's Belly",))),
("Jabu Jabus Belly MQ Second Room Upper Chest", ("Chest", 0x02, 0x07, None, ("Jabu Jabu's Belly",))),
("Jabu Jabus Belly MQ Near Boss Chest", ("Chest", 0x02, 0x0A, None, ("Jabu Jabu's Belly",))),
("Jabu Jabus Belly MQ Cow", ("NPC", 0x02, 0x15, None, ("Jabu Jabu's Belly", "Cow",))),
("Jabu Jabus Belly MQ GS Boomerang Chest Room", ("GS Token", 0x02, 0x01, None, ("Jabu Jabu's Belly", "Skulltulas",))),
("Jabu Jabus Belly MQ GS Tailpasaran Room", ("GS Token", 0x02, 0x04, None, ("Jabu Jabu's Belly", "Skulltulas",))),
("Jabu Jabus Belly MQ GS Invisible Enemies Room", ("GS Token", 0x02, 0x08, None, ("Jabu Jabu's Belly", "Skulltulas",))),
("Jabu Jabus Belly MQ GS Near Boss", ("GS Token", 0x02, 0x02, None, ("Jabu Jabu's Belly", "Skulltulas",))),
# Jabu Jabu's Belly shared
("Jabu Jabus Belly Barinade Heart", ("BossHeart", 0x13, 0x4F, None, ("Jabu Jabu's Belly",))),
# Bottom of the Well vanilla
("Bottom of the Well Front Left Fake Wall Chest", ("Chest", 0x08, 0x08, None, ("Bottom of the Well",))),
("Bottom of the Well Front Center Bombable Chest", ("Chest", 0x08, 0x02, None, ("Bottom of the Well",))),
("Bottom of the Well Back Left Bombable Chest", ("Chest", 0x08, 0x04, None, ("Bottom of the Well",))),
("Bottom of the Well Underwater Left Chest", ("Chest", 0x08, 0x09, None, ("Bottom of the Well",))),
("Bottom of the Well Freestanding Key", ("Collectable", 0x08, 0x01, None, ("Bottom of the Well",))),
("Bottom of the Well Compass Chest", ("Chest", 0x08, 0x01, None, ("Bottom of the Well",))),
("Bottom of the Well Center Skulltula Chest", ("Chest", 0x08, 0x0E, None, ("Bottom of the Well",))),
("Bottom of the Well Right Bottom Fake Wall Chest", ("Chest", 0x08, 0x05, None, ("Bottom of the Well",))),
("Bottom of the Well Fire Keese Chest", ("Chest", 0x08, 0x0A, None, ("Bottom of the Well",))),
("Bottom of the Well Like Like Chest", ("Chest", 0x08, 0x0C, None, ("Bottom of the Well",))),
("Bottom of the Well Map Chest", ("Chest", 0x08, 0x07, None, ("Bottom of the Well",))),
("Bottom of the Well Underwater Front Chest", ("Chest", 0x08, 0x10, None, ("Bottom of the Well",))),
("Bottom of the Well Invisible Chest", ("Chest", 0x08, 0x14, None, ("Bottom of the Well",))),
("Bottom of the Well Lens of Truth Chest", ("Chest", 0x08, 0x03, None, ("Bottom of the Well",))),
("Bottom of the Well GS West Inner Room", ("GS Token", 0x08, 0x04, None, ("Bottom of the Well", "Skulltulas",))),
("Bottom of the Well GS East Inner Room", ("GS Token", 0x08, 0x02, None, ("Bottom of the Well", "Skulltulas",))),
("Bottom of the Well GS Like Like Cage", ("GS Token", 0x08, 0x01, None, ("Bottom of the Well", "Skulltulas",))),
# Bottom of the Well MQ
("Bottom of the Well MQ Map Chest", ("Chest", 0x08, 0x03, None, ("Bottom of the Well",))),
("Bottom of the Well MQ East Inner Room Freestanding Key",("Collectable", 0x08, 0x01, None, ("Bottom of the Well",))),
("Bottom of the Well MQ Compass Chest", ("Chest", 0x08, 0x02, None, ("Bottom of the Well",))),
("Bottom of the Well MQ Dead Hand Freestanding Key", ("Collectable", 0x08, 0x02, None, ("Bottom of the Well",))),
("Bottom of the Well MQ Lens of Truth Chest", ("Chest", 0x08, 0x01, None, ("Bottom of the Well",))),
("Bottom of the Well MQ GS Coffin Room", ("GS Token", 0x08, 0x04, None, ("Bottom of the Well", "Skulltulas",))),
("Bottom of the Well MQ GS West Inner Room", ("GS Token", 0x08, 0x02, None, ("Bottom of the Well", "Skulltulas",))),
("Bottom of the Well MQ GS Basement", ("GS Token", 0x08, 0x01, None, ("Bottom of the Well", "Skulltulas",))),
# Forest Temple vanilla
("Forest Temple First Room Chest", ("Chest", 0x03, 0x03, None, ("Forest Temple",))),
("Forest Temple First Stalfos Chest", ("Chest", 0x03, 0x00, None, ("Forest Temple",))),
("Forest Temple Raised Island Courtyard Chest", ("Chest", 0x03, 0x05, None, ("Forest Temple",))),
("Forest Temple Map Chest", ("Chest", 0x03, 0x01, None, ("Forest Temple",))),
("Forest Temple Well Chest", ("Chest", 0x03, 0x09, None, ("Forest Temple",))),
("Forest Temple Eye Switch Chest", ("Chest", 0x03, 0x04, None, ("Forest Temple",))),
("Forest Temple Boss Key Chest", ("Chest", 0x03, 0x0E, None, ("Forest Temple",))),
("Forest Temple Floormaster Chest", ("Chest", 0x03, 0x02, None, ("Forest Temple",))),
("Forest Temple Red Poe Chest", ("Chest", 0x03, 0x0D, None, ("Forest Temple",))),
("Forest Temple Bow Chest", ("Chest", 0x03, 0x0C, None, ("Forest Temple",))),
("Forest Temple Blue Poe Chest", ("Chest", 0x03, 0x0F, None, ("Forest Temple",))),
("Forest Temple Falling Ceiling Room Chest", ("Chest", 0x03, 0x07, None, ("Forest Temple",))),
("Forest Temple Basement Chest", ("Chest", 0x03, 0x0B, None, ("Forest Temple",))),
("Forest Temple GS First Room", ("GS Token", 0x03, 0x02, None, ("Forest Temple", "Skulltulas",))),
("Forest Temple GS Lobby", ("GS Token", 0x03, 0x08, None, ("Forest Temple", "Skulltulas",))),
("Forest Temple GS Raised Island Courtyard", ("GS Token", 0x03, 0x01, None, ("Forest Temple", "Skulltulas",))),
("Forest Temple GS Level Island Courtyard", ("GS Token", 0x03, 0x04, None, ("Forest Temple", "Skulltulas",))),
("Forest Temple GS Basement", ("GS Token", 0x03, 0x10, None, ("Forest Temple", "Skulltulas",))),
# Forest Temple MQ
("Forest Temple MQ First Room Chest", ("Chest", 0x03, 0x03, None, ("Forest Temple",))),
("Forest Temple MQ Wolfos Chest", ("Chest", 0x03, 0x00, None, ("Forest Temple",))),
("Forest Temple MQ Well Chest", ("Chest", 0x03, 0x09, None, ("Forest Temple",))),
("Forest Temple MQ Raised Island Courtyard Lower Chest",("Chest", 0x03, 0x01, None, ("Forest Temple",))),
("Forest Temple MQ Raised Island Courtyard Upper Chest",("Chest", 0x03, 0x05, None, ("Forest Temple",))),
("Forest Temple MQ Boss Key Chest", ("Chest", 0x03, 0x0E, None, ("Forest Temple",))),
("Forest Temple MQ Redead Chest", ("Chest", 0x03, 0x02, None, ("Forest Temple",))),
("Forest Temple MQ Map Chest", ("Chest", 0x03, 0x0D, None, ("Forest Temple",))),
("Forest Temple MQ Bow Chest", ("Chest", 0x03, 0x0C, None, ("Forest Temple",))),
("Forest Temple MQ Compass Chest", ("Chest", 0x03, 0x0F, None, ("Forest Temple",))),
("Forest Temple MQ Falling Ceiling Room Chest", ("Chest", 0x03, 0x06, None, ("Forest Temple",))),
("Forest Temple MQ Basement Chest", ("Chest", 0x03, 0x0B, None, ("Forest Temple",))),
("Forest Temple MQ GS First Hallway", ("GS Token", 0x03, 0x02, None, ("Forest Temple", "Skulltulas",))),
("Forest Temple MQ GS Raised Island Courtyard", ("GS Token", 0x03, 0x01, None, ("Forest Temple", "Skulltulas",))),
("Forest Temple MQ GS Level Island Courtyard", ("GS Token", 0x03, 0x04, None, ("Forest Temple", "Skulltulas",))),
("Forest Temple MQ GS Well", ("GS Token", 0x03, 0x08, None, ("Forest Temple", "Skulltulas",))),
("Forest Temple MQ GS Block Push Room", ("GS Token", 0x03, 0x10, None, ("Forest Temple", "Skulltulas",))),
# Forest Temple shared
("Forest Temple Phantom Ganon Heart", ("BossHeart", 0x14, 0x4F, None, ("Forest Temple",))),
# Fire Temple vanilla
("Fire Temple Near Boss Chest", ("Chest", 0x04, 0x01, None, ("Fire Temple",))),
("Fire Temple Flare Dancer Chest", ("Chest", 0x04, 0x00, None, ("Fire Temple",))),
("Fire Temple Boss Key Chest", ("Chest", 0x04, 0x0C, None, ("Fire Temple",))),
("Fire Temple Big Lava Room Lower Open Door Chest", ("Chest", 0x04, 0x04, None, ("Fire Temple",))),
("Fire Temple Big Lava Room Blocked Door Chest", ("Chest", 0x04, 0x02, None, ("Fire Temple",))),
("Fire Temple Boulder Maze Lower Chest", ("Chest", 0x04, 0x03, None, ("Fire Temple",))),
("Fire Temple Boulder Maze Side Room Chest", ("Chest", 0x04, 0x08, None, ("Fire Temple",))),
("Fire Temple Map Chest", ("Chest", 0x04, 0x0A, None, ("Fire Temple",))),
("Fire Temple Boulder Maze Shortcut Chest", ("Chest", 0x04, 0x0B, None, ("Fire Temple",))),
("Fire Temple Boulder Maze Upper Chest", ("Chest", 0x04, 0x06, None, ("Fire Temple",))),
("Fire Temple Scarecrow Chest", ("Chest", 0x04, 0x0D, None, ("Fire Temple",))),
("Fire Temple Compass Chest", ("Chest", 0x04, 0x07, None, ("Fire Temple",))),
("Fire Temple Megaton Hammer Chest", ("Chest", 0x04, 0x05, None, ("Fire Temple",))),
("Fire Temple Highest Goron Chest", ("Chest", 0x04, 0x09, None, ("Fire Temple",))),
("Fire Temple GS Boss Key Loop", ("GS Token", 0x04, 0x02, None, ("Fire Temple", "Skulltulas",))),
("Fire Temple GS Song of Time Room", ("GS Token", 0x04, 0x01, None, ("Fire Temple", "Skulltulas",))),
("Fire Temple GS Boulder Maze", ("GS Token", 0x04, 0x04, None, ("Fire Temple", "Skulltulas",))),
("Fire Temple GS Scarecrow Climb", ("GS Token", 0x04, 0x10, None, ("Fire Temple", "Skulltulas",))),
("Fire Temple GS Scarecrow Top", ("GS Token", 0x04, 0x08, None, ("Fire Temple", "Skulltulas",))),
# Fire Temple MQ
("Fire Temple MQ Map Room Side Chest", ("Chest", 0x04, 0x02, None, ("Fire Temple",))),
("Fire Temple MQ Megaton Hammer Chest", ("Chest", 0x04, 0x00, None, ("Fire Temple",))),
("Fire Temple MQ Map Chest", ("Chest", 0x04, 0x0C, None, ("Fire Temple",))),
("Fire Temple MQ Near Boss Chest", ("Chest", 0x04, 0x07, None, ("Fire Temple",))),
("Fire Temple MQ Big Lava Room Blocked Door Chest", ("Chest", 0x04, 0x01, None, ("Fire Temple",))),
("Fire Temple MQ Boss Key Chest", ("Chest", 0x04, 0x04, None, ("Fire Temple",))),
("Fire Temple MQ Lizalfos Maze Side Room Chest", ("Chest", 0x04, 0x08, None, ("Fire Temple",))),
("Fire Temple MQ Compass Chest", ("Chest", 0x04, 0x0B, None, ("Fire Temple",))),
("Fire Temple MQ Lizalfos Maze Upper Chest", ("Chest", 0x04, 0x06, None, ("Fire Temple",))),
("Fire Temple MQ Lizalfos Maze Lower Chest", ("Chest", 0x04, 0x03, None, ("Fire Temple",))),
("Fire Temple MQ Freestanding Key", ("Collectable", 0x04, 0x1C, None, ("Fire Temple",))),
("Fire Temple MQ Chest On Fire", ("Chest", 0x04, 0x05, None, ("Fire Temple",))),
("Fire Temple MQ GS Big Lava Room Open Door", ("GS Token", 0x04, 0x01, None, ("Fire Temple", "Skulltulas",))),
("Fire Temple MQ GS Skull On Fire", ("GS Token", 0x04, 0x04, None, ("Fire Temple", "Skulltulas",))),
("Fire Temple MQ GS Fire Wall Maze Center", ("GS Token", 0x04, 0x08, None, ("Fire Temple", "Skulltulas",))),
("Fire Temple MQ GS Fire Wall Maze Side Room", ("GS Token", 0x04, 0x10, None, ("Fire Temple", "Skulltulas",))),
("Fire Temple MQ GS Above Fire Wall Maze", ("GS Token", 0x04, 0x02, None, ("Fire Temple", "Skulltulas",))),
# Fire Temple shared
("Fire Temple Volvagia Heart", ("BossHeart", 0x15, 0x4F, None, ("Fire Temple",))),
# Water Temple vanilla
("Water Temple Compass Chest", ("Chest", 0x05, 0x09, None, ("Water Temple",))),
("Water Temple Map Chest", ("Chest", 0x05, 0x02, None, ("Water Temple",))),
("Water Temple Cracked Wall Chest", ("Chest", 0x05, 0x00, None, ("Water Temple",))),
("Water Temple Torches Chest", ("Chest", 0x05, 0x01, None, ("Water Temple",))),
("Water Temple Boss Key Chest", ("Chest", 0x05, 0x05, None, ("Water Temple",))),
("Water Temple Central Pillar Chest", ("Chest", 0x05, 0x06, None, ("Water Temple",))),
("Water Temple Central Bow Target Chest", ("Chest", 0x05, 0x08, None, ("Water Temple",))),
("Water Temple Longshot Chest", ("Chest", 0x05, 0x07, None, ("Water Temple",))),
("Water Temple River Chest", ("Chest", 0x05, 0x03, None, ("Water Temple",))),
("Water Temple Dragon Chest", ("Chest", 0x05, 0x0A, None, ("Water Temple",))),
("Water Temple GS Behind Gate", ("GS Token", 0x05, 0x01, None, ("Water Temple", "Skulltulas",))),
("Water Temple GS Near Boss Key Chest", ("GS Token", 0x05, 0x08, None, ("Water Temple", "Skulltulas",))),
("Water Temple GS Central Pillar", ("GS Token", 0x05, 0x04, None, ("Water Temple", "Skulltulas",))),
("Water Temple GS Falling Platform Room", ("GS Token", 0x05, 0x02, None, ("Water Temple", "Skulltulas",))),
("Water Temple GS River", ("GS Token", 0x05, 0x10, None, ("Water Temple", "Skulltulas",))),
# Water Temple MQ
("Water Temple MQ Longshot Chest", ("Chest", 0x05, 0x00, None, ("Water Temple",))),
("Water Temple MQ Map Chest", ("Chest", 0x05, 0x02, None, ("Water Temple",))),
("Water Temple MQ Compass Chest", ("Chest", 0x05, 0x01, None, ("Water Temple",))),
("Water Temple MQ Central Pillar Chest", ("Chest", 0x05, 0x06, None, ("Water Temple",))),
("Water Temple MQ Boss Key Chest", ("Chest", 0x05, 0x05, None, ("Water Temple",))),
("Water Temple MQ Freestanding Key", ("Collectable", 0x05, 0x01, None, ("Water Temple",))),
("Water Temple MQ GS Lizalfos Hallway", ("GS Token", 0x05, 0x01, None, ("Water Temple", "Skulltulas",))),
("Water Temple MQ GS Before Upper Water Switch", ("GS Token", 0x05, 0x04, None, ("Water Temple", "Skulltulas",))),
("Water Temple MQ GS River", ("GS Token", 0x05, 0x02, None, ("Water Temple", "Skulltulas",))),
("Water Temple MQ GS Freestanding Key Area", ("GS Token", 0x05, 0x08, None, ("Water Temple", "Skulltulas",))),
("Water Temple MQ GS Triple Wall Torch", ("GS Token", 0x05, 0x10, None, ("Water Temple", "Skulltulas",))),
# Water Temple shared
("Water Temple Morpha Heart", ("BossHeart", 0x16, 0x4F, None, ("Water Temple",))),
# Shadow Temple vanilla
("Shadow Temple Map Chest", ("Chest", 0x07, 0x01, None, ("Shadow Temple",))),
("Shadow Temple Hover Boots Chest", ("Chest", 0x07, 0x07, None, ("Shadow Temple",))),
("Shadow Temple Compass Chest", ("Chest", 0x07, 0x03, None, ("Shadow Temple",))),
("Shadow Temple Early Silver Rupee Chest", ("Chest", 0x07, 0x02, None, ("Shadow Temple",))),
("Shadow Temple Invisible Blades Visible Chest", ("Chest", 0x07, 0x0C, None, ("Shadow Temple",))),
("Shadow Temple Invisible Blades Invisible Chest", ("Chest", 0x07, 0x16, None, ("Shadow Temple",))),
("Shadow Temple Falling Spikes Lower Chest", ("Chest", 0x07, 0x05, None, ("Shadow Temple",))),
("Shadow Temple Falling Spikes Upper Chest", ("Chest", 0x07, 0x06, None, ("Shadow Temple",))),
("Shadow Temple Falling Spikes Switch Chest", ("Chest", 0x07, 0x04, None, ("Shadow Temple",))),
("Shadow Temple Invisible Spikes Chest", ("Chest", 0x07, 0x09, None, ("Shadow Temple",))),
("Shadow Temple Freestanding Key", ("Collectable", 0x07, 0x01, None, ("Shadow Temple",))),
("Shadow Temple Wind Hint Chest", ("Chest", 0x07, 0x15, None, ("Shadow Temple",))),
("Shadow Temple After Wind Enemy Chest", ("Chest", 0x07, 0x08, None, ("Shadow Temple",))),
("Shadow Temple After Wind Hidden Chest", ("Chest", 0x07, 0x14, None, ("Shadow Temple",))),
("Shadow Temple Spike Walls Left Chest", ("Chest", 0x07, 0x0A, None, ("Shadow Temple",))),
("Shadow Temple Boss Key Chest", ("Chest", 0x07, 0x0B, None, ("Shadow Temple",))),
("Shadow Temple Invisible Floormaster Chest", ("Chest", 0x07, 0x0D, None, ("Shadow Temple",))),
("Shadow Temple GS Like Like Room", ("GS Token", 0x07, 0x08, None, ("Shadow Temple", "Skulltulas",))),
("Shadow Temple GS Falling Spikes Room", ("GS Token", 0x07, 0x02, None, ("Shadow Temple", "Skulltulas",))),
("Shadow Temple GS Single Giant Pot", ("GS Token", 0x07, 0x01, None, ("Shadow Temple", "Skulltulas",))),
("Shadow Temple GS Near Ship", ("GS Token", 0x07, 0x10, None, ("Shadow Temple", "Skulltulas",))),
("Shadow Temple GS Triple Giant Pot", ("GS Token", 0x07, 0x04, None, ("Shadow Temple", "Skulltulas",))),
# Shadow Temple MQ
("Shadow Temple MQ Early Gibdos Chest", ("Chest", 0x07, 0x03, None, ("Shadow Temple",))),
("Shadow Temple MQ Map Chest", ("Chest", 0x07, 0x02, None, ("Shadow Temple",))),
("Shadow Temple MQ Near Ship Invisible Chest", ("Chest", 0x07, 0x0E, None, ("Shadow Temple",))),
("Shadow Temple MQ Compass Chest", ("Chest", 0x07, 0x01, None, ("Shadow Temple",))),
("Shadow Temple MQ Hover Boots Chest", ("Chest", 0x07, 0x07, None, ("Shadow Temple",))),
("Shadow Temple MQ Invisible Blades Invisible Chest", ("Chest", 0x07, 0x16, None, ("Shadow Temple",))),
("Shadow Temple MQ Invisible Blades Visible Chest", ("Chest", 0x07, 0x0C, None, ("Shadow Temple",))),
("Shadow Temple MQ Beamos Silver Rupees Chest", ("Chest", 0x07, 0x0F, None, ("Shadow Temple",))),
("Shadow Temple MQ Falling Spikes Lower Chest", ("Chest", 0x07, 0x05, None, ("Shadow Temple",))),
("Shadow Temple MQ Falling Spikes Upper Chest", ("Chest", 0x07, 0x06, None, ("Shadow Temple",))),
("Shadow Temple MQ Falling Spikes Switch Chest", ("Chest", 0x07, 0x04, None, ("Shadow Temple",))),
("Shadow Temple MQ Invisible Spikes Chest", ("Chest", 0x07, 0x09, None, ("Shadow Temple",))),
("Shadow Temple MQ Stalfos Room Chest", ("Chest", 0x07, 0x10, None, ("Shadow Temple",))),
("Shadow Temple MQ Wind Hint Chest", ("Chest", 0x07, 0x15, None, ("Shadow Temple",))),
("Shadow Temple MQ After Wind Hidden Chest", ("Chest", 0x07, 0x14, None, ("Shadow Temple",))),
("Shadow Temple MQ After Wind Enemy Chest", ("Chest", 0x07, 0x08, None, ("Shadow Temple",))),
("Shadow Temple MQ Boss Key Chest", ("Chest", 0x07, 0x0B, None, ("Shadow Temple",))),
("Shadow Temple MQ Spike Walls Left Chest", ("Chest", 0x07, 0x0A, None, ("Shadow Temple",))),
("Shadow Temple MQ Freestanding Key", ("Collectable", 0x07, 0x06, None, ("Shadow Temple",))),
("Shadow Temple MQ Bomb Flower Chest", ("Chest", 0x07, 0x0D, None, ("Shadow Temple",))),
("Shadow Temple MQ GS Falling Spikes Room", ("GS Token", 0x07, 0x02, None, ("Shadow Temple", "Skulltulas",))),
("Shadow Temple MQ GS Wind Hint Room", ("GS Token", 0x07, 0x01, None, ("Shadow Temple", "Skulltulas",))),
("Shadow Temple MQ GS After Wind", ("GS Token", 0x07, 0x08, None, ("Shadow Temple", "Skulltulas",))),
("Shadow Temple MQ GS After Ship", ("GS Token", 0x07, 0x10, None, ("Shadow Temple", "Skulltulas",))),
("Shadow Temple MQ GS Near Boss", ("GS Token", 0x07, 0x04, None, ("Shadow Temple", "Skulltulas",))),
# Shadow Temple shared
("Shadow Temple Bongo Bongo Heart", ("BossHeart", 0x18, 0x4F, None, ("Shadow Temple",))),
# Spirit Temple shared
# Vanilla and MQ locations are mixed to ensure the positions of Silver Gauntlets/Mirror Shield chests are correct for both versions
("Spirit Temple Child Bridge Chest", ("Chest", 0x06, 0x08, None, ("Spirit Temple",))),
("Spirit Temple Child Early Torches Chest", ("Chest", 0x06, 0x00, None, ("Spirit Temple",))),
("Spirit Temple Child Climb North Chest", ("Chest", 0x06, 0x06, None, ("Spirit Temple",))),
("Spirit Temple Child Climb East Chest", ("Chest", 0x06, 0x0C, None, ("Spirit Temple",))),
("Spirit Temple Map Chest", ("Chest", 0x06, 0x03, None, ("Spirit Temple",))),
("Spirit Temple Sun Block Room Chest", ("Chest", 0x06, 0x01, None, ("Spirit Temple",))),
("Spirit Temple MQ Entrance Front Left Chest", ("Chest", 0x06, 0x1A, None, ("Spirit Temple",))),
("Spirit Temple MQ Entrance Back Right Chest", ("Chest", 0x06, 0x1F, None, ("Spirit Temple",))),
("Spirit Temple MQ Entrance Front Right Chest", ("Chest", 0x06, 0x1B, None, ("Spirit Temple",))),
("Spirit Temple MQ Entrance Back Left Chest", ("Chest", 0x06, 0x1E, None, ("Spirit Temple",))),
("Spirit Temple MQ Map Chest", ("Chest", 0x06, 0x00, None, ("Spirit Temple",))),
("Spirit Temple MQ Map Room Enemy Chest", ("Chest", 0x06, 0x08, None, ("Spirit Temple",))),
("Spirit Temple MQ Child Climb North Chest", ("Chest", 0x06, 0x06, None, ("Spirit Temple",))),
("Spirit Temple MQ Child Climb South Chest", ("Chest", 0x06, 0x0C, None, ("Spirit Temple",))),
("Spirit Temple MQ Compass Chest", ("Chest", 0x06, 0x03, None, ("Spirit Temple",))),
("Spirit Temple MQ Silver Block Hallway Chest", ("Chest", 0x06, 0x1C, None, ("Spirit Temple",))),
("Spirit Temple MQ Sun Block Room Chest", ("Chest", 0x06, 0x01, None, ("Spirit Temple",))),
("Spirit Temple Silver Gauntlets Chest", ("Chest", 0x5C, 0x0B, None, ("Spirit Temple", "Desert Colossus"))),
("Spirit Temple Compass Chest", ("Chest", 0x06, 0x04, None, ("Spirit Temple",))),
("Spirit Temple Early Adult Right Chest", ("Chest", 0x06, 0x07, None, ("Spirit Temple",))),
("Spirit Temple First Mirror Left Chest", ("Chest", 0x06, 0x0D, None, ("Spirit Temple",))),
("Spirit Temple First Mirror Right Chest", ("Chest", 0x06, 0x0E, None, ("Spirit Temple",))),
("Spirit Temple Statue Room Northeast Chest", ("Chest", 0x06, 0x0F, None, ("Spirit Temple",))),
("Spirit Temple Statue Room Hand Chest", ("Chest", 0x06, 0x02, None, ("Spirit Temple",))),
("Spirit Temple Near Four Armos Chest", ("Chest", 0x06, 0x05, None, ("Spirit Temple",))),
("Spirit Temple Hallway Right Invisible Chest", ("Chest", 0x06, 0x14, None, ("Spirit Temple",))),
("Spirit Temple Hallway Left Invisible Chest", ("Chest", 0x06, 0x15, None, ("Spirit Temple",))),
("Spirit Temple MQ Child Hammer Switch Chest", ("Chest", 0x06, 0x1D, None, ("Spirit Temple",))),
("Spirit Temple MQ Statue Room Lullaby Chest", ("Chest", 0x06, 0x0F, None, ("Spirit Temple",))),
("Spirit Temple MQ Statue Room Invisible Chest", ("Chest", 0x06, 0x02, None, ("Spirit Temple",))),
("Spirit Temple MQ Leever Room Chest", ("Chest", 0x06, 0x04, None, ("Spirit Temple",))),
("Spirit Temple MQ Symphony Room Chest", ("Chest", 0x06, 0x07, None, ("Spirit Temple",))),
("Spirit Temple MQ Beamos Room Chest", ("Chest", 0x06, 0x19, None, ("Spirit Temple",))),
("Spirit Temple MQ Chest Switch Chest", ("Chest", 0x06, 0x18, None, ("Spirit Temple",))),
("Spirit Temple MQ Boss Key Chest", ("Chest", 0x06, 0x05, None, ("Spirit Temple",))),
("Spirit Temple Mirror Shield Chest", ("Chest", 0x5C, 0x09, None, ("Spirit Temple", "Desert Colossus"))),
("Spirit Temple Boss Key Chest", ("Chest", 0x06, 0x0A, None, ("Spirit Temple",))),
("Spirit Temple Topmost Chest", ("Chest", 0x06, 0x12, None, ("Spirit Temple",))),
("Spirit Temple MQ Mirror Puzzle Invisible Chest", ("Chest", 0x06, 0x12, None, ("Spirit Temple",))),
("Spirit Temple GS Metal Fence", ("GS Token", 0x06, 0x10, None, ("Spirit Temple", "Skulltulas",))),
("Spirit Temple GS Sun on Floor Room", ("GS Token", 0x06, 0x08, None, ("Spirit Temple", "Skulltulas",))),
("Spirit Temple GS Hall After Sun Block Room", ("GS Token", 0x06, 0x01, None, ("Spirit Temple", "Skulltulas",))),
("Spirit Temple GS Lobby", ("GS Token", 0x06, 0x04, None, ("Spirit Temple", "Skulltulas",))),
("Spirit Temple GS Boulder Room", ("GS Token", 0x06, 0x02, None, ("Spirit Temple", "Skulltulas",))),
("Spirit Temple MQ GS Sun Block Room", ("GS Token", 0x06, 0x01, None, ("Spirit Temple", "Skulltulas",))),
("Spirit Temple MQ GS Leever Room", ("GS Token", 0x06, 0x02, None, ("Spirit Temple", "Skulltulas",))),
("Spirit Temple MQ GS Symphony Room", ("GS Token", 0x06, 0x08, None, ("Spirit Temple", "Skulltulas",))),
("Spirit Temple MQ GS Nine Thrones Room West", ("GS Token", 0x06, 0x04, None, ("Spirit Temple", "Skulltulas",))),
("Spirit Temple MQ GS Nine Thrones Room North", ("GS Token", 0x06, 0x10, None, ("Spirit Temple", "Skulltulas",))),
("Spirit Temple Twinrova Heart", ("BossHeart", 0x17, 0x4F, None, ("Spirit Temple",))),
# Ice Cavern vanilla
("Ice Cavern Map Chest", ("Chest", 0x09, 0x00, None, ("Ice Cavern",))),
("Ice Cavern Compass Chest", ("Chest", 0x09, 0x01, None, ("Ice Cavern",))),
("Ice Cavern Freestanding PoH", ("Collectable", 0x09, 0x01, None, ("Ice Cavern",))),
("Ice Cavern Iron Boots Chest", ("Chest", 0x09, 0x02, None, ("Ice Cavern",))),
("Ice Cavern GS Spinning Scythe Room", ("GS Token", 0x09, 0x02, None, ("Ice Cavern", "Skulltulas",))),
("Ice Cavern GS Heart Piece Room", ("GS Token", 0x09, 0x04, None, ("Ice Cavern", "Skulltulas",))),
("Ice Cavern GS Push Block Room", ("GS Token", 0x09, 0x01, None, ("Ice Cavern", "Skulltulas",))),
# Ice Cavern MQ
("Ice Cavern MQ Map Chest", ("Chest", 0x09, 0x01, None, ("Ice Cavern",))),
("Ice Cavern MQ Compass Chest", ("Chest", 0x09, 0x00, None, ("Ice Cavern",))),
("Ice Cavern MQ Freestanding PoH", ("Collectable", 0x09, 0x01, None, ("Ice Cavern",))),
("Ice Cavern MQ Iron Boots Chest", ("Chest", 0x09, 0x02, None, ("Ice Cavern",))),
("Ice Cavern MQ GS Red Ice", ("GS Token", 0x09, 0x02, None, ("Ice Cavern", "Skulltulas",))),
("Ice Cavern MQ GS Ice Block", ("GS Token", 0x09, 0x04, None, ("Ice Cavern", "Skulltulas",))),
("Ice Cavern MQ GS Scarecrow", ("GS Token", 0x09, 0x01, None, ("Ice Cavern", "Skulltulas",))),
# Gerudo Training Grounds vanilla
("Gerudo Training Grounds Lobby Left Chest", ("Chest", 0x0B, 0x13, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds Lobby Right Chest", ("Chest", 0x0B, 0x07, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds Stalfos Chest", ("Chest", 0x0B, 0x00, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds Before Heavy Block Chest", ("Chest", 0x0B, 0x11, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds Heavy Block First Chest", ("Chest", 0x0B, 0x0F, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds Heavy Block Second Chest", ("Chest", 0x0B, 0x0E, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds Heavy Block Third Chest", ("Chest", 0x0B, 0x14, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds Heavy Block Fourth Chest", ("Chest", 0x0B, 0x02, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds Eye Statue Chest", ("Chest", 0x0B, 0x03, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds Near Scarecrow Chest", ("Chest", 0x0B, 0x04, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds Hammer Room Clear Chest", ("Chest", 0x0B, 0x12, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds Hammer Room Switch Chest", ("Chest", 0x0B, 0x10, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds Freestanding Key", ("Collectable", 0x0B, 0x01, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds Maze Right Central Chest", ("Chest", 0x0B, 0x05, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds Maze Right Side Chest", ("Chest", 0x0B, 0x08, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds Underwater Silver Rupee Chest", ("Chest", 0x0B, 0x0D, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds Beamos Chest", ("Chest", 0x0B, 0x01, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds Hidden Ceiling Chest", ("Chest", 0x0B, 0x0B, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds Maze Path First Chest", ("Chest", 0x0B, 0x06, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds Maze Path Second Chest", ("Chest", 0x0B, 0x0A, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds Maze Path Third Chest", ("Chest", 0x0B, 0x09, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds Maze Path Final Chest", ("Chest", 0x0B, 0x0C, None, ("Gerudo Training Grounds",))),
# Gerudo Training Grounds MQ
("Gerudo Training Grounds MQ Lobby Left Chest", ("Chest", 0x0B, 0x13, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds MQ Lobby Right Chest", ("Chest", 0x0B, 0x07, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds MQ First Iron Knuckle Chest", ("Chest", 0x0B, 0x00, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds MQ Before Heavy Block Chest", ("Chest", 0x0B, 0x11, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds MQ Heavy Block Chest", ("Chest", 0x0B, 0x02, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds MQ Eye Statue Chest", ("Chest", 0x0B, 0x03, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds MQ Ice Arrows Chest", ("Chest", 0x0B, 0x04, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds MQ Second Iron Knuckle Chest",("Chest", 0x0B, 0x12, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds MQ Flame Circle Chest", ("Chest", 0x0B, 0x0E, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds MQ Maze Right Central Chest", ("Chest", 0x0B, 0x05, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds MQ Maze Right Side Chest", ("Chest", 0x0B, 0x08, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds MQ Underwater Silver Rupee Chest", ("Chest", 0x0B, 0x0D, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds MQ Dinolfos Chest", ("Chest", 0x0B, 0x01, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds MQ Hidden Ceiling Chest", ("Chest", 0x0B, 0x0B, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds MQ Maze Path First Chest", ("Chest", 0x0B, 0x06, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds MQ Maze Path Third Chest", ("Chest", 0x0B, 0x09, None, ("Gerudo Training Grounds",))),
("Gerudo Training Grounds MQ Maze Path Second Chest", ("Chest", 0x0B, 0x0A, None, ("Gerudo Training Grounds",))),
# Ganon's Castle vanilla
("Ganons Castle Forest Trial Chest", ("Chest", 0x0D, 0x09, None, ("Ganon's Castle",))),
("Ganons Castle Water Trial Left Chest", ("Chest", 0x0D, 0x07, None, ("Ganon's Castle",))),
("Ganons Castle Water Trial Right Chest", ("Chest", 0x0D, 0x06, None, ("Ganon's Castle",))),
("Ganons Castle Shadow Trial Front Chest", ("Chest", 0x0D, 0x08, None, ("Ganon's Castle",))),
("Ganons Castle Shadow Trial Golden Gauntlets Chest", ("Chest", 0x0D, 0x05, None, ("Ganon's Castle",))),
("Ganons Castle Light Trial First Left Chest", ("Chest", 0x0D, 0x0C, None, ("Ganon's Castle",))),
("Ganons Castle Light Trial Second Left Chest", ("Chest", 0x0D, 0x0B, None, ("Ganon's Castle",))),
("Ganons Castle Light Trial Third Left Chest", ("Chest", 0x0D, 0x0D, None, ("Ganon's Castle",))),
("Ganons Castle Light Trial First Right Chest", ("Chest", 0x0D, 0x0E, None, ("Ganon's Castle",))),
("Ganons Castle Light Trial Second Right Chest", ("Chest", 0x0D, 0x0A, None, ("Ganon's Castle",))),
("Ganons Castle Light Trial Third Right Chest", ("Chest", 0x0D, 0x0F, None, ("Ganon's Castle",))),
("Ganons Castle Light Trial Invisible Enemies Chest", ("Chest", 0x0D, 0x10, None, ("Ganon's Castle",))),
("Ganons Castle Light Trial Lullaby Chest", ("Chest", 0x0D, 0x11, None, ("Ganon's Castle",))),
("Ganons Castle Spirit Trial Crystal Switch Chest", ("Chest", 0x0D, 0x12, None, ("Ganon's Castle",))),
("Ganons Castle Spirit Trial Invisible Chest", ("Chest", 0x0D, 0x14, None, ("Ganon's Castle",))),
("Ganons Castle Deku Scrub Left", ("NPC", 0x0D, 0x3A, None, ("Ganon's Castle", "Deku Scrub",))),
("Ganons Castle Deku Scrub Center-Left", ("NPC", 0x0D, 0x37, None, ("Ganon's Castle", "Deku Scrub",))),
("Ganons Castle Deku Scrub Center-Right", ("NPC", 0x0D, 0x33, None, ("Ganon's Castle", "Deku Scrub",))),
("Ganons Castle Deku Scrub Right", ("NPC", 0x0D, 0x39, None, ("Ganon's Castle", "Deku Scrub",))),
# Ganon's Castle MQ
("Ganons Castle MQ Forest Trial Freestanding Key", ("Collectable", 0x0D, 0x01, None, ("Ganon's Castle",))),
("Ganons Castle MQ Forest Trial Eye Switch Chest", ("Chest", 0x0D, 0x02, None, ("Ganon's Castle",))),
("Ganons Castle MQ Forest Trial Frozen Eye Switch Chest", ("Chest", 0x0D, 0x03, None, ("Ganon's Castle",))),
("Ganons Castle MQ Water Trial Chest", ("Chest", 0x0D, 0x01, None, ("Ganon's Castle",))),
("Ganons Castle MQ Shadow Trial Bomb Flower Chest", ("Chest", 0x0D, 0x00, None, ("Ganon's Castle",))),
("Ganons Castle MQ Shadow Trial Eye Switch Chest", ("Chest", 0x0D, 0x05, None, ("Ganon's Castle",))),
("Ganons Castle MQ Light Trial Lullaby Chest", ("Chest", 0x0D, 0x04, None, ("Ganon's Castle",))),
("Ganons Castle MQ Spirit Trial First Chest", ("Chest", 0x0D, 0x0A, None, ("Ganon's Castle",))),
("Ganons Castle MQ Spirit Trial Invisible Chest", ("Chest", 0x0D, 0x14, None, ("Ganon's Castle",))),
("Ganons Castle MQ Spirit Trial Sun Front Left Chest", ("Chest", 0x0D, 0x09, None, ("Ganon's Castle",))),
("Ganons Castle MQ Spirit Trial Sun Back Left Chest", ("Chest", 0x0D, 0x08, None, ("Ganon's Castle",))),
("Ganons Castle MQ Spirit Trial Sun Back Right Chest", ("Chest", 0x0D, 0x07, None, ("Ganon's Castle",))),
("Ganons Castle MQ Spirit Trial Golden Gauntlets Chest",("Chest", 0x0D, 0x06, None, ("Ganon's Castle",))),
("Ganons Castle MQ Deku Scrub Left", ("NPC", 0x0D, 0x3A, None, ("Ganon's Castle", "Deku Scrub",))),
("Ganons Castle MQ Deku Scrub Center-Left", ("NPC", 0x0D, 0x37, None, ("Ganon's Castle", "Deku Scrub",))),
("Ganons Castle MQ Deku Scrub Center", ("NPC", 0x0D, 0x33, None, ("Ganon's Castle", "Deku Scrub",))),
("Ganons Castle MQ Deku Scrub Center-Right", ("NPC", 0x0D, 0x39, None, ("Ganon's Castle", "Deku Scrub",))),
("Ganons Castle MQ Deku Scrub Right", ("NPC", 0x0D, 0x30, None, ("Ganon's Castle", "Deku Scrub",))),
# Ganon's Castle shared
("Ganons Tower Boss Key Chest", ("Chest", 0x0A, 0x0B, None, ("Ganon's Castle",))),
## Events and Drops
("Pierre", ("Event", None, None, None, None)),
("Deliver Rutos Letter", ("Event", None, None, None, None)),
("Master Sword Pedestal", ("Event", None, None, None, None)),
("Deku Baba Sticks", ("Drop", None, None, None, None)),
("Deku Baba Nuts", ("Drop", None, None, None, None)),
("Stick Pot", ("Drop", None, None, None, None)),
("Nut Pot", ("Drop", None, None, None, None)),
("Nut Crate", ("Drop", None, None, None, None)),
("Blue Fire", ("Drop", None, None, None, None)),
("Lone Fish", ("Drop", None, None, None, None)),
("Fish Group", ("Drop", None, None, None, None)),
("Bug Rock", ("Drop", None, None, None, None)),
("Bug Shrub", ("Drop", None, None, None, None)),
("Wandering Bugs", ("Drop", None, None, None, None)),
("Fairy Pot", ("Drop", None, None, None, None)),
("Free Fairies", ("Drop", None, None, None, None)),
("Wall Fairy", ("Drop", None, None, None, None)),
("Butterfly Fairy", ("Drop", None, None, None, None)),
("Gossip Stone Fairy", ("Drop", None, None, None, None)),
("Bean Plant Fairy", ("Drop", None, None, None, None)),
("Fairy Pond", ("Drop", None, None, None, None)),
("Big Poe Kill", ("Drop", None, None, None, None)),
## Hints
# These are not actual locations, but are filler spots used for hint reachability.
# Hint location types must start with 'Hint'.
("DMC Gossip Stone", ("HintStone", None, None, None, None)),
("DMT Gossip Stone", ("HintStone", None, None, None, None)),
("Colossus Gossip Stone", ("HintStone", None, None, None, None)),
("Dodongos Cavern Gossip Stone", ("HintStone", None, None, None, None)),
("GV Gossip Stone", ("HintStone", None, None, None, None)),
("GC Maze Gossip Stone", ("HintStone", None, None, None, None)),
("GC Medigoron Gossip Stone", ("HintStone", None, None, None, None)),
("Graveyard Gossip Stone", ("HintStone", None, None, None, None)),
("HC Malon Gossip Stone", ("HintStone", None, None, None, None)),
("HC Rock Wall Gossip Stone", ("HintStone", None, None, None, None)),
("HC Storms Grotto Gossip Stone", ("HintStone", None, None, None, None)),
("HF Cow Grotto Gossip Stone", ("HintStone", None, None, None, None)),
("KF Deku Tree Gossip Stone (Left)", ("HintStone", None, None, None, None)),
("KF Deku Tree Gossip Stone (Right)", ("HintStone", None, None, None, None)),
("KF Gossip Stone", ("HintStone", None, None, None, None)),
("LH Lab Gossip Stone", ("HintStone", None, None, None, None)),
("LH Gossip Stone (Southeast)", ("HintStone", None, None, None, None)),
("LH Gossip Stone (Southwest)", ("HintStone", None, None, None, None)),
("LW Gossip Stone", ("HintStone", None, None, None, None)),
("SFM Maze Gossip Stone (Lower)", ("HintStone", None, None, None, None)),
("SFM Maze Gossip Stone (Upper)", ("HintStone", None, None, None, None)),
("SFM Saria Gossip Stone", ("HintStone", None, None, None, None)),
("ToT Gossip Stone (Left)", ("HintStone", None, None, None, None)),
("ToT Gossip Stone (Left-Center)", ("HintStone", None, None, None, None)),
("ToT Gossip Stone (Right)", ("HintStone", None, None, None, None)),
("ToT Gossip Stone (Right-Center)", ("HintStone", None, None, None, None)),
("ZD Gossip Stone", ("HintStone", None, None, None, None)),
("ZF Fairy Gossip Stone", ("HintStone", None, None, None, None)),
("ZF Jabu Gossip Stone", ("HintStone", None, None, None, None)),
("ZR Near Grottos Gossip Stone", ("HintStone", None, None, None, None)),
("ZR Near Domain Gossip Stone", ("HintStone", None, None, None, None)),
("HF Near Market Grotto Gossip Stone", ("HintStone", None, None, None, None)),
("HF Southeast Grotto Gossip Stone", ("HintStone", None, None, None, None)),
("HF Open Grotto Gossip Stone", ("HintStone", None, None, None, None)),
("Kak Open Grotto Gossip Stone", ("HintStone", None, None, None, None)),
("ZR Open Grotto Gossip Stone", ("HintStone", None, None, None, None)),
("KF Storms Grotto Gossip Stone", ("HintStone", None, None, None, None)),
("LW Near Shortcuts Grotto Gossip Stone", ("HintStone", None, None, None, None)),
("DMT Storms Grotto Gossip Stone", ("HintStone", None, None, None, None)),
("DMC Upper Grotto Gossip Stone", ("HintStone", None, None, None, None)),
("Ganondorf Hint", ("Hint", None, None, None, None)),
])
location_sort_order = {
loc: i for i, loc in enumerate(location_table.keys())
}
# Business Scrub Details
business_scrubs = [
# id price text text replacement
(0x30, 20, 0x10A0, ["Deku Nuts", "a \x05\x42mysterious item\x05\x40"]),
(0x31, 15, 0x10A1, ["Deku Sticks", "a \x05\x42mysterious item\x05\x40"]),
(0x3E, 10, 0x10A2, ["Piece of Heart", "\x05\x42mysterious item\x05\x40"]),
(0x33, 40, 0x10CA, ["\x05\x41Deku Seeds", "a \x05\x42mysterious item"]),
(0x34, 50, 0x10CB, ["\x41Deku Shield", "\x42mysterious item"]),
(0x37, 40, 0x10CC, ["\x05\x41Bombs", "a \x05\x42mysterious item"]),
(0x38, 00, 0x10CD, ["\x05\x41Arrows", "a \x05\x42mysterious item"]), # unused
(0x39, 40, 0x10CE, ["\x05\x41Red Potion", "\x05\x42mysterious item"]),
(0x3A, 40, 0x10CF, ["Green Potion", "mysterious item"]),
(0x77, 40, 0x10DC, ["enable you to pick up more\x01\x05\x41Deku Sticks", "sell you a \x05\x42mysterious item"]),
(0x79, 40, 0x10DD, ["enable you to pick up more \x05\x41Deku\x01Nuts", "sell you a \x05\x42mysterious item"]),
]
dungeons = ('Deku Tree', 'Dodongo\'s Cavern', 'Jabu Jabu\'s Belly', 'Forest Temple', 'Fire Temple', 'Water Temple', 'Spirit Temple', 'Shadow Temple', 'Ice Cavern', 'Bottom of the Well', 'Gerudo Training Grounds', 'Ganon\'s Castle')
location_groups = {
'Song': [name for (name, data) in location_table.items() if data[0] == 'Song'],
'Chest': [name for (name, data) in location_table.items() if data[0] == 'Chest'],
'Collectable': [name for (name, data) in location_table.items() if data[0] == 'Collectable'],
'BossHeart': [name for (name, data) in location_table.items() if data[0] == 'BossHeart'],
'CollectableLike': [name for (name, data) in location_table.items() if data[0] in ('Collectable', 'BossHeart', 'GS Token')],
'CanSee': [name for (name, data) in location_table.items()
if data[0] in ('Collectable', 'BossHeart', 'GS Token', 'Shop')
# Treasure Box Shop, Bombchu Bowling, Hyrule Field (OoT), Lake Hylia (RL/FA)
or data[0:2] in [('Chest', 0x10), ('NPC', 0x4B), ('NPC', 0x51), ('NPC', 0x57)]],
'Dungeon': [name for (name, data) in location_table.items() if data[4] is not None and any(dungeon in data[4] for dungeon in dungeons)],
}
def location_is_viewable(loc_name, correct_chest_sizes):
return correct_chest_sizes and loc_name in location_groups['Chest'] or loc_name in location_groups['CanSee']
# Function to run exactly once after after placing items in drop locations for each world
# Sets all Drop locations to a unique name in order to avoid name issues and to identify locations in the spoiler
def set_drop_location_names(world):
for location in world.get_locations():
if location.type == 'Drop':
location.name = location.parent_region.name + " " + location.name
| 117.967811 | 231 | 0.4442 |
acf2c0aa1f088833625b011a16bc9d7260c0cecb | 1,046 | py | Python | examples/http/hello_world.py | viatoriche/microservices | 3510563edd15dc6131b8a948d6062856cd904ac7 | [
"MIT"
] | 18 | 2016-04-04T03:01:37.000Z | 2020-08-18T03:03:40.000Z | examples/http/hello_world.py | viatoriche/microservices | 3510563edd15dc6131b8a948d6062856cd904ac7 | [
"MIT"
] | 7 | 2016-05-06T14:23:16.000Z | 2019-11-20T11:16:35.000Z | examples/http/hello_world.py | viatoriche/microservices | 3510563edd15dc6131b8a948d6062856cd904ac7 | [
"MIT"
] | 5 | 2016-05-06T08:20:40.000Z | 2019-07-13T01:34:38.000Z | from microservices.http.service import Microservice
from microservices.http.resources import ResourceMarker, ResourceSchema, BrowserResourceSchema
from flask import request
import datetime
app = Microservice(__name__)
app.config['SCHEMA'] = ResourceSchema(
response='result',
response_update=False,
status_code='status',
browser=BrowserResourceSchema(
status=None,
)
)
@app.route(
'/',
resource=ResourceMarker(
update={
'resource_created': datetime.datetime.now().isoformat()
},
),
)
def hello_world():
return {'hello': 'Hello, world'}
@app.route(
'/<string:one>/<string:two>/<string:three>/',
methods=['GET', 'POST'],
resource=ResourceMarker(
url_params={'one': 'one', 'two': 'two', 'three': 'three'},
)
)
def one_two_three(one, two, three):
response = {'one': one, 'two': two, 'three': three}
if request.method == 'POST':
response['data'] = request.data
return response
if __name__ == '__main__':
app.run(debug=True)
| 23.244444 | 94 | 0.644359 |
acf2c1a2324d229c986da3a2fffd82f810cf3cd2 | 53,833 | py | Python | py34/bacpypes/apdu.py | epaulson/bacpypes | 4111b8604a16fa2b7f80d8104a43b9f3e28dfc78 | [
"MIT"
] | null | null | null | py34/bacpypes/apdu.py | epaulson/bacpypes | 4111b8604a16fa2b7f80d8104a43b9f3e28dfc78 | [
"MIT"
] | null | null | null | py34/bacpypes/apdu.py | epaulson/bacpypes | 4111b8604a16fa2b7f80d8104a43b9f3e28dfc78 | [
"MIT"
] | null | null | null | #!/usr/bin/python
"""
Application Layer Protocol Data Units
"""
from .errors import DecodingError, TooManyArguments
from .debugging import ModuleLogger, DebugContents, bacpypes_debugging
from .pdu import PCI, PDUData
from .primitivedata import Boolean, CharacterString, Enumerated, Integer, \
ObjectIdentifier, ObjectType, OctetString, Real, TagList, Unsigned, \
expand_enumerations
from .constructeddata import Any, Choice, Element, Sequence, SequenceOf
from .basetypes import ChannelValue, DateTime, DeviceAddress, ErrorType, \
EventState, EventTransitionBits, EventType, LifeSafetyOperation, \
NotificationParameters, NotifyType, ObjectPropertyReference, \
PropertyIdentifier, PropertyReference, PropertyValue, RecipientProcess, \
ResultFlags, Segmentation, TimeStamp, VTClass
# some debugging
_debug = 0
_log = ModuleLogger(globals())
# a dictionary of message type values and classes
apdu_types = {}
def register_apdu_type(klass):
apdu_types[klass.pduType] = klass
# a dictionary of confirmed request choices and classes
confirmed_request_types = {}
def register_confirmed_request_type(klass):
confirmed_request_types[klass.serviceChoice] = klass
# a dictionary of complex ack choices and classes
complex_ack_types = {}
def register_complex_ack_type(klass):
complex_ack_types[klass.serviceChoice] = klass
# a dictionary of unconfirmed request choices and classes
unconfirmed_request_types = {}
def register_unconfirmed_request_type(klass):
unconfirmed_request_types[klass.serviceChoice] = klass
# a dictionary of unconfirmed request choices and classes
error_types = {}
def register_error_type(klass):
error_types[klass.serviceChoice] = klass
#
# encode_max_segments_accepted/decode_max_segments_accepted
#
_max_segments_accepted_encoding = [
None, 2, 4, 8, 16, 32, 64, None,
]
def encode_max_segments_accepted(arg):
"""Encode the maximum number of segments the device will accept, Section
20.1.2.4, and if the device says it can only accept one segment it shouldn't
say that it supports segmentation!"""
# unspecified
if not arg:
return 0
if arg > 64:
return 7
# the largest number not greater than the arg
for i in range(6, 0, -1):
if _max_segments_accepted_encoding[i] <= arg:
return i
raise ValueError("invalid max max segments accepted: {0}".format(arg))
def decode_max_segments_accepted(arg):
"""Decode the maximum number of segments the device will accept, Section
20.1.2.4"""
return _max_segments_accepted_encoding[arg]
#
# encode_max_apdu_length_accepted/decode_max_apdu_length_accepted
#
_max_apdu_length_encoding = [50, 128, 206, 480, 1024, 1476, None, None,
None, None, None, None, None, None, None, None]
def encode_max_apdu_length_accepted(arg):
"""Return the encoding of the highest encodable value less than the
value of the arg."""
for i in range(5, -1, -1):
if (arg >= _max_apdu_length_encoding[i]):
return i
raise ValueError("invalid max APDU length accepted: {0}".format(arg))
def decode_max_apdu_length_accepted(arg):
v = _max_apdu_length_encoding[arg]
if not v:
raise ValueError("invalid max APDU length accepted: {0}".format(arg))
return v
#
# APCI
#
@bacpypes_debugging
class APCI(PCI, DebugContents):
    """BACnet APDU protocol control information (the fixed APDU header).

    Holds the header fields that precede the service data in every APDU:
    PDU type, segmentation flags, sequence/window values, invoke ID,
    service choice, and abort/reject reason.  Which fields are meaningful
    depends on apduType; encode()/decode() implement the per-type octet
    layout.
    """

    _debug_contents = ('apduType', 'apduSeg', 'apduMor', 'apduSA', 'apduSrv'
        , 'apduNak', 'apduSeq', 'apduWin', 'apduMaxSegs', 'apduMaxResp'
        , 'apduService', 'apduInvokeID', 'apduAbortRejectReason'
        )

    def __init__(self, *args, **kwargs):
        """Initialize every header field to None; decode() or a PDU
        subclass fills in the ones relevant to each PDU type."""
        if _debug: APCI._debug("__init__ %r %r", args, kwargs)
        super(APCI, self).__init__(*args, **kwargs)
        self.apduType = None
        self.apduSeg = None                 # segmented
        self.apduMor = None                 # more follows
        self.apduSA = None                  # segmented response accepted
        self.apduSrv = None                 # sent by server
        self.apduNak = None                 # negative acknowledgement
        self.apduSeq = None                 # sequence number
        self.apduWin = None                 # actual/proposed window size
        self.apduMaxSegs = None             # maximum segments accepted (decoded)
        self.apduMaxResp = None             # max response accepted (decoded)
        self.apduService = None             # service choice
        self.apduInvokeID = None            # invoke ID of the transaction
        self.apduAbortRejectReason = None   # reason code for abort/reject PDUs

    def update(self, apci):
        """Copy every APCI header field (and the PCI fields) from apci."""
        PCI.update(self, apci)
        self.apduType = apci.apduType
        self.apduSeg = apci.apduSeg
        self.apduMor = apci.apduMor
        self.apduSA = apci.apduSA
        self.apduSrv = apci.apduSrv
        self.apduNak = apci.apduNak
        self.apduSeq = apci.apduSeq
        self.apduWin = apci.apduWin
        self.apduMaxSegs = apci.apduMaxSegs
        self.apduMaxResp = apci.apduMaxResp
        self.apduService = apci.apduService
        self.apduInvokeID = apci.apduInvokeID
        self.apduAbortRejectReason = apci.apduAbortRejectReason

    def __repr__(self):
        """Return a string representation of the PDU."""
        # start with the class name
        sname = self.__module__ + '.' + self.__class__.__name__

        # expand the type if possible
        stype = apdu_types.get(self.apduType, None)
        if stype:
            stype = stype.__name__
        else:
            stype = '?'

        # add the invoke ID if it has one
        if self.apduInvokeID is not None:
            stype += ',' + str(self.apduInvokeID)

        # put it together
        return "<{0}({1}) instance at {2}>".format(sname, stype, hex(id(self)))

    def encode(self, pdu):
        """encode the contents of the APCI into the PDU.

        The layout of the header octets depends on apduType; each branch
        below matches the corresponding branch in decode().
        """
        if _debug: APCI._debug("encode %r", pdu)
        PCI.update(pdu, self)

        if (self.apduType == ConfirmedRequestPDU.pduType):
            # PDU type in the high nibble, SEG/MOR/SA flags in the low
            buff = self.apduType << 4
            if self.apduSeg:
                buff += 0x08
            if self.apduMor:
                buff += 0x04
            if self.apduSA:
                buff += 0x02
            pdu.put(buff)
            # max-segments (high nibble) and max-APDU-response (low nibble)
            pdu.put((self.apduMaxSegs << 4) + self.apduMaxResp)
            pdu.put(self.apduInvokeID)
            # sequence number and window size only appear when segmented
            if self.apduSeg:
                pdu.put(self.apduSeq)
                pdu.put(self.apduWin)
            pdu.put(self.apduService)

        elif (self.apduType == UnconfirmedRequestPDU.pduType):
            pdu.put(self.apduType << 4)
            pdu.put(self.apduService)

        elif (self.apduType == SimpleAckPDU.pduType):
            pdu.put(self.apduType << 4)
            pdu.put(self.apduInvokeID)
            pdu.put(self.apduService)

        elif (self.apduType == ComplexAckPDU.pduType):
            # PDU type with SEG/MOR flags (no SA flag for acks)
            buff = self.apduType << 4
            if self.apduSeg:
                buff += 0x08
            if self.apduMor:
                buff += 0x04
            pdu.put(buff)
            pdu.put(self.apduInvokeID)
            if self.apduSeg:
                pdu.put(self.apduSeq)
                pdu.put(self.apduWin)
            pdu.put(self.apduService)

        elif (self.apduType == SegmentAckPDU.pduType):
            # PDU type with NAK/SRV flags
            buff = self.apduType << 4
            if self.apduNak:
                buff += 0x02
            if self.apduSrv:
                buff += 0x01
            pdu.put(buff)
            pdu.put(self.apduInvokeID)
            pdu.put(self.apduSeq)
            pdu.put(self.apduWin)

        elif (self.apduType == ErrorPDU.pduType):
            pdu.put(self.apduType << 4)
            pdu.put(self.apduInvokeID)
            pdu.put(self.apduService)

        elif (self.apduType == RejectPDU.pduType):
            pdu.put(self.apduType << 4)
            pdu.put(self.apduInvokeID)
            pdu.put(self.apduAbortRejectReason)

        elif (self.apduType == AbortPDU.pduType):
            # PDU type with the SRV flag
            buff = self.apduType << 4
            if self.apduSrv:
                buff += 0x01
            pdu.put(buff)
            pdu.put(self.apduInvokeID)
            pdu.put(self.apduAbortRejectReason)

        else:
            raise ValueError("invalid APCI.apduType")

    def decode(self, pdu):
        """decode the contents of the PDU into the APCI.

        Mirrors encode(); consumes the header octets from pdu and, for the
        PDU types that carry service data, keeps a reference to the
        remaining octets in self.pduData (shared with the source PDU).
        """
        if _debug: APCI._debug("decode %r", pdu)
        PCI.update(self, pdu)

        # decode the first octet
        buff = pdu.get()

        # decode the APCI type from the high nibble
        self.apduType = (buff >> 4) & 0x0F

        if (self.apduType == ConfirmedRequestPDU.pduType):
            self.apduSeg = ((buff & 0x08) != 0)
            self.apduMor = ((buff & 0x04) != 0)
            self.apduSA  = ((buff & 0x02) != 0)
            buff = pdu.get()
            # max-segments is a 3-bit field, max-response a 4-bit field
            self.apduMaxSegs = (buff >> 4) & 0x07
            self.apduMaxResp = buff & 0x0F
            self.apduInvokeID = pdu.get()
            if self.apduSeg:
                self.apduSeq = pdu.get()
                self.apduWin = pdu.get()
            self.apduService = pdu.get()
            self.pduData = pdu.pduData

        elif (self.apduType == UnconfirmedRequestPDU.pduType):
            self.apduService = pdu.get()
            self.pduData = pdu.pduData

        elif (self.apduType == SimpleAckPDU.pduType):
            self.apduInvokeID = pdu.get()
            self.apduService = pdu.get()

        elif (self.apduType == ComplexAckPDU.pduType):
            self.apduSeg = ((buff & 0x08) != 0)
            self.apduMor = ((buff & 0x04) != 0)
            self.apduInvokeID = pdu.get()
            if self.apduSeg:
                self.apduSeq = pdu.get()
                self.apduWin = pdu.get()
            self.apduService = pdu.get()
            self.pduData = pdu.pduData

        elif (self.apduType == SegmentAckPDU.pduType):
            self.apduNak = ((buff & 0x02) != 0)
            self.apduSrv = ((buff & 0x01) != 0)
            self.apduInvokeID = pdu.get()
            self.apduSeq = pdu.get()
            self.apduWin = pdu.get()

        elif (self.apduType == ErrorPDU.pduType):
            self.apduInvokeID = pdu.get()
            self.apduService = pdu.get()
            self.pduData = pdu.pduData

        elif (self.apduType == RejectPDU.pduType):
            self.apduInvokeID = pdu.get()
            self.apduAbortRejectReason = pdu.get()

        elif (self.apduType == AbortPDU.pduType):
            self.apduSrv = ((buff & 0x01) != 0)
            self.apduInvokeID = pdu.get()
            self.apduAbortRejectReason = pdu.get()
            self.pduData = pdu.pduData

        else:
            raise DecodingError("invalid APDU type")

    def apci_contents(self, use_dict=None, as_class=dict):
        """Return the contents of an object as a dict.

        Field values are mapped to readable names where a registry exists
        (apdu_types, the *_types service registries); other fields are
        stored as-is.
        """
        if _debug: APCI._debug("apci_contents use_dict=%r as_class=%r", use_dict, as_class)

        # make/extend the dictionary of content
        if use_dict is None:
            use_dict = as_class()

        # copy the source and destination to make it easier to search
        if self.pduSource:
            use_dict.__setitem__('source', str(self.pduSource))
        if self.pduDestination:
            use_dict.__setitem__('destination', str(self.pduDestination))

        # loop through the elements
        for attr in APCI._debug_contents:
            value = getattr(self, attr, None)
            if value is None:
                continue

            if attr == 'apduType':
                mapped_value = apdu_types[self.apduType].__name__
            elif attr == 'apduService':
                if self.apduType in (ConfirmedRequestPDU.pduType, SimpleAckPDU.pduType, ComplexAckPDU.pduType):
                    mapped_value = confirmed_request_types[self.apduService].__name__
                elif (self.apduType == UnconfirmedRequestPDU.pduType):
                    mapped_value = unconfirmed_request_types[self.apduService].__name__
                elif (self.apduType == ErrorPDU.pduType):
                    mapped_value = error_types[self.apduService].__name__
            # NOTE(review): if apduService is set for a PDU type with no
            # branch above, mapped_value keeps the value from the previous
            # attribute -- looks unintended; confirm before relying on it
            else:
                mapped_value = value

            # save the mapped value
            use_dict.__setitem__(attr, mapped_value)

        # return what we built/updated
        return use_dict
#
# APDU
#
@bacpypes_debugging
class APDU(APCI, PDUData):
    """An APCI header together with its service data octets."""

    def __init__(self, *args, **kwargs):
        if _debug:
            APDU._debug("__init__ %r %r", args, kwargs)
        super(APDU, self).__init__(*args, **kwargs)

    def encode(self, pdu):
        """Encode the header and then the service data into pdu."""
        if _debug:
            APDU._debug("encode %s", str(pdu))

        APCI.encode(self, pdu)
        pdu.put_data(self.pduData)

    def decode(self, pdu):
        """Decode the header from pdu; whatever remains is service data."""
        if _debug:
            APDU._debug("decode %s", str(pdu))

        APCI.decode(self, pdu)
        self.pduData = pdu.get_data(len(pdu.pduData))

    def apdu_contents(self, use_dict=None, as_class=dict):
        """Return the data portion of the APDU as a dict."""
        return PDUData.pdudata_contents(self, use_dict=use_dict, as_class=as_class)

    def dict_contents(self, use_dict=None, as_class=dict):
        """Return the contents of an object as a dict."""
        if _debug:
            APDU._debug("dict_contents use_dict=%r as_class=%r", use_dict, as_class)

        # make/extend the dictionary of content
        if use_dict is None:
            use_dict = as_class()

        # fold in the header fields, then the data portion
        self.apci_contents(use_dict=use_dict, as_class=as_class)
        self.apdu_contents(use_dict=use_dict, as_class=as_class)

        return use_dict
#------------------------------
#
# _APDU
#
# This class masks the encode() and decode() functions of the APDU
# so that derived classes use the update function to copy the contents
# between PDU's. Otherwise the APCI content would be decoded twice.
#
@bacpypes_debugging
class _APDU(APDU):
    """Intermediate APDU base class.

    Masks APDU.encode()/decode() so that derived classes copy header
    fields with update() rather than re-running the APCI octet codec,
    which would decode the header twice.
    """

    def encode(self, pdu):
        if _debug:
            _APDU._debug("encode %r", pdu)

        # copy the header fields, then append the service data
        APCI.update(pdu, self)
        pdu.put_data(self.pduData)

    def decode(self, pdu):
        if _debug:
            _APDU._debug("decode %r", pdu)

        # copy the header fields, then take the remaining octets
        APCI.update(self, pdu)
        self.pduData = pdu.get_data(len(pdu.pduData))

    def set_context(self, context):
        """Address this PDU as a response to the given request."""
        if _debug:
            _APDU._debug("set_context %r", context)

        # a response goes back to the source of the request and carries
        # the same user data, priority, and invoke ID
        self.pduUserData = context.pduUserData
        self.pduDestination = context.pduSource
        self.pduNetworkPriority = context.pduNetworkPriority
        self.pduExpectingReply = 0
        self.apduInvokeID = context.apduInvokeID

    def __repr__(self):
        """Return a string representation of the APDU."""
        klass = self.__module__ + '.' + self.__class__.__name__

        # the type is the service choice, plus the invoke ID if present
        detail = str(self.apduService)
        if self.apduInvokeID is not None:
            detail += ',' + str(self.apduInvokeID)

        return "<{0}({1}) instance at {2}>".format(klass, detail, hex(id(self)))
#
# ConfirmedRequestPDU
#
@bacpypes_debugging
class ConfirmedRequestPDU(_APDU):
    """Confirmed service request (PDU type 0); always expects a reply."""

    pduType = 0

    def __init__(self, choice=None, *args, **kwargs):
        if _debug:
            ConfirmedRequestPDU._debug("__init__ %r %r %r", choice, args, kwargs)
        super(ConfirmedRequestPDU, self).__init__(*args, **kwargs)

        self.apduType = ConfirmedRequestPDU.pduType
        self.apduService = choice
        self.pduExpectingReply = 1

register_apdu_type(ConfirmedRequestPDU)
#
# UnconfirmedRequestPDU
#
@bacpypes_debugging
class UnconfirmedRequestPDU(_APDU):
    """Unconfirmed service request (PDU type 1); no reply is expected."""

    pduType = 1

    def __init__(self, choice=None, *args, **kwargs):
        if _debug:
            UnconfirmedRequestPDU._debug("__init__ %r %r %r", choice, args, kwargs)
        super(UnconfirmedRequestPDU, self).__init__(*args, **kwargs)

        self.apduType = UnconfirmedRequestPDU.pduType
        self.apduService = choice

register_apdu_type(UnconfirmedRequestPDU)
#
# SimpleAckPDU
#
@bacpypes_debugging
class SimpleAckPDU(_APDU):
    """Positive acknowledgement carrying no data (PDU type 2)."""

    pduType = 2

    def __init__(self, choice=None, invokeID=None, context=None, *args, **kwargs):
        if _debug:
            SimpleAckPDU._debug("__init__ %r %r %r %r %r", choice, invokeID, context, args, kwargs)
        super(SimpleAckPDU, self).__init__(*args, **kwargs)

        self.apduType = SimpleAckPDU.pduType
        self.apduService = choice
        self.apduInvokeID = invokeID

        # when given the request being acknowledged, answer with its
        # service choice and addressing
        if context is not None:
            self.apduService = context.apduService
            self.set_context(context)

register_apdu_type(SimpleAckPDU)
#
# ComplexAckPDU
#
@bacpypes_debugging
class ComplexAckPDU(_APDU):
    """Positive acknowledgement carrying result data (PDU type 3)."""

    pduType = 3

    def __init__(self, choice=None, invokeID=None, context=None, *args, **kwargs):
        if _debug:
            ComplexAckPDU._debug("__init__ %r %r %r %r %r", choice, invokeID, context, args, kwargs)
        super(ComplexAckPDU, self).__init__(*args, **kwargs)

        self.apduType = ComplexAckPDU.pduType
        self.apduService = choice
        self.apduInvokeID = invokeID

        # when given the request being acknowledged, answer with its
        # service choice and addressing
        if context is not None:
            self.apduService = context.apduService
            self.set_context(context)

register_apdu_type(ComplexAckPDU)
#
# SegmentAckPDU
#
@bacpypes_debugging
class SegmentAckPDU(_APDU):
    """Acknowledgement of one or more segments (PDU type 4)."""

    pduType = 4

    def __init__(self, nak=None, srv=None, invokeID=None, sequenceNumber=None, windowSize=None, *args, **kwargs):
        if _debug:
            SegmentAckPDU._debug("__init__ %r %r %r %r %r %r %r", nak, srv, invokeID, sequenceNumber, windowSize, args, kwargs)
        super(SegmentAckPDU, self).__init__(*args, **kwargs)

        self.apduType = SegmentAckPDU.pduType
        self.apduNak = nak
        self.apduSrv = srv
        self.apduInvokeID = invokeID
        self.apduSeq = sequenceNumber
        self.apduWin = windowSize

register_apdu_type(SegmentAckPDU)
#
# ErrorPDU
#
@bacpypes_debugging
class ErrorPDU(_APDU):
    """Error response (PDU type 5): the confirmed request failed."""

    pduType = 5

    def __init__(self, choice=None, invokeID=None, context=None, *args, **kwargs):
        if _debug:
            ErrorPDU._debug("__init__ %r %r %r %r %r", choice, invokeID, context, args, kwargs)
        super(ErrorPDU, self).__init__(*args, **kwargs)

        self.apduType = ErrorPDU.pduType
        self.apduService = choice
        self.apduInvokeID = invokeID

        # when given the failing request, answer with its service choice
        # and addressing
        if context is not None:
            self.apduService = context.apduService
            self.set_context(context)

register_apdu_type(ErrorPDU)
#
# RejectPDU
#
class RejectReason(Enumerated):
    """Reject reason codes; 64..255 are reserved for vendor codes."""

    vendor_range = (64, 255)
    enumerations = {
        'other': 0,
        'bufferOverflow': 1,
        'inconsistentParameters': 2,
        'invalidParameterDatatype': 3,
        'invalidTag': 4,
        'missingRequiredParameter': 5,
        'parameterOutOfRange': 6,
        'tooManyArguments': 7,
        'undefinedEnumeration': 8,
        'unrecognizedService': 9,
    }

expand_enumerations(RejectReason)
@bacpypes_debugging
class RejectPDU(_APDU):
    """Reject response (PDU type 6): the request was refused unexecuted."""

    pduType = 6

    def __init__(self, invokeID=None, reason=None, context=None, *args, **kwargs):
        if _debug:
            RejectPDU._debug("__init__ %r %r %r %r %r", invokeID, reason, context, args, kwargs)
        super(RejectPDU, self).__init__(*args, **kwargs)

        self.apduType = RejectPDU.pduType
        self.apduInvokeID = invokeID

        # accept a symbolic reason and translate it to its numeric value
        if isinstance(reason, str):
            reason = RejectReason(reason).get_long()
        self.apduAbortRejectReason = reason

        # use the context to fill in most of the fields
        if context is not None:
            self.set_context(context)

register_apdu_type(RejectPDU)
#
# AbortPDU
#
class AbortReason(Enumerated):
    """Abort reason codes; 64..255 are reserved for vendor codes."""

    vendor_range = (64, 255)
    enumerations = {
        'other': 0,
        'bufferOverflow': 1,
        'invalidApduInThisState': 2,
        'preemptedByHigherPriorityTask': 3,   # wtm corrected spelling
        'segmentationNotSupported': 4,
        'securityError': 5,
        'insufficientSecurity': 6,
        'windowSizeOutOfRange': 7,
        'applicationExceededReplyTime': 8,
        'outOfResources': 9,
        'tsmTimeout': 10,
        'apduTooLong': 11,
        # 64..255 are available for vendor codes
        'serverTimeout': 64,
        'noResponse': 65,
    }

expand_enumerations(AbortReason)
@bacpypes_debugging
class AbortPDU(_APDU):
    """Abort response (PDU type 7): the transaction was terminated.

    :param srv: true when sent by the server side of the transaction
    :param invokeID: invoke ID of the aborted transaction
    :param reason: numeric reason code or an AbortReason name string
    :param context: request PDU to take addressing/invoke ID from
    """

    pduType = 7

    def __init__(self, srv=None, invokeID=None, reason=None, context=None, *args, **kwargs):
        if _debug: AbortPDU._debug("__init__ %r %r %r %r %r %r", srv, invokeID, reason, context, args, kwargs)
        super(AbortPDU, self).__init__(*args, **kwargs)

        self.apduType = AbortPDU.pduType
        self.apduSrv = srv
        self.apduInvokeID = invokeID

        # accept a symbolic reason and translate it to its numeric value
        if isinstance(reason, str):
            reason = AbortReason(reason).get_long()
        self.apduAbortRejectReason = reason

        # use the context to fill in most of the fields
        if context is not None:
            self.set_context(context)

    def __str__(self):
        """Return the reason name, or the raw value with a '?' suffix."""
        try:
            reason = AbortReason._xlate_table[self.apduAbortRejectReason]
        except Exception:
            # was a bare "except:", which also swallowed SystemExit and
            # KeyboardInterrupt; keep the best-effort fallback otherwise
            reason = str(self.apduAbortRejectReason) + '?'
        return reason

register_apdu_type(AbortPDU)
#------------------------------
#
# APCISequence
#
@bacpypes_debugging
class APCISequence(APCI, Sequence):
    """An APCI header whose service data is a Sequence of tagged elements.

    encode() renders the sequence elements into a tag list and appends the
    encoded tags to the PDU; decode() reverses the process and raises
    TooManyArguments when tags are left over after the sequence has been
    matched.
    """

    def __init__(self, *args, **kwargs):
        if _debug: APCISequence._debug("__init__ %r %r", args, kwargs)
        super(APCISequence, self).__init__(*args, **kwargs)

        # start with an empty tag list; encode()/decode() replace it
        self._tag_list = None

    def encode(self, apdu):
        """Encode the header fields and sequence elements into apdu."""
        if _debug: APCISequence._debug("encode %r", apdu)

        # copy the header fields
        apdu.update(self)

        # create a tag list and render the sequence elements into it
        self._tag_list = TagList()
        Sequence.encode(self, self._tag_list)

        # encode the tag list
        self._tag_list.encode(apdu)

    def decode(self, apdu):
        """Decode the header fields and sequence elements from apdu."""
        if _debug: APCISequence._debug("decode %r", apdu)

        # copy the header fields
        self.update(apdu)

        # create a tag list and decode the rest of the data
        self._tag_list = TagList()
        self._tag_list.decode(apdu)
        if _debug: APCISequence._debug("    - tag list: %r", self._tag_list)

        # pass the taglist to the Sequence for additional decoding
        Sequence.decode(self, self._tag_list)

        # trailing unmatched tags mean the sender supplied more than the
        # sequence defines
        if self._tag_list:
            if _debug: APCISequence._debug("    - trailing unmatched tags")
            raise TooManyArguments()

    def apdu_contents(self, use_dict=None, as_class=dict):
        """Return the contents of an object as a dict."""
        if _debug: APCISequence._debug("apdu_contents use_dict=%r as_class=%r", use_dict, as_class)

        # make/extend the dictionary of content
        if use_dict is None:
            use_dict = as_class()

        # set the function based on the class name
        use_dict.__setitem__('function', self.__class__.__name__)

        # fill in from the sequence contents
        Sequence.dict_contents(self, use_dict=use_dict, as_class=as_class)

        # return what we built/updated
        return use_dict
#
# ConfirmedRequestSequence
#
@bacpypes_debugging
class ConfirmedRequestSequence(APCISequence, ConfirmedRequestPDU):
    """Base class for confirmed request services defined as sequences."""

    serviceChoice = None

    def __init__(self, *args, **kwargs):
        if _debug:
            ConfirmedRequestSequence._debug("__init__ %r %r", args, kwargs)
        # the subclass supplies the service choice
        super(ConfirmedRequestSequence, self).__init__(*args, choice=self.serviceChoice, **kwargs)
#
# ComplexAckSequence
#
@bacpypes_debugging
class ComplexAckSequence(APCISequence, ComplexAckPDU):
    """Base class for complex acknowledgements defined as sequences."""

    serviceChoice = None

    def __init__(self, *args, **kwargs):
        if _debug:
            ComplexAckSequence._debug("__init__ %r %r", args, kwargs)
        # the subclass supplies the service choice
        super(ComplexAckSequence, self).__init__(*args, choice=self.serviceChoice, **kwargs)
#
# UnconfirmedRequestSequence
#
@bacpypes_debugging
class UnconfirmedRequestSequence(APCISequence, UnconfirmedRequestPDU):
    """Base class for unconfirmed request services defined as sequences."""

    serviceChoice = None

    def __init__(self, *args, **kwargs):
        if _debug:
            UnconfirmedRequestSequence._debug("__init__ %r %r", args, kwargs)
        # the subclass supplies the service choice
        super(UnconfirmedRequestSequence, self).__init__(*args, choice=self.serviceChoice, **kwargs)
#
# ErrorSequence
#
@bacpypes_debugging
class ErrorSequence(APCISequence, ErrorPDU):
    """Base class for error responses defined as sequences."""

    serviceChoice = None

    def __init__(self, *args, **kwargs):
        if _debug:
            ErrorSequence._debug("__init__ %r %r", args, kwargs)
        # the subclass supplies the service choice
        super(ErrorSequence, self).__init__(*args, choice=self.serviceChoice, **kwargs)
#------------------------------
class Error(ErrorSequence):
    """Generic error response: an error class and an error code."""

    sequenceElements = ErrorType.sequenceElements

    def __str__(self):
        return str(self.errorClass) + ": " + str(self.errorCode)

# generic error for ReadProperty (12) and ReadPropertyMultiple (14)
error_types[12] = Error
error_types[14] = Error
class ChangeListError(ErrorSequence):
    """Error response for the list element services (choices 8 and 9)."""

    sequenceElements = [
        Element('errorType', ErrorType, 0),
        Element('firstFailedElementNumber', Unsigned, 1),
    ]

    def __str__(self):
        return "change list error, first failed element number " + str(self.firstFailedElementNumber)

error_types[8] = ChangeListError
error_types[9] = ChangeListError
class CreateObjectError(ErrorSequence):
    """Error response for the CreateObject service (choice 10)."""

    sequenceElements = [
        Element('errorType', ErrorType, 0),
        Element('firstFailedElementNumber', Unsigned, 1),
    ]

    def __str__(self):
        return "create object error, first failed element number " + str(self.firstFailedElementNumber)

error_types[10] = CreateObjectError
class ConfirmedPrivateTransferError(ErrorSequence):
    """Error response for the ConfirmedPrivateTransfer service (choice 18)."""

    sequenceElements = [
        Element('errorType', ErrorType, 0),
        Element('vendorID', Unsigned, 1),
        Element('serviceNumber', Unsigned, 2),
        Element('errorParameters', Any, 3, True),
    ]

error_types[18] = ConfirmedPrivateTransferError
class WritePropertyMultipleError(ErrorSequence):
    """Error response for the WritePropertyMultiple service (choice 16)."""

    sequenceElements = [
        Element('errorType', ErrorType, 0),
        Element('firstFailedWriteAttempt', ObjectPropertyReference, 1),
    ]

error_types[16] = WritePropertyMultipleError
class VTCloseError(ErrorSequence):
    """Error response for the VT-Close service (choice 22)."""

    sequenceElements = [
        Element('errorType', ErrorType, 0),
        Element('listOfVTSessionIdentifiers', SequenceOf(Unsigned), 1, True),
    ]

error_types[22] = VTCloseError
#-----
class ReadPropertyRequest(ConfirmedRequestSequence):
    """ReadProperty service request (service choice 12)."""

    serviceChoice = 12
    sequenceElements = [
        Element('objectIdentifier', ObjectIdentifier, 0),
        Element('propertyIdentifier', PropertyIdentifier, 1),
        Element('propertyArrayIndex', Unsigned, 2, True),
    ]

register_confirmed_request_type(ReadPropertyRequest)
class ReadPropertyACK(ComplexAckSequence):
    """ReadProperty service acknowledgement (service choice 12)."""

    serviceChoice = 12
    sequenceElements = [
        Element('objectIdentifier', ObjectIdentifier, 0),
        Element('propertyIdentifier', PropertyIdentifier, 1),
        Element('propertyArrayIndex', Unsigned, 2, True),
        Element('propertyValue', Any, 3),
    ]

register_complex_ack_type(ReadPropertyACK)
#-----
class ReadAccessSpecification(Sequence):
    """One object and the list of its properties to read."""

    sequenceElements = [
        Element('objectIdentifier', ObjectIdentifier, 0),
        Element('listOfPropertyReferences', SequenceOf(PropertyReference), 1),
    ]
class ReadPropertyMultipleRequest(ConfirmedRequestSequence):
    """ReadPropertyMultiple service request (service choice 14)."""

    serviceChoice = 14
    sequenceElements = [
        Element('listOfReadAccessSpecs', SequenceOf(ReadAccessSpecification)),
    ]

register_confirmed_request_type(ReadPropertyMultipleRequest)
class ReadAccessResultElementChoice(Choice):
    """Either a property value or the error that prevented reading it."""

    choiceElements = [
        Element('propertyValue', Any, 4),
        Element('propertyAccessError', ErrorType, 5),
    ]
class ReadAccessResultElement(Sequence):
    """Result of reading one property of an object."""

    sequenceElements = [
        Element('propertyIdentifier', PropertyIdentifier, 2),
        Element('propertyArrayIndex', Unsigned, 3, True),
        Element('readResult', ReadAccessResultElementChoice),
    ]
class ReadAccessResult(Sequence):
    """Per-object results for a ReadPropertyMultiple request."""

    sequenceElements = [
        Element('objectIdentifier', ObjectIdentifier, 0),
        Element('listOfResults', SequenceOf(ReadAccessResultElement), 1),
    ]
class ReadPropertyMultipleACK(ComplexAckSequence):
    """ReadPropertyMultiple service acknowledgement (service choice 14)."""

    serviceChoice = 14
    sequenceElements = [
        Element('listOfReadAccessResults', SequenceOf(ReadAccessResult)),
    ]

register_complex_ack_type(ReadPropertyMultipleACK)
#-----
class RangeByPosition(Sequence):
    """ReadRange window selected by item position."""

    sequenceElements = [
        Element('referenceIndex', Unsigned),
        Element('count', Integer),
    ]
class RangeBySequenceNumber(Sequence):
    """ReadRange window selected by sequence number."""

    sequenceElements = [
        Element('referenceSequenceNumber', Unsigned),
        Element('count', Integer),
    ]
class RangeByTime(Sequence):
    """ReadRange window selected by timestamp."""

    sequenceElements = [
        Element('referenceTime', DateTime),
        Element('count', Integer),
    ]
class Range(Choice):
    """Choice of the three ReadRange window selectors."""

    choiceElements = [
        Element('byPosition', RangeByPosition, 3),
        Element('bySequenceNumber', RangeBySequenceNumber, 6),
        Element('byTime', RangeByTime, 7),
    ]
class ReadRangeRequest(ConfirmedRequestSequence):
    """ReadRange service request (service choice 26)."""

    serviceChoice = 26
    sequenceElements = [
        Element('objectIdentifier', ObjectIdentifier, 0),
        Element('propertyIdentifier', PropertyIdentifier, 1),
        Element('propertyArrayIndex', Unsigned, 2, True),
        Element('range', Range, optional=True),
    ]

register_confirmed_request_type(ReadRangeRequest)
class ReadRangeACK(ComplexAckSequence):
    """ReadRange service acknowledgement (service choice 26)."""

    serviceChoice = 26
    sequenceElements = [
        Element('objectIdentifier', ObjectIdentifier, 0),
        Element('propertyIdentifier', PropertyIdentifier, 1),
        Element('propertyArrayIndex', Unsigned, 2, True),
        Element('resultFlags', ResultFlags, 3),
        Element('itemCount', Unsigned, 4),
        Element('itemData', SequenceOf(Any), 5),
        Element('firstSequenceNumber', Unsigned, 6, True),
    ]

register_complex_ack_type(ReadRangeACK)
#-----
class WritePropertyRequest(ConfirmedRequestSequence):
    """WriteProperty service request (service choice 15)."""

    serviceChoice = 15
    sequenceElements = [
        Element('objectIdentifier', ObjectIdentifier, 0),
        Element('propertyIdentifier', PropertyIdentifier, 1),
        Element('propertyArrayIndex', Unsigned, 2, True),
        Element('propertyValue', Any, 3),
        Element('priority', Integer, 4, True),
    ]

register_confirmed_request_type(WritePropertyRequest)
#-----
class WriteAccessSpecification(Sequence):
    """One object and the list of property values to write to it."""

    sequenceElements = [
        Element('objectIdentifier', ObjectIdentifier, 0),
        Element('listOfProperties', SequenceOf(PropertyValue), 1),
    ]
class WritePropertyMultipleRequest(ConfirmedRequestSequence):
    """WritePropertyMultiple service request (service choice 16)."""

    serviceChoice = 16
    sequenceElements = [
        Element('listOfWriteAccessSpecs', SequenceOf(WriteAccessSpecification)),
    ]

register_confirmed_request_type(WritePropertyMultipleRequest)
#-----
class GroupChannelValue(Sequence):
    """A channel, an optional overriding priority, and the value to write."""

    sequenceElements = [
        Element('channel', Unsigned, 0),
        Element('overridingPriority', Unsigned, 1, True),
        Element('value', ChannelValue),
    ]
class WriteGroupRequest(UnconfirmedRequestSequence):
    """WriteGroup unconfirmed service request (service choice 10)."""

    serviceChoice = 10
    sequenceElements = [
        Element('groupNumber', Unsigned, 0),
        Element('writePriority', Unsigned, 1),
        Element('changeList', SequenceOf(GroupChannelValue), 2),
        Element('inhibitDelay', Boolean, 3, True),
    ]

register_unconfirmed_request_type(WriteGroupRequest)
#-----
class IAmRequest(UnconfirmedRequestSequence):
    """I-Am unconfirmed service request (service choice 0)."""

    serviceChoice = 0
    sequenceElements = [
        Element('iAmDeviceIdentifier', ObjectIdentifier),
        Element('maxAPDULengthAccepted', Unsigned),
        Element('segmentationSupported', Segmentation),
        Element('vendorID', Unsigned),
    ]

register_unconfirmed_request_type(IAmRequest)
#-----
class IHaveRequest(UnconfirmedRequestSequence):
    """I-Have unconfirmed service request (service choice 1)."""

    serviceChoice = 1
    sequenceElements = [
        Element('deviceIdentifier', ObjectIdentifier),
        Element('objectIdentifier', ObjectIdentifier),
        Element('objectName', CharacterString),
    ]

register_unconfirmed_request_type(IHaveRequest)
#-----
class WhoHasLimits(Sequence):
    """Device instance range limits for a Who-Has request."""

    sequenceElements = [
        Element('deviceInstanceRangeLowLimit', Unsigned, 0),
        Element('deviceInstanceRangeHighLimit', Unsigned, 1),
    ]
class WhoHasObject(Choice):
    """The object a Who-Has request asks for, by identifier or by name."""

    choiceElements = [
        Element('objectIdentifier', ObjectIdentifier, 2),
        Element('objectName', CharacterString, 3),
    ]
class WhoHasRequest(UnconfirmedRequestSequence):
    """Who-Has unconfirmed service request (service choice 7)."""

    serviceChoice = 7
    sequenceElements = [
        Element('limits', WhoHasLimits, None, True),
        Element('object', WhoHasObject),
    ]

register_unconfirmed_request_type(WhoHasRequest)
#-----
class WhoIsRequest(UnconfirmedRequestSequence):
    """Who-Is unconfirmed service request (service choice 8)."""

    serviceChoice = 8
    sequenceElements = [
        Element('deviceInstanceRangeLowLimit', Unsigned, 0, True),
        Element('deviceInstanceRangeHighLimit', Unsigned, 1, True),
    ]

register_unconfirmed_request_type(WhoIsRequest)
#-----
class EventNotificationParameters(Sequence):
    """Shared parameter list for confirmed and unconfirmed event notifications."""

    sequenceElements = [
        Element('processIdentifier', Unsigned, 0),
        Element('initiatingDeviceIdentifier', ObjectIdentifier, 1),
        Element('eventObjectIdentifier', ObjectIdentifier, 2),
        Element('timeStamp', TimeStamp, 3),
        Element('notificationClass', Unsigned, 4),
        Element('priority', Unsigned, 5),
        Element('eventType', EventType, 6),
        Element('messageText', CharacterString, 7, True),
        Element('notifyType', NotifyType, 8),
        Element('ackRequired', Boolean, 9, True),
        Element('fromState', EventState, 10, True),
        Element('toState', EventState, 11),
        Element('eventValues', NotificationParameters, 12, True),
    ]
class ConfirmedEventNotificationRequest(ConfirmedRequestSequence):
    """ConfirmedEventNotification service request (service choice 2)."""

    serviceChoice = 2
    sequenceElements = EventNotificationParameters.sequenceElements

register_confirmed_request_type(ConfirmedEventNotificationRequest)
class UnconfirmedEventNotificationRequest(UnconfirmedRequestSequence):
    """UnconfirmedEventNotification service request (service choice 3)."""

    serviceChoice = 3
    sequenceElements = EventNotificationParameters.sequenceElements

register_unconfirmed_request_type(UnconfirmedEventNotificationRequest)
#-----
class COVNotificationParameters(Sequence):
    """Shared parameter list for confirmed and unconfirmed COV notifications."""

    sequenceElements = [
        Element('subscriberProcessIdentifier', Unsigned, 0),
        Element('initiatingDeviceIdentifier', ObjectIdentifier, 1),
        Element('monitoredObjectIdentifier', ObjectIdentifier, 2),
        Element('timeRemaining', Unsigned, 3),
        Element('listOfValues', SequenceOf(PropertyValue), 4),
    ]
class ConfirmedCOVNotificationRequest(ConfirmedRequestSequence):
    """ConfirmedCOVNotification service request (service choice 1)."""

    serviceChoice = 1
    sequenceElements = COVNotificationParameters.sequenceElements

register_confirmed_request_type(ConfirmedCOVNotificationRequest)
class UnconfirmedCOVNotificationRequest(UnconfirmedRequestSequence):
    """UnconfirmedCOVNotification service request (service choice 2)."""

    serviceChoice = 2
    sequenceElements = COVNotificationParameters.sequenceElements

register_unconfirmed_request_type(UnconfirmedCOVNotificationRequest)
#-----
class UnconfirmedPrivateTransferRequest(UnconfirmedRequestSequence):
    """UnconfirmedPrivateTransfer service request (service choice 4)."""

    serviceChoice = 4
    sequenceElements = [
        Element('vendorID', Unsigned, 0),
        Element('serviceNumber', Unsigned, 1),
        Element('serviceParameters', Any, 2, True),
    ]

register_unconfirmed_request_type(UnconfirmedPrivateTransferRequest)
#-----
class UnconfirmedTextMessageRequestMessageClass(Choice):
    """Message class of a text message, numeric or character."""

    choiceElements = [
        Element('numeric', Unsigned, 0),
        Element('character', CharacterString, 1),
    ]
class UnconfirmedTextMessageRequestMessagePriority(Enumerated):
    """Priority of a text message."""

    enumerations = {
        'normal': 0,
        'urgent': 1,
    }
class UnconfirmedTextMessageRequest(UnconfirmedRequestSequence):
    """UnconfirmedTextMessage service request (service choice 5)."""

    serviceChoice = 5
    sequenceElements = [
        Element('textMessageSourceDevice', ObjectIdentifier, 0),
        Element('messageClass', UnconfirmedTextMessageRequestMessageClass, 1, True),
        Element('messagePriority', UnconfirmedTextMessageRequestMessagePriority, 2),
        Element('message', CharacterString, 3),
    ]

register_unconfirmed_request_type(UnconfirmedTextMessageRequest)
#-----
class TimeSynchronizationRequest(UnconfirmedRequestSequence):
    """TimeSynchronization service request (service choice 6)."""

    serviceChoice = 6
    sequenceElements = [
        Element('time', DateTime),
    ]

register_unconfirmed_request_type(TimeSynchronizationRequest)
#-----
class UTCTimeSynchronizationRequest(UnconfirmedRequestSequence):
    """UTCTimeSynchronization service request (service choice 9)."""

    serviceChoice = 9
    sequenceElements = [
        Element('time', DateTime),
    ]

register_unconfirmed_request_type(UTCTimeSynchronizationRequest)
#-----
class AcknowledgeAlarmRequest(ConfirmedRequestSequence):
    """AcknowledgeAlarm service request (service choice 0)."""

    serviceChoice = 0
    sequenceElements = [
        Element('acknowledgingProcessIdentifier', Unsigned, 0),
        Element('eventObjectIdentifier', ObjectIdentifier, 1),
        Element('eventStateAcknowledged', EventState, 2),
        Element('timeStamp', TimeStamp, 3),
        Element('acknowledgmentSource', CharacterString, 4),
        Element('timeOfAcknowledgment', TimeStamp, 5),
    ]

register_confirmed_request_type(AcknowledgeAlarmRequest)
#-----
class GetAlarmSummaryRequest(ConfirmedRequestSequence):
    """GetAlarmSummary service request (service choice 3); no parameters."""

    serviceChoice = 3
    sequenceElements = []

register_confirmed_request_type(GetAlarmSummaryRequest)
class GetAlarmSummaryAlarmSummary(Sequence):
    """One alarm summary entry in a GetAlarmSummary acknowledgement."""

    sequenceElements = [
        Element('objectIdentifier', ObjectIdentifier),
        Element('alarmState', EventState),
        Element('acknowledgedTransitions', EventTransitionBits),
    ]
class GetAlarmSummaryACK(ComplexAckSequence):
    """GetAlarmSummary service acknowledgement (service choice 3)."""

    serviceChoice = 3
    sequenceElements = [
        Element('listOfAlarmSummaries', SequenceOf(GetAlarmSummaryAlarmSummary)),
    ]

register_complex_ack_type(GetAlarmSummaryACK)
#-----
class GetEnrollmentSummaryRequestAcknowledgmentFilterType(Enumerated):
    """Acknowledgement filter for GetEnrollmentSummary requests."""

    enumerations = {
        'all': 0,
        'acked': 1,
        'notAcked': 2,
    }
class GetEnrollmentSummaryRequestEventStateFilterType(Enumerated):
    """Event state filter for GetEnrollmentSummary requests."""

    enumerations = {
        'offnormal': 0,
        'fault': 1,
        'normal': 2,
        'all': 3,
        'active': 4,
    }
class GetEnrollmentSummaryRequestPriorityFilterType(Sequence):
    """Min/max priority filter for GetEnrollmentSummary requests.

    Bug fix: this was declared as a plain class with no base; every other
    element container in this module derives from Sequence, and without
    that base the type has none of the Sequence encode/decode machinery
    needed when it is used as the 'priorityFilter' element of
    GetEnrollmentSummaryRequest (it is also an old-style class on py2).
    """

    sequenceElements = [
        Element('minPriority', Unsigned, 0),
        Element('maxPriority', Unsigned, 1),
    ]
class GetEnrollmentSummaryRequest(ConfirmedRequestSequence):
    """GetEnrollmentSummary service request (service choice 4)."""

    serviceChoice = 4
    sequenceElements = [
        Element('acknowledgmentFilter', GetEnrollmentSummaryRequestAcknowledgmentFilterType, 0),
        Element('enrollmentFilter', RecipientProcess, 1, True),
        Element('eventStateFilter', GetEnrollmentSummaryRequestEventStateFilterType, 2, True),
        Element('eventTypeFilter', EventType, 3, True),
        Element('priorityFilter', GetEnrollmentSummaryRequestPriorityFilterType, 4, True),
        Element('notificationClassFilter', Unsigned, 5, True),
    ]

register_confirmed_request_type(GetEnrollmentSummaryRequest)
class GetEnrollmentSummaryEnrollmentSummary(Sequence):
    """One enrollment summary entry in a GetEnrollmentSummary acknowledgement."""

    sequenceElements = [
        Element('objectIdentifier', ObjectIdentifier),
        Element('eventType', EventType),
        Element('eventState', EventState),
        Element('priority', Unsigned),
        Element('notificationClass', Unsigned, optional=True),
    ]
class GetEnrollmentSummaryACK(ComplexAckSequence):
    """GetEnrollmentSummary service acknowledgement (service choice 4)."""

    serviceChoice = 4
    sequenceElements = [
        Element('listOfEnrollmentSummaries', SequenceOf(GetEnrollmentSummaryEnrollmentSummary)),
    ]

register_complex_ack_type(GetEnrollmentSummaryACK)
#-----
class GetEventInformationRequest(ConfirmedRequestSequence):
    """GetEventInformation service request (service choice 29)."""

    serviceChoice = 29
    sequenceElements = [
        Element('lastReceivedObjectIdentifier', ObjectIdentifier, 0, True),
    ]

register_confirmed_request_type(GetEventInformationRequest)
class GetEventInformationEventSummary(Sequence):
    """One event summary entry in a GetEventInformation acknowledgement."""

    sequenceElements = [
        Element('objectIdentifier', ObjectIdentifier, 0),
        Element('eventState', EventState, 1),
        Element('acknowledgedTransitions', EventTransitionBits, 2),
        Element('eventTimeStamps', SequenceOf(TimeStamp), 3),
        Element('notifyType', NotifyType, 4),
        Element('eventEnable', EventTransitionBits, 5),
        Element('eventPriorities', SequenceOf(Unsigned), 6),
    ]
class GetEventInformationACK(ComplexAckSequence):
    """GetEventInformation service acknowledgement (service choice 29)."""

    serviceChoice = 29
    sequenceElements = [
        Element('listOfEventSummaries', SequenceOf(GetEventInformationEventSummary), 0),
        Element('moreEvents', Boolean, 1),
    ]

register_complex_ack_type(GetEventInformationACK)
#-----
class LifeSafetyOperationRequest(ConfirmedRequestSequence):
    """LifeSafetyOperation service request (service choice 27)."""

    serviceChoice = 27
    sequenceElements = [
        Element('requestingProcessIdentifier', Unsigned, 0),
        Element('requestingSource', CharacterString, 1),
        Element('request', LifeSafetyOperation, 2),
        Element('objectIdentifier', ObjectIdentifier, 3, True),
    ]

register_confirmed_request_type(LifeSafetyOperationRequest)
#-----
class SubscribeCOVRequest(ConfirmedRequestSequence):
    """SubscribeCOV service request (service choice 5)."""

    serviceChoice = 5
    sequenceElements = [
        Element('subscriberProcessIdentifier', Unsigned, 0),
        Element('monitoredObjectIdentifier', ObjectIdentifier, 1),
        Element('issueConfirmedNotifications', Boolean, 2, True),
        Element('lifetime', Unsigned, 3, True),
    ]

register_confirmed_request_type(SubscribeCOVRequest)
#-----
class SubscribeCOVPropertyRequest(ConfirmedRequestSequence):
    """SubscribeCOVProperty service request (service choice 28)."""

    serviceChoice = 28
    sequenceElements = [
        Element('subscriberProcessIdentifier', Unsigned, 0),
        Element('monitoredObjectIdentifier', ObjectIdentifier, 1),
        Element('issueConfirmedNotifications', Boolean, 2, True),
        Element('lifetime', Unsigned, 3, True),
        Element('monitoredPropertyIdentifier', PropertyReference, 4),
        Element('covIncrement', Real, 5, True),
    ]

register_confirmed_request_type(SubscribeCOVPropertyRequest)
#-----
class AtomicReadFileRequestAccessMethodChoiceStreamAccess(Sequence):
    """Stream-access parameters for an AtomicReadFile request."""

    sequenceElements = [
        Element('fileStartPosition', Integer),
        Element('requestedOctetCount', Unsigned),
    ]
class AtomicReadFileRequestAccessMethodChoiceRecordAccess(Sequence):
    """Record-access parameters for an AtomicReadFile request."""

    sequenceElements = [
        Element('fileStartRecord', Integer),
        Element('requestedRecordCount', Unsigned),
    ]
class AtomicReadFileRequestAccessMethodChoice(Choice):
    """Stream or record access method for an AtomicReadFile request."""

    choiceElements = [
        Element('streamAccess', AtomicReadFileRequestAccessMethodChoiceStreamAccess, 0),
        Element('recordAccess', AtomicReadFileRequestAccessMethodChoiceRecordAccess, 1),
    ]
class AtomicReadFileRequest(ConfirmedRequestSequence):
    """AtomicReadFile service request (service choice 6)."""

    serviceChoice = 6
    sequenceElements = [
        Element('fileIdentifier', ObjectIdentifier),
        Element('accessMethod', AtomicReadFileRequestAccessMethodChoice),
    ]

register_confirmed_request_type(AtomicReadFileRequest)
class AtomicReadFileACKAccessMethodStreamAccess(Sequence):
    """Stream-access results for an AtomicReadFile acknowledgement."""

    sequenceElements = [
        Element('fileStartPosition', Integer),
        Element('fileData', OctetString),
    ]
class AtomicReadFileACKAccessMethodRecordAccess(Sequence):
    """Record-access results for an AtomicReadFile acknowledgement."""

    sequenceElements = [
        Element('fileStartRecord', Integer),
        Element('returnedRecordCount', Unsigned),
        Element('fileRecordData', SequenceOf(OctetString)),
    ]
class AtomicReadFileACKAccessMethodChoice(Choice):
    """Stream or record access results in an AtomicReadFile acknowledgement."""

    choiceElements = [
        Element('streamAccess', AtomicReadFileACKAccessMethodStreamAccess, 0),
        Element('recordAccess', AtomicReadFileACKAccessMethodRecordAccess, 1),
    ]
class AtomicReadFileACK(ComplexAckSequence):
    """Complex acknowledgement for the AtomicReadFile service (choice 6)."""
    serviceChoice = 6
    sequenceElements = [
        Element('endOfFile', Boolean),
        Element('accessMethod', AtomicReadFileACKAccessMethodChoice),
    ]

register_complex_ack_type(AtomicReadFileACK)
#-----
class AtomicWriteFileRequestAccessMethodChoiceStreamAccess(Sequence):
    """Stream-access parameters for an AtomicWriteFile request."""
    sequenceElements = [
        Element('fileStartPosition', Integer),
        Element('fileData', OctetString),
    ]
class AtomicWriteFileRequestAccessMethodChoiceRecordAccess(Sequence):
    """Record-access parameters for an AtomicWriteFile request."""
    sequenceElements = [
        Element('fileStartRecord', Integer),
        Element('recordCount', Unsigned),
        Element('fileRecordData', SequenceOf(OctetString)),
    ]
class AtomicWriteFileRequestAccessMethodChoice(Choice):
    """Choice between stream and record access for an AtomicWriteFile request."""
    choiceElements = [
        Element('streamAccess', AtomicWriteFileRequestAccessMethodChoiceStreamAccess, 0),
        Element('recordAccess', AtomicWriteFileRequestAccessMethodChoiceRecordAccess, 1),
    ]
class AtomicWriteFileRequest(ConfirmedRequestSequence):
    """AtomicWriteFile confirmed service request (service choice 7)."""
    serviceChoice = 7
    sequenceElements = [
        Element('fileIdentifier', ObjectIdentifier),
        Element('accessMethod', AtomicWriteFileRequestAccessMethodChoice),
    ]

register_confirmed_request_type(AtomicWriteFileRequest)
class AtomicWriteFileACK(ComplexAckSequence):
    """Complex acknowledgement for the AtomicWriteFile service (choice 7)."""
    serviceChoice = 7
    sequenceElements = [
        Element('fileStartPosition', Integer, 0, True),
        Element('fileStartRecord', Integer, 1, True),
    ]

register_complex_ack_type(AtomicWriteFileACK)
#-----
class AddListElementRequest(ConfirmedRequestSequence):
    """AddListElement confirmed service request (service choice 8)."""
    serviceChoice = 8
    sequenceElements = [
        Element('objectIdentifier', ObjectIdentifier, 0),
        Element('propertyIdentifier', PropertyIdentifier, 1),
        Element('propertyArrayIndex', Unsigned, 2, True),
        Element('listOfElements', Any, 3),
    ]

register_confirmed_request_type(AddListElementRequest)
#-----
class CreateObjectRequestObjectSpecifier(Choice):
    """Object specifier for CreateObject: either a type or a full identifier."""
    choiceElements = [
        Element('objectType', ObjectType, 0),
        Element('objectIdentifier', ObjectIdentifier, 1),
    ]
class CreateObjectRequest(ConfirmedRequestSequence):
    """CreateObject confirmed service request (service choice 10)."""
    serviceChoice = 10
    sequenceElements = [
        Element('objectSpecifier', CreateObjectRequestObjectSpecifier, 0),
        Element('listOfInitialValues', SequenceOf(PropertyValue), 1, True),
    ]

register_confirmed_request_type(CreateObjectRequest)
class CreateObjectACK(ComplexAckSequence):
    """Complex acknowledgement for the CreateObject service (choice 10)."""
    serviceChoice = 10
    sequenceElements = [
        Element('objectIdentifier', ObjectIdentifier),
    ]

register_complex_ack_type(CreateObjectACK)
#-----
class DeleteObjectRequest(ConfirmedRequestSequence):
    """DeleteObject confirmed service request (service choice 11)."""
    serviceChoice = 11
    sequenceElements = [
        Element('objectIdentifier', ObjectIdentifier),
    ]

register_confirmed_request_type(DeleteObjectRequest)
#-----
class RemoveListElementRequest(ConfirmedRequestSequence):
    """RemoveListElement confirmed service request (service choice 9)."""
    serviceChoice = 9
    sequenceElements = [
        Element('objectIdentifier', ObjectIdentifier, 0),
        Element('propertyIdentifier', PropertyIdentifier, 1),
        Element('propertyArrayIndex', Unsigned, 2, True),
        Element('listOfElements', Any, 3),
    ]

register_confirmed_request_type(RemoveListElementRequest)
#-----
class DeviceCommunicationControlRequestEnableDisable(Enumerated):
    """Enable/disable states accepted by DeviceCommunicationControl."""
    enumerations = {
        'enable': 0,
        'disable': 1,
        'disableInitiation': 2,
    }
class DeviceCommunicationControlRequest(ConfirmedRequestSequence):
    """DeviceCommunicationControl confirmed service request (service choice 17)."""
    serviceChoice = 17
    sequenceElements = [
        Element('timeDuration', Unsigned, 0, True),
        Element('enableDisable', DeviceCommunicationControlRequestEnableDisable, 1, True),
        Element('password', CharacterString, 2, True),
    ]

register_confirmed_request_type(DeviceCommunicationControlRequest)
class ConfirmedPrivateTransferRequest(ConfirmedRequestSequence):
    """ConfirmedPrivateTransfer confirmed service request (service choice 18)."""
    serviceChoice = 18
    sequenceElements = [
        Element('vendorID', Unsigned, 0),
        Element('serviceNumber', Unsigned, 1),
        Element('serviceParameters', Any, 2, True),
    ]

register_confirmed_request_type(ConfirmedPrivateTransferRequest)
class ConfirmedPrivateTransferACK(ComplexAckSequence):
    """Complex acknowledgement for ConfirmedPrivateTransfer (choice 18)."""
    serviceChoice = 18
    sequenceElements = [
        Element('vendorID', Unsigned, 0),
        Element('serviceNumber', Unsigned, 1),
        Element('resultBlock', Any, 2, True),
    ]

register_complex_ack_type(ConfirmedPrivateTransferACK)
#-----
class ConfirmedTextMessageRequestMessageClass(Choice):
    """Message class for ConfirmedTextMessage: numeric code or character string."""
    choiceElements = [
        Element('numeric', Unsigned, 0),
        Element('character', CharacterString, 1),
    ]
class ConfirmedTextMessageRequestMessagePriority(Enumerated):
    """Priority levels for a ConfirmedTextMessage."""
    enumerations = {
        'normal': 0,
        'urgent': 1,
    }
class ConfirmedTextMessageRequest(ConfirmedRequestSequence):
    """ConfirmedTextMessage confirmed service request (service choice 19)."""
    serviceChoice = 19
    sequenceElements = [
        Element('textMessageSourceDevice', ObjectIdentifier, 0),
        Element('messageClass', ConfirmedTextMessageRequestMessageClass, 1, True),
        Element('messagePriority', ConfirmedTextMessageRequestMessagePriority, 2),
        Element('message', CharacterString, 3),
    ]

register_confirmed_request_type(ConfirmedTextMessageRequest)
#-----
class ReinitializeDeviceRequestReinitializedStateOfDevice(Enumerated):
    """Target states accepted by the ReinitializeDevice service."""
    enumerations = {
        'coldstart': 0,
        'warmstart': 1,
        'startbackup': 2,
        'endbackup': 3,
        'startrestore': 4,
        'endrestore': 5,
        'abortrestore': 6,
    }
class ReinitializeDeviceRequest(ConfirmedRequestSequence):
    """ReinitializeDevice confirmed service request (service choice 20)."""
    serviceChoice = 20
    sequenceElements = [
        Element('reinitializedStateOfDevice', ReinitializeDeviceRequestReinitializedStateOfDevice, 0),
        Element('password', CharacterString, 1, True),
    ]

register_confirmed_request_type(ReinitializeDeviceRequest)
#-----
class VTOpenRequest(ConfirmedRequestSequence):
    """VT-Open confirmed service request (service choice 21)."""
    serviceChoice = 21
    sequenceElements = [
        Element('vtClass', VTClass),
        Element('localVTSessionIdentifier', Unsigned),
    ]

register_confirmed_request_type(VTOpenRequest)
class VTOpenACK(ComplexAckSequence):
    """Complex acknowledgement for the VT-Open service (choice 21)."""
    serviceChoice = 21
    sequenceElements = [
        Element('remoteVTSessionIdentifier', Unsigned),
    ]

register_complex_ack_type(VTOpenACK)
class VTCloseRequest(ConfirmedRequestSequence):
    """VT-Close confirmed service request (service choice 22)."""
    serviceChoice = 22
    sequenceElements = [
        Element('listOfRemoteVTSessionIdentifiers', SequenceOf(Unsigned)),
    ]

register_confirmed_request_type(VTCloseRequest)
class VTDataRequest(ConfirmedRequestSequence):
    """VT-Data confirmed service request (service choice 23)."""
    serviceChoice = 23
    sequenceElements = [
        Element('vtSessionIdentifier', Unsigned),
        Element('vtNewData', OctetString),
        Element('vtDataFlag', Unsigned),
    ]

register_confirmed_request_type(VTDataRequest)
class VTDataACK(ComplexAckSequence):
    """Complex acknowledgement for the VT-Data service (choice 23)."""
    serviceChoice = 23
    sequenceElements = [
        Element('allNewDataAccepted', Boolean, 0),
        Element('acceptedOctetCount', Unsigned, 1),
    ]

register_complex_ack_type(VTDataACK)
#-----
# removed in version 1, revision 11
class AuthenticateRequest(ConfirmedRequestSequence):
    """Authenticate confirmed service request (service choice 24).

    Removed in protocol version 1, revision 11 — deliberately not registered.
    """
    serviceChoice = 24
    sequenceElements = [
        Element('pseudoRandomNumber', Unsigned, 0),
        Element('expectedInvokeID', Unsigned, 1),
        Element('operatorName', CharacterString, 2),
        Element('operatorPassword', CharacterString, 3),
        Element('startEncipheredSession', Boolean, 4),
    ]
# removed in version 1, revision 11
class AuthenticateACK(ComplexAckSequence):
    """Authenticate acknowledgement (choice 24).

    Removed in protocol version 1, revision 11 — deliberately not registered.
    """
    serviceChoice = 24
    sequenceElements = [
        Element('modifiedRandomNumber', Unsigned),
    ]
# removed in version 1, revision 11
class RequestKeyRequest(ConfirmedRequestSequence):
    """RequestKey confirmed service request (service choice 25).

    Removed in protocol version 1, revision 11 — deliberately not registered.
    """
    serviceChoice = 25
    sequenceElements = [
        Element('requestingDeviceIdentifier', ObjectIdentifier),
        Element('requestingDeviceAddress', DeviceAddress),
        Element('remoteDeviceIdentifier', ObjectIdentifier),
        Element('remoteDeviceAddress', DeviceAddress),
    ]
#-----------------------------------
class ConfirmedServiceChoice(Enumerated):
    """Service-choice numbers for every confirmed BACnet service."""
    enumerations = {
        # Alarm and Event Services
        'acknowledgeAlarm': 0,
        'confirmedCOVNotification': 1,
        'confirmedEventNotification': 2,
        'getAlarmSummary': 3,
        'getEnrollmentSummary': 4,
        'getEventInformation': 29,
        'subscribeCOV': 5,
        'subscribeCOVProperty': 28,
        'lifeSafetyOperation': 27,
        # File Access Services
        'atomicReadFile': 6,
        'atomicWriteFile': 7,
        # Object Access Services
        'addListElement': 8,
        'removeListElement': 9,
        'createObject': 10,
        'deleteObject': 11,
        'readProperty': 12,
        'readPropertyMultiple': 14,
        'readRange': 26,
        'writeProperty': 15,
        'writePropertyMultiple': 16,
        # Remote Device Management Services
        'deviceCommunicationControl': 17,
        'confirmedPrivateTransfer': 18,
        'confirmedTextMessage': 19,
        'reinitializeDevice': 20,
        # Virtual Terminal Services
        'vtOpen': 21,
        'vtClose': 22,
        'vtData': 23,
    }

expand_enumerations(ConfirmedServiceChoice)
class UnconfirmedServiceChoice(Enumerated):
    """Service-choice numbers for every unconfirmed BACnet service."""
    enumerations = {
        'iAm': 0,
        'iHave': 1,
        'unconfirmedCOVNotification': 2,
        'unconfirmedEventNotification': 3,
        'unconfirmedPrivateTransfer': 4,
        'unconfirmedTextMessage': 5,
        'timeSynchronization': 6,
        'whoHas': 7,
        'whoIs': 8,
        'utcTimeSynchronization': 9,
        'writeGroup': 10,
    }

expand_enumerations(UnconfirmedServiceChoice)
| 31.153356 | 134 | 0.663979 |
acf2c208dd05361c84b4cd5bb17cf23083d0060b | 514 | py | Python | setup.py | AbhaAnand4/Python-Project1 | edb5592c0c6763e639eba054bdb8a44ba07e19d9 | [
"MIT"
] | null | null | null | setup.py | AbhaAnand4/Python-Project1 | edb5592c0c6763e639eba054bdb8a44ba07e19d9 | [
"MIT"
] | null | null | null | setup.py | AbhaAnand4/Python-Project1 | edb5592c0c6763e639eba054bdb8a44ba07e19d9 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""The setup script."""
from setuptools import setup, find_packages
from printip import __version__
# Package metadata and console entry point for the ip_print utility.
setup(
    name="ip_print",
    version=__version__,
    description="Simple Python utility to print ip address",
    author="Abha Anand",
    author_email="anandabha6@gmail.com",
    url="https://github.com/AbhaAnand4/ip-print",
    packages=find_packages(include=["printip", "printip*"]),
    entry_points={
        "console_scripts": [
            "ip_print=printip.ip_print:main",
        ],
    },
)
acf2c21006beff4d9e4bd57c1614ccbc4058d23c | 990 | py | Python | setup.py | erose1337/versionhelper | 15c643c6321a0427f88ad2803bd592eafa1cdb7b | [
"MIT"
] | 1 | 2019-02-25T14:14:21.000Z | 2019-02-25T14:14:21.000Z | setup.py | erose1337/versionhelper | 15c643c6321a0427f88ad2803bd592eafa1cdb7b | [
"MIT"
] | null | null | null | setup.py | erose1337/versionhelper | 15c643c6321a0427f88ad2803bd592eafa1cdb7b | [
"MIT"
] | null | null | null | from setuptools import setup
# Keyword arguments for setuptools.setup(); kept at module level so the
# metadata can be inspected without triggering a build.
options = {
    "name": "versionhelper",
    "description": "Automatically increment semantic version number according to changes in the code and API",
    # "long_description": '',
    # "url": "",
    # "download_url": "",
    "author": "Ella Rose",
    "author_email": "python_pride@protonmail.com",
    "packages": ["versionhelper"],
    "classifiers": [
        "Development Status :: 4 - Beta",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Operating System :: Microsoft :: Windows",
        "Operating System :: POSIX :: Linux",
        "Programming Language :: Python :: 2.7",
        "Topic :: Software Development :: Libraries :: Python Modules",
    ],
}

if __name__ == "__main__":
    setup(**options)
acf2c22c7435dc4ff8d0ba34931101ec98e7267c | 16,899 | py | Python | yolov5/utils/wandb_logging/wandb_utils.py | hjsg1010/food_menu_detection | c8d8465ba3020b81ad0a117fbb840a7f1799678d | [
"RSA-MD"
] | null | null | null | yolov5/utils/wandb_logging/wandb_utils.py | hjsg1010/food_menu_detection | c8d8465ba3020b81ad0a117fbb840a7f1799678d | [
"RSA-MD"
] | null | null | null | yolov5/utils/wandb_logging/wandb_utils.py | hjsg1010/food_menu_detection | c8d8465ba3020b81ad0a117fbb840a7f1799678d | [
"RSA-MD"
] | null | null | null | """Utilities and tools for tracking runs with Weights & Biases."""
import json
import sys
from pathlib import Path
import torch
import yaml
from tqdm import tqdm
sys.path.append(str(Path(__file__).parent.parent.parent)) # add utils/ to path
from utils.datasets import LoadImagesAndLabels
from utils.datasets import img2label_paths
from utils.general import colorstr, xywh2xyxy, check_dataset, check_file
try:
import wandb
from wandb import init, finish
except ImportError:
wandb = None
WANDB_ARTIFACT_PREFIX = 'wandb-artifact://'
def remove_prefix(from_string, prefix=WANDB_ARTIFACT_PREFIX):
    """Return *from_string* with *prefix* stripped from the front.

    The previous implementation sliced off the first ``len(prefix)``
    characters unconditionally, which silently corrupted strings that did
    not start with the prefix. The prefix is now only removed when it is
    actually present, which is backward compatible with every caller in
    this file (they all check ``startswith`` first).
    """
    if from_string.startswith(prefix):
        return from_string[len(prefix):]
    return from_string
def check_wandb_config_file(data_config_file):
    """Return the W&B-specific variant of *data_config_file* if one exists on disk,
    otherwise return the original path unchanged."""
    candidate = '_wandb.'.join(data_config_file.rsplit('.', 1))  # e.g. data.yaml -> data_wandb.yaml
    return candidate if Path(candidate).is_file() else data_config_file
def get_run_info(run_path):
    """Split a ``wandb-artifact://entity/project/run_id`` path into its parts.

    Returns:
        Tuple of (entity, project, run_id, model_artifact_name).
    """
    path = Path(remove_prefix(run_path, WANDB_ARTIFACT_PREFIX))
    run_id = path.stem
    project = path.parent.stem
    entity = path.parent.parent.stem
    return entity, project, run_id, 'run_' + run_id + '_model'
def check_wandb_resume(opt):
    """Check whether training is resuming from a W&B artifact path.

    For non-primary DDP ranks the model artifact is downloaded locally and
    ``opt.weights`` is redirected to the downloaded ``last.pt`` (the primary
    rank handles its own download later via ``WandbLogger``).

    Returns:
        True if ``opt.resume`` is a ``wandb-artifact://`` path, otherwise None.
    """
    # Non-primary ranks also need local copies of any artifact-backed datasets
    process_wandb_config_ddp_mode(opt) if opt.global_rank not in [-1, 0] else None
    if isinstance(opt.resume, str):
        if opt.resume.startswith(WANDB_ARTIFACT_PREFIX):
            if opt.global_rank not in [-1, 0]:  # For resuming DDP runs
                entity, project, run_id, model_artifact_name = get_run_info(opt.resume)
                api = wandb.Api()
                artifact = api.artifact(entity + '/' + project + '/' + model_artifact_name + ':latest')
                modeldir = artifact.download()
                opt.weights = str(Path(modeldir) / "last.pt")
            return True
    return None
def process_wandb_config_ddp_mode(opt):
    """Download artifact-backed train/val datasets for a DDP worker rank.

    Reads ``opt.data``, resolves any ``wandb-artifact://`` train/val entries
    to local directories, and points ``opt.data`` at a rewritten YAML file
    (``wandb_local_data.yaml``) that references the downloaded paths.
    """
    with open(check_file(opt.data)) as f:
        data_dict = yaml.safe_load(f)  # data dict
    train_dir, val_dir = None, None
    if isinstance(data_dict['train'], str) and data_dict['train'].startswith(WANDB_ARTIFACT_PREFIX):
        api = wandb.Api()
        train_artifact = api.artifact(remove_prefix(data_dict['train']) + ':' + opt.artifact_alias)
        train_dir = train_artifact.download()
        train_path = Path(train_dir) / 'data/images/'
        data_dict['train'] = str(train_path)

    if isinstance(data_dict['val'], str) and data_dict['val'].startswith(WANDB_ARTIFACT_PREFIX):
        api = wandb.Api()
        val_artifact = api.artifact(remove_prefix(data_dict['val']) + ':' + opt.artifact_alias)
        val_dir = val_artifact.download()
        val_path = Path(val_dir) / 'data/images/'
        data_dict['val'] = str(val_path)
    if train_dir or val_dir:
        # Persist the rewritten data dict so this rank trains from local paths
        ddp_data_path = str(Path(val_dir) / 'wandb_local_data.yaml')
        with open(ddp_data_path, 'w') as f:
            yaml.safe_dump(data_dict, f)
        opt.data = ddp_data_path
class WandbLogger():
    """Log training runs, datasets, models, and predictions to Weights & Biases.

    This logger sends information to W&B at wandb.ai. By default, this information
    includes hyperparameters, system configuration and metrics, model metrics,
    and basic data metrics and analyses.

    By providing additional command line arguments to train.py, datasets,
    models and predictions can also be logged.

    For more on how this logger is used, see the Weights & Biases documentation:
    https://docs.wandb.com/guides/integrations/yolov5
    """

    def __init__(self, opt, name, run_id, data_dict, job_type='Training'):
        """Initialise (or resume) a W&B run and, for training jobs, wire up
        dataset/model artifact tracking.

        Args:
            opt: namespace of command-line training options.
            name: display name for the W&B run.
            run_id: id of an existing run to resume, or None for a new run.
            data_dict: parsed data.yaml dictionary.
            job_type: 'Training' or 'Dataset Creation'.
        """
        # Pre-training routine --
        self.job_type = job_type
        self.wandb, self.wandb_run, self.data_dict = wandb, None if not wandb else wandb.run, data_dict
        # It's more elegant to stick to 1 wandb.init call, but useful config data is overwritten in the WandbLogger's wandb.init call
        if isinstance(opt.resume, str):  # checks resume from artifact
            if opt.resume.startswith(WANDB_ARTIFACT_PREFIX):
                entity, project, run_id, model_artifact_name = get_run_info(opt.resume)
                model_artifact_name = WANDB_ARTIFACT_PREFIX + model_artifact_name
                assert wandb, 'install wandb to resume wandb runs'
                # Resume wandb-artifact:// runs here| workaround for not overwriting wandb.config
                self.wandb_run = wandb.init(id=run_id, project=project, entity=entity, resume='allow')
                opt.resume = model_artifact_name
        elif self.wandb:
            self.wandb_run = wandb.init(config=opt,
                                        resume="allow",
                                        project='yolov5_food' if opt.project == 'runs/train' else Path(opt.project).stem,
                                        entity=opt.entity,
                                        name=name,
                                        job_type=job_type,
                                        id=run_id) if not wandb.run else wandb.run
        if self.wandb_run:
            if self.job_type == 'Training':
                if not opt.resume:
                    wandb_data_dict = self.check_and_upload_dataset(opt) if opt.upload_dataset else data_dict
                    # Info useful for resuming from artifacts
                    self.wandb_run.config.opt = vars(opt)
                    self.wandb_run.config.data_dict = wandb_data_dict
                self.data_dict = self.setup_training(opt, data_dict)
            if self.job_type == 'Dataset Creation':
                self.data_dict = self.check_and_upload_dataset(opt)
        else:
            prefix = colorstr('wandb: ')
            print(f"{prefix}Install Weights & Biases for YOLOv5 logging with 'pip install wandb' (recommended)")

    def check_and_upload_dataset(self, opt):
        """Upload the dataset described by ``opt.data`` as W&B artifacts and
        return the data dict parsed from the generated wandb config file."""
        assert wandb, 'Install wandb to upload dataset'
        check_dataset(self.data_dict)
        config_path = self.log_dataset_artifact(check_file(opt.data),
                                                opt.single_cls,
                                                'YOLOv5' if opt.project == 'runs/train' else Path(opt.project).stem)
        print("Created dataset config file ", config_path)
        with open(config_path) as f:
            wandb_data_dict = yaml.safe_load(f)
        return wandb_data_dict

    def setup_training(self, opt, data_dict):
        """Prepare run state for training: restore resumed options/weights,
        download artifact-backed datasets, and create the evaluation table.

        Returns the (possibly rewritten) data dict with local dataset paths.
        """
        self.log_dict, self.current_epoch, self.log_imgs = {}, 0, 16  # Logging Constants
        self.bbox_interval = opt.bbox_interval
        if isinstance(opt.resume, str):
            modeldir, _ = self.download_model_artifact(opt)
            if modeldir:
                self.weights = Path(modeldir) / "last.pt"
                config = self.wandb_run.config
                # Restore the training options recorded on the resumed run
                opt.weights, opt.save_period, opt.batch_size, opt.bbox_interval, opt.epochs, opt.hyp = str(
                    self.weights), config.save_period, config.total_batch_size, config.bbox_interval, config.epochs, \
                    config.opt['hyp']
            data_dict = dict(self.wandb_run.config.data_dict)  # eliminates the need for config file to resume
        if 'val_artifact' not in self.__dict__:  # If --upload_dataset is set, use the existing artifact, don't download
            self.train_artifact_path, self.train_artifact = self.download_dataset_artifact(data_dict.get('train'),
                                                                                           opt.artifact_alias)
            self.val_artifact_path, self.val_artifact = self.download_dataset_artifact(data_dict.get('val'),
                                                                                       opt.artifact_alias)
            self.result_artifact, self.result_table, self.val_table, self.weights = None, None, None, None
            if self.train_artifact_path is not None:
                train_path = Path(self.train_artifact_path) / 'data/images/'
                data_dict['train'] = str(train_path)
            if self.val_artifact_path is not None:
                val_path = Path(self.val_artifact_path) / 'data/images/'
                data_dict['val'] = str(val_path)
                self.val_table = self.val_artifact.get("val")
                self.map_val_table_path()
        if self.val_artifact is not None:
            self.result_artifact = wandb.Artifact("run_" + wandb.run.id + "_progress", "evaluation")
            self.result_table = wandb.Table(["epoch", "id", "prediction", "avg_confidence"])
        if opt.bbox_interval == -1:
            # Default: log bounding boxes roughly ten times over the run
            self.bbox_interval = opt.bbox_interval = (opt.epochs // 10) if opt.epochs > 10 else 1
        return data_dict

    def download_dataset_artifact(self, path, alias):
        """Download a dataset artifact referenced by a wandb-artifact:// path.

        Returns (local_dir, artifact), or (None, None) when *path* is not an
        artifact reference.
        """
        if isinstance(path, str) and path.startswith(WANDB_ARTIFACT_PREFIX):
            artifact_path = Path(remove_prefix(path, WANDB_ARTIFACT_PREFIX) + ":" + alias)
            dataset_artifact = wandb.use_artifact(artifact_path.as_posix())
            assert dataset_artifact is not None, "'Error: W&B dataset artifact doesn\'t exist'"
            datadir = dataset_artifact.download()
            return datadir, dataset_artifact
        return None, None

    def download_model_artifact(self, opt):
        """Download the latest model checkpoint artifact for a resumed run.

        Returns (local_dir, artifact), or (None, None) when ``opt.resume`` is
        not an artifact reference. Raises if the resumed run already finished.
        """
        if opt.resume.startswith(WANDB_ARTIFACT_PREFIX):
            model_artifact = wandb.use_artifact(remove_prefix(opt.resume, WANDB_ARTIFACT_PREFIX) + ":latest")
            assert model_artifact is not None, 'Error: W&B model artifact doesn\'t exist'
            modeldir = model_artifact.download()
            epochs_trained = model_artifact.metadata.get('epochs_trained')
            total_epochs = model_artifact.metadata.get('total_epochs')
            # total_epochs is cleared when a run completes, so None means finished
            is_finished = total_epochs is None
            assert not is_finished, 'training is finished, can only resume incomplete runs.'
            return modeldir, model_artifact
        return None, None

    def log_model(self, path, opt, epoch, fitness_score, best_model=False):
        """Upload ``path``/last.pt as a versioned model artifact, tagging it
        with epoch/fitness metadata and the 'best' alias when applicable."""
        model_artifact = wandb.Artifact('run_' + wandb.run.id + '_model', type='model', metadata={
            'original_url': str(path),
            'epochs_trained': epoch + 1,
            'save period': opt.save_period,
            'project': opt.project,
            'total_epochs': opt.epochs,
            'fitness_score': fitness_score
        })
        model_artifact.add_file(str(path / 'last.pt'), name='last.pt')
        wandb.log_artifact(model_artifact,
                           aliases=['latest', 'last', 'epoch ' + str(self.current_epoch), 'best' if best_model else ''])
        print("Saving model artifact on epoch ", epoch + 1)

    def log_dataset_artifact(self, data_file, single_cls, project, overwrite_config=False):
        """Build train/val dataset artifacts from *data_file* and write an
        updated data.yaml whose paths point at the artifacts.

        Returns the path of the written config file.
        """
        with open(data_file) as f:
            data = yaml.safe_load(f)  # data dict
        nc, names = (1, ['item']) if single_cls else (int(data['nc']), data['names'])
        names = {k: v for k, v in enumerate(names)}  # to index dictionary
        self.train_artifact = self.create_dataset_table(LoadImagesAndLabels(
            data['train'], rect=True, batch_size=1), names, name='train') if data.get('train') else None
        self.val_artifact = self.create_dataset_table(LoadImagesAndLabels(
            data['val'], rect=True, batch_size=1), names, name='val') if data.get('val') else None
        if data.get('train'):
            data['train'] = WANDB_ARTIFACT_PREFIX + str(Path(project) / 'train')
        if data.get('val'):
            data['val'] = WANDB_ARTIFACT_PREFIX + str(Path(project) / 'val')
        path = data_file if overwrite_config else '_wandb.'.join(data_file.rsplit('.', 1))  # updated data.yaml path
        data.pop('download', None)
        with open(path, 'w') as f:
            yaml.safe_dump(data, f)

        if self.job_type == 'Training':  # builds correct artifact pipeline graph
            self.wandb_run.use_artifact(self.val_artifact)
            self.wandb_run.use_artifact(self.train_artifact)
            self.val_artifact.wait()
            self.val_table = self.val_artifact.get('val')
            self.map_val_table_path()
        else:
            self.wandb_run.log_artifact(self.train_artifact)
            self.wandb_run.log_artifact(self.val_artifact)
        return path

    def map_val_table_path(self):
        """Build a filename -> row-id map for the validation table so that
        predictions can later be joined back to their ground-truth rows."""
        self.val_table_map = {}
        print("Mapping dataset")
        for i, data in enumerate(tqdm(self.val_table.data)):
            # column 3 is the image name, column 0 the row id (see create_dataset_table)
            self.val_table_map[data[3]] = data[0]

    def create_dataset_table(self, dataset, class_to_id, name='dataset'):
        """Create a dataset artifact containing images, labels and a W&B table
        of ground-truth boxes for each image in *dataset*."""
        # TODO: Explore multiprocessing to split this loop in parallel | This is essential for speeding up the logging
        artifact = wandb.Artifact(name=name, type="dataset")
        img_files = tqdm([dataset.path]) if isinstance(dataset.path, str) and Path(dataset.path).is_dir() else None
        img_files = tqdm(dataset.img_files) if not img_files else img_files
        for img_file in img_files:
            if Path(img_file).is_dir():
                artifact.add_dir(img_file, name='data/images')
                labels_path = 'labels'.join(dataset.path.rsplit('images', 1))
                artifact.add_dir(labels_path, name='data/labels')
            else:
                artifact.add_file(img_file, name='data/images/' + Path(img_file).name)
                label_file = Path(img2label_paths([img_file])[0])
                artifact.add_file(str(label_file),
                                  name='data/labels/' + label_file.name) if label_file.exists() else None
        table = wandb.Table(columns=["id", "train_image", "Classes", "name"])
        class_set = wandb.Classes([{'id': id, 'name': name} for id, name in class_to_id.items()])
        for si, (img, labels, paths, shapes) in enumerate(tqdm(dataset)):
            box_data, img_classes = [], {}
            for cls, *xywh in labels[:, 1:].tolist():
                cls = int(cls)
                box_data.append({"position": {"middle": [xywh[0], xywh[1]], "width": xywh[2], "height": xywh[3]},
                                 "class_id": cls,
                                 "box_caption": "%s" % (class_to_id[cls])})
                img_classes[cls] = class_to_id[cls]
            boxes = {"ground_truth": {"box_data": box_data, "class_labels": class_to_id}}  # inference-space
            table.add_data(si, wandb.Image(paths, classes=class_set, boxes=boxes), json.dumps(img_classes),
                           Path(paths).name)
        artifact.add(table, name)
        return artifact

    def log_training_progress(self, predn, path, names):
        """Add one row of predictions (boxes with confidence >= 0.25) for the
        validation image at *path* to the epoch's result table."""
        if self.val_table and self.result_table:
            class_set = wandb.Classes([{'id': id, 'name': name} for id, name in names.items()])
            box_data = []
            total_conf = 0
            for *xyxy, conf, cls in predn.tolist():
                if conf >= 0.25:
                    box_data.append(
                        {"position": {"minX": xyxy[0], "minY": xyxy[1], "maxX": xyxy[2], "maxY": xyxy[3]},
                         "class_id": int(cls),
                         "box_caption": "%s %.3f" % (names[cls], conf),
                         "scores": {"class_score": conf},
                         "domain": "pixel"})
                    total_conf = total_conf + conf
            boxes = {"predictions": {"box_data": box_data, "class_labels": names}}  # inference-space
            id = self.val_table_map[Path(path).name]
            self.result_table.add_data(self.current_epoch,
                                       id,
                                       wandb.Image(self.val_table.data[id][1], boxes=boxes, classes=class_set),
                                       total_conf / max(1, len(box_data))
                                       )

    def log(self, log_dict):
        """Buffer *log_dict* entries; they are flushed to W&B by end_epoch()."""
        if self.wandb_run:
            for key, value in log_dict.items():
                self.log_dict[key] = value

    def end_epoch(self, best_result=False):
        """Flush buffered metrics and, if a result table exists, publish the
        joined ground-truth/prediction table as a new artifact version."""
        if self.wandb_run:
            wandb.log(self.log_dict)
            self.log_dict = {}
            if self.result_artifact:
                train_results = wandb.JoinedTable(self.val_table, self.result_table, "id")
                self.result_artifact.add(train_results, 'result')
                wandb.log_artifact(self.result_artifact, aliases=['latest', 'last', 'epoch ' + str(self.current_epoch),
                                                                  ('best' if best_result else '')])
                # A fresh table/artifact is needed per epoch; logged ones are immutable
                self.result_table = wandb.Table(["epoch", "id", "prediction", "avg_confidence"])
                self.result_artifact = wandb.Artifact("run_" + wandb.run.id + "_progress", "evaluation")

    def finish_run(self):
        """Flush any remaining buffered metrics and close the W&B run."""
        if self.wandb_run:
            if self.log_dict:
                wandb.log(self.log_dict)
            wandb.run.finish()
| 52.974922 | 133 | 0.603349 |
acf2c2847440bddeb524c545917ed5241497083b | 57,618 | py | Python | api/azimuth/provider/openstack/provider.py | stackhpc/azimuth | be0ded7df3cb0c0381de302add0c386205533ac3 | [
"BSD-3-Clause"
] | 2 | 2022-01-14T14:45:18.000Z | 2022-02-05T17:35:58.000Z | api/azimuth/provider/openstack/provider.py | stackhpc/azimuth | be0ded7df3cb0c0381de302add0c386205533ac3 | [
"BSD-3-Clause"
] | 3 | 2022-02-11T18:41:01.000Z | 2022-02-21T18:00:20.000Z | api/azimuth/provider/openstack/provider.py | stackhpc/azimuth | be0ded7df3cb0c0381de302add0c386205533ac3 | [
"BSD-3-Clause"
] | 1 | 2021-12-02T13:22:09.000Z | 2021-12-02T13:22:09.000Z | """
This module contains the provider implementation for OpenStack.
"""
import base64
import dataclasses
import functools
import hashlib
import itertools
import logging
import random
import re
import dateutil.parser
import rackit
from ...cluster_engine.dto import Credential
from .. import base, errors, dto
from . import api
logger = logging.getLogger(__name__)
class Lazy:
    """
    Callable wrapper that defers a function invocation until the first call
    and memoises the result for every subsequent call.
    """
    def __init__(self, func, *args, **kwargs):
        self.func = func
        self.args = args
        self.kwargs = kwargs

    def __call__(self):
        # "result" does not exist until the first successful invocation,
        # so a failed call is not cached and will be retried.
        try:
            return self.result
        except AttributeError:
            self.result = self.func(*self.args, **self.kwargs)
            return self.result
# (OpenStack term, Azimuth term) pairs used by _replace_resource_names to
# translate resource names in upstream error messages into the vocabulary
# that Azimuth exposes to its users.
_REPLACEMENTS = [
    ("instance", "machine"),
    ("Instance", "Machine"),
    ("flavorRef", "size"),
    ("flavor", "size"),
    ("Flavor", "Size"),
    ("Security group rule", "Firewall rule"),
]
def _replace_resource_names(message):
    """Translate OpenStack resource names in *message* into the terminology
    used by Azimuth (see _REPLACEMENTS)."""
    result = message
    for openstack_name, azimuth_name in _REPLACEMENTS:
        result = result.replace(openstack_name, azimuth_name)
    return result
def sanitise_username(username):
    """
    Sanitise a username for use in a keypair name by collapsing every run of
    non-alphanumeric characters into a single hyphen.
    """
    return re.sub(r"[^a-zA-Z0-9]+", "-", username)
def convert_exceptions(f):
    """
    Decorator that converts OpenStack API exceptions into errors from :py:mod:`..errors`.

    HTTP status codes raised as ``rackit.ApiError`` are mapped onto the
    provider error hierarchy (with OpenStack resource names translated into
    Azimuth terminology); any other rackit failure is reported as a
    communication error.
    """
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except api.ServiceNotSupported as exc:
            # Convert service not supported from the API module into unsupported operation
            raise errors.UnsupportedOperationError(str(exc))
        except rackit.ApiError as exc:
            # Extract the status code and message
            status_code = exc.status_code
            # Replace the OpenStack resource names with ours
            message = _replace_resource_names(str(exc))
            if status_code == 400:
                raise errors.BadInputError(message)
            elif status_code == 401:
                raise errors.AuthenticationError("Your session has expired.")
            elif status_code == 403:
                # Some quota exceeded errors get reported as permission denied (WHY???!!!)
                # So report them as quota exceeded instead
                if "exceeded" in message.lower():
                    raise errors.QuotaExceededError(
                        "Requested operation would exceed at least one quota. "
                        "Please check your tenancy quotas."
                    )
                raise errors.PermissionDeniedError("Permission denied.")
            elif status_code == 404:
                raise errors.ObjectNotFoundError(message)
            elif status_code == 409:
                # 409 (Conflict) has a lot of different sub-errors depending on
                # the actual error text
                if "exceeded" in message.lower():
                    raise errors.QuotaExceededError(
                        "Requested operation would exceed at least one quota. "
                        "Please check your tenancy quotas."
                    )
                raise errors.InvalidOperationError(message)
            elif status_code == 413:
                # The volume service uses 413 (Payload too large) for quota errors
                if "exceedsavailablequota" in message.lower():
                    raise errors.QuotaExceededError(
                        "Requested operation would exceed at least one quota. "
                        "Please check your tenancy quotas."
                    )
                raise errors.CommunicationError("Unknown error with OpenStack API.")
            else:
                # Any other status code is treated as an opaque upstream failure
                raise errors.CommunicationError("Unknown error with OpenStack API.")
        except rackit.RackitError as exc:
            # Transport-level failures (connection refused, timeouts, ...)
            logger.exception("Could not connect to OpenStack API.")
            raise errors.CommunicationError("Could not connect to OpenStack API.")
    return wrapper
class Provider(base.Provider):
    """
    Provider implementation for OpenStack.

    Args:
        auth_url: The Keystone v3 authentication URL.
        domain: The domain to authenticate with (default ``Default``).
        interface: The OpenStack interface to connect using (default ``public``).
        metadata_prefix: The prefix to use for all Azimuth-related metadata (default ``azimuth_``).
        internal_net_template: Template for the name of the internal network to use
                               (default ``None``).
                               The current tenancy name can be templated in using the
                               fragment ``{tenant_name}``.
        external_net_template: Template for the name of the external network to use
                               (default ``None``).
                               The current tenancy name can be templated in using the
                               fragment ``{tenant_name}``.
        create_internal_net: If ``True`` (the default), then the internal network is auto-created
                             when a tagged network or templated network cannot be found.
        internal_net_cidr: The CIDR for the internal network when it is
                           auto-created (default ``192.168.3.0/24``).
        az_backdoor_net_map: Mapping of availability zone to the UUID of the backdoor network
                             for that availability zone (default ``None``).
                             The backdoor network will only be attached if the image specifically
                             requests it. At that point, an availability zone will be randomly
                             selected, and if the network is not available an error will be raised.
        backdoor_vnic_type: The ``binding:vnic_type`` for the backdoor network. If not given,
                            no vNIC type will be specified (default ``None``).
        verify_ssl: If ``True`` (the default), verify SSL certificates. If ``False``
                    SSL certificates are not verified.
        cluster_engine: The :py:class:`~..cluster.base.Engine` to use for clusters.
                        If not given, clusters are disabled.
    """
    # Identifier reported for this provider implementation
    provider_name = "openstack"

    def __init__(self, auth_url,
                       domain = "Default",
                       interface = "public",
                       metadata_prefix = "azimuth_",
                       internal_net_template = None,
                       external_net_template = None,
                       create_internal_net = True,
                       internal_net_cidr = "192.168.3.0/24",
                       az_backdoor_net_map = None,
                       backdoor_vnic_type = None,
                       verify_ssl = True,
                       cluster_engine = None):
        # Strip any trailing slashes from the auth URL
        self._auth_url = auth_url.rstrip("/")
        self._domain = domain
        self._interface = interface
        self._metadata_prefix = metadata_prefix
        # Networking configuration (templated internal/external nets and
        # optional per-AZ backdoor networks)
        self._internal_net_template = internal_net_template
        self._external_net_template = external_net_template
        self._create_internal_net = create_internal_net
        self._internal_net_cidr = internal_net_cidr
        self._az_backdoor_net_map = az_backdoor_net_map or dict()
        self._backdoor_vnic_type = backdoor_vnic_type
        self._verify_ssl = verify_ssl
        self._cluster_engine = cluster_engine

    @convert_exceptions
    def authenticate(self, username, password):
        """
        See :py:meth:`.base.Provider.authenticate`.
        """
        logger.info("Authenticating user '%s' with OpenStack", username)
        # Create an API connection using the username and password
        auth_params = api.AuthParams().use_password(self._domain, username, password)
        try:
            conn = api.Connection(self._auth_url, auth_params, self._interface, self._verify_ssl)
        except rackit.Unauthorized:
            logger.info("Authentication failed for user '%s'", username)
            # We want to use a different error message to convert_exceptions
            raise errors.AuthenticationError("Invalid username or password.")
        else:
            logger.info("Sucessfully authenticated user '%s'", username)
            # Hand the configuration down to the per-user session
            return UnscopedSession(
                conn,
                metadata_prefix = self._metadata_prefix,
                internal_net_template = self._internal_net_template,
                external_net_template = self._external_net_template,
                create_internal_net = self._create_internal_net,
                internal_net_cidr = self._internal_net_cidr,
                az_backdoor_net_map = self._az_backdoor_net_map,
                backdoor_vnic_type = self._backdoor_vnic_type,
                cluster_engine = self._cluster_engine
            )

    @convert_exceptions
    def from_token(self, token):
        """
        See :py:meth:`.base.Provider.from_token`.
        """
        logger.info("Authenticating token with OpenStack")
        auth_params = api.AuthParams().use_token(token)
        try:
            conn = api.Connection(self._auth_url, auth_params, self._interface, self._verify_ssl)
        except (rackit.Unauthorized, rackit.NotFound):
            logger.info("Authentication failed for token")
            # Failing to validate a token is a 404 for some reason
            raise errors.AuthenticationError("Your session has expired.")
        else:
            logger.info("Sucessfully authenticated user '%s'", conn.username)
            # Hand the configuration down to the per-user session
            return UnscopedSession(
                conn,
                metadata_prefix = self._metadata_prefix,
                internal_net_template = self._internal_net_template,
                external_net_template = self._external_net_template,
                create_internal_net = self._create_internal_net,
                internal_net_cidr = self._internal_net_cidr,
                az_backdoor_net_map = self._az_backdoor_net_map,
                backdoor_vnic_type = self._backdoor_vnic_type,
                cluster_engine = self._cluster_engine
            )
class UnscopedSession(base.UnscopedSession):
    """
    Unscoped session implementation for OpenStack.

    Wraps an authenticated (but not project-scoped) API connection and exposes
    the operations that are valid before a tenancy has been selected.
    """
    provider_name = "openstack"
    def __init__(self, connection,
                       metadata_prefix = "azimuth_",
                       internal_net_template = None,
                       external_net_template = None,
                       create_internal_net = True,
                       internal_net_cidr = "192.168.3.0/24",
                       az_backdoor_net_map = None,
                       backdoor_vnic_type = None,
                       cluster_engine = None):
        # The configuration arguments mirror those of the provider and are
        # passed through to scoped sessions created from this session
        self._connection = connection
        self._metadata_prefix = metadata_prefix
        self._internal_net_template = internal_net_template
        self._external_net_template = external_net_template
        self._create_internal_net = create_internal_net
        self._internal_net_cidr = internal_net_cidr
        # Fall back to an empty mapping so later lookups never hit None
        self._az_backdoor_net_map = az_backdoor_net_map or dict()
        self._backdoor_vnic_type = backdoor_vnic_type
        self._cluster_engine = cluster_engine
    def token(self):
        """
        See :py:meth:`.base.UnscopedSession.token`.
        """
        return self._connection.token
    def username(self):
        """
        See :py:meth:`.base.UnscopedSession.username`.
        """
        return self._connection.username
    def _log(self, message, *args, level = logging.INFO, **kwargs):
        # Prefix every log message with the username for traceability
        logger.log(level, "[%s] " + message, self.username(), *args, **kwargs)
    def _scoped_connection_for_first_project(self):
        """
        Returns a scoped connection for the user's first project.

        Raises :py:class:`.errors.InvalidOperationError` if the user does not
        belong to any projects.
        """
        try:
            project = next(self._connection.projects.all())
        except StopIteration:
            raise errors.InvalidOperationError("User does not belong to any projects.")
        return self._connection.scoped_connection(project)
    def capabilities(self):
        """
        See :py:meth:`.base.UnscopedSession.capabilities`.
        """
        # We need a scoped connection to query the service catalog
        # If the user does not belong to any projects, use the default capabilities
        try:
            conn = self._scoped_connection_for_first_project()
        except errors.InvalidOperationError:
            return dto.Capabilities()
        # Check if the relevant services are available to the project
        try:
            _ = conn.block_store
        except api.ServiceNotSupported:
            supports_volumes = False
        else:
            supports_volumes = True
        return dto.Capabilities(supports_volumes = supports_volumes)
    @convert_exceptions
    def ssh_public_key(self, key_name):
        """
        See :py:meth:`.base.UnscopedSession.ssh_public_key`.
        """
        # Sanitise the requested name and try to find a keypair with that name
        keypair_name = sanitise_username(key_name)
        self._log("Attempting to locate keypair '%s'", keypair_name)
        # In OpenStack, SSH keys are shared between projects
        # So get a scoped connection for the user's first project to use
        connection = self._scoped_connection_for_first_project()
        keypair = connection.compute.keypairs.get(keypair_name)
        # Return the public key associated with that key
        return keypair.public_key
    @convert_exceptions
    def update_ssh_public_key(self, key_name, public_key):
        """
        See :py:meth:`.base.UnscopedSession.update_ssh_public_key`.
        """
        # Use the sanitised username as the keypair name
        keypair_name = sanitise_username(key_name)
        # In OpenStack, SSH keys are shared between projects
        # So get a scoped connection for the user's first project to use
        connection = self._scoped_connection_for_first_project()
        # Keypairs are immutable in OpenStack, so we first remove the existing keypair
        # If it doesn't exist, we can ignore that
        try:
            connection.compute.keypairs.delete(keypair_name)
        except rackit.NotFound:
            pass
        else:
            self._log("Deleted previous keypair '%s'", keypair_name)
        # Create a new keypair with the same name but the new key
        self._log("Creating keypair '%s'", keypair_name)
        keypair = connection.compute.keypairs.create(
            name = keypair_name,
            public_key = public_key
        )
        return keypair.public_key
    @convert_exceptions
    def tenancies(self):
        """
        See :py:meth:`.base.UnscopedSession.tenancies`.
        """
        self._log("Fetching available tenancies")
        projects = tuple(self._connection.projects.all())
        self._log("Found %s projects", len(projects))
        # Only enabled projects are exposed as tenancies
        return tuple(dto.Tenancy(p.id, p.name) for p in projects if p.enabled)
    @convert_exceptions
    def scoped_session(self, tenancy):
        """
        See :py:meth:`.base.UnscopedSession.scoped_session`.
        """
        # Make sure we have a tenancy id
        if not isinstance(tenancy, dto.Tenancy):
            # There is no (obvious) way to list individual auth projects, so traverse the list
            try:
                tenancy = next(t for t in self.tenancies() if t.id == tenancy)
            except StopIteration:
                raise errors.ObjectNotFoundError(
                    "Could not find tenancy with ID {}.".format(tenancy)
                )
        self._log("Creating scoped session for project '%s'", tenancy.name)
        try:
            return ScopedSession(
                self.username(),
                tenancy,
                self._connection.scoped_connection(tenancy.id),
                metadata_prefix = self._metadata_prefix,
                internal_net_template = self._internal_net_template,
                external_net_template = self._external_net_template,
                create_internal_net = self._create_internal_net,
                internal_net_cidr = self._internal_net_cidr,
                az_backdoor_net_map = self._az_backdoor_net_map,
                backdoor_vnic_type = self._backdoor_vnic_type,
                cluster_engine = self._cluster_engine
            )
        except (rackit.Unauthorized, rackit.Forbidden):
            # Report a permissions failure as "not found" so we do not leak
            # the existence of projects the user cannot access
            raise errors.ObjectNotFoundError(
                "Could not find tenancy with ID {}.".format(tenancy.id)
            )
    def close(self):
        """
        See :py:meth:`.base.UnscopedSession.close`.
        """
        # Just close the underlying connection
        self._connection.close()
class ScopedSession(base.ScopedSession):
"""
Tenancy-scoped session implementation for OpenStack.
"""
provider_name = "openstack"
    def __init__(self, username,
                       tenancy,
                       connection,
                       metadata_prefix = "azimuth_",
                       internal_net_template = None,
                       external_net_template = None,
                       create_internal_net = True,
                       internal_net_cidr = "192.168.3.0/24",
                       az_backdoor_net_map = None,
                       backdoor_vnic_type = None,
                       cluster_engine = None):
        """
        Initialise the scoped session.

        ``username`` and ``tenancy`` identify the user/project this session is
        scoped to, ``connection`` is a project-scoped API connection and the
        remaining arguments mirror the provider-level configuration.
        """
        self._username = username
        self._tenancy = tenancy
        self._connection = connection
        self._metadata_prefix = metadata_prefix
        self._internal_net_template = internal_net_template
        self._external_net_template = external_net_template
        self._create_internal_net = create_internal_net
        self._internal_net_cidr = internal_net_cidr
        # Fall back to an empty mapping so later lookups never hit None
        self._az_backdoor_net_map = az_backdoor_net_map or dict()
        self._backdoor_vnic_type = backdoor_vnic_type
        self._cluster_engine = cluster_engine
    def _log(self, message, *args, level = logging.INFO, **kwargs):
        # Prefix every log message with the username and tenancy name so
        # entries from concurrent sessions can be told apart
        logger.log(
            level,
            "[%s] [%s] " + message,
            self._username, self._tenancy.name, *args, **kwargs
        )
    def username(self):
        """
        See :py:meth:`.base.ScopedSession.username`.
        """
        return self._username
    def tenancy(self):
        """
        See :py:meth:`.base.ScopedSession.tenancy`.
        """
        return self._tenancy
    @convert_exceptions
    def quotas(self):
        """
        See :py:meth:`.base.ScopedSession.quotas`.
        """
        self._log("Fetching tenancy quotas")
        # Compute provides a way to fetch this information through the SDK, but
        # the floating IP quota obtained through it is rubbish...
        compute_limits = self._connection.compute.limits.absolute
        quotas = [
            dto.Quota(
                "cpus",
                None,
                compute_limits.total_cores,
                compute_limits.total_cores_used
            ),
            dto.Quota(
                "ram",
                "MB",
                compute_limits.total_ram,
                compute_limits.total_ram_used
            ),
            dto.Quota(
                "machines",
                None,
                compute_limits.instances,
                compute_limits.instances_used
            ),
        ]
        # Get the floating ip quota from the network service instead
        network_quotas = self._connection.network.quotas
        quotas.append(
            dto.Quota(
                "external_ips",
                None,
                network_quotas.floatingip,
                # There is no reliable usage counter, so count the allocated IPs
                len(list(self._connection.network.floatingips.all()))
            )
        )
        # The volume service is optional
        # In the case where the service is not enabled, just don't add the quotas
        try:
            volume_limits = self._connection.block_store.limits.absolute
            quotas.extend([
                dto.Quota(
                    "storage",
                    "GB",
                    volume_limits.total_volume_gigabytes,
                    volume_limits.total_gigabytes_used
                ),
                dto.Quota(
                    "volumes",
                    None,
                    volume_limits.volumes,
                    volume_limits.volumes_used
                )
            ])
        except api.ServiceNotSupported:
            pass
        return quotas
def _from_api_image(self, api_image):
"""
Converts an OpenStack API image object into a :py:class:`.dto.Image`.
"""
# Gather the metadata items with the specified prefix
# As well as the image metadata, we also treat tags with the specified prefix
# as metadata items with a value of "1"
metadata = {
key.removeprefix(self._metadata_prefix): value
for key, value in api_image._data.items()
if key.startswith(self._metadata_prefix)
}
metadata.update({
tag.removeprefix(self._metadata_prefix): "1"
for tag in getattr(api_image, "tags") or []
if tag.startswith(self._metadata_prefix)
})
return dto.Image(
api_image.id,
api_image.name,
api_image.visibility == "public",
# The image size is specified in bytes. Convert to MB.
float(api_image.size) / 1024.0 / 1024.0,
metadata = metadata
)
    @convert_exceptions
    def images(self):
        """
        See :py:meth:`.base.ScopedSession.images`.
        """
        self._log("Fetching available images")
        # Fetch from the SDK using our custom image resource
        # Exclude cluster images from the returned list
        # NOTE(review): no cluster-image filter is visible in this query -
        # confirm whether the filtering happens in the custom image resource
        images = list(self._connection.image.images.all(
            status = "active",
            # Include community images in the returned list
            visibility = "all",
            member_status = "accepted"
        ))
        self._log("Found %s images", len(images))
        return tuple(self._from_api_image(i) for i in images)
    @convert_exceptions
    def find_image(self, id):
        """
        See :py:meth:`.base.ScopedSession.find_image`.
        """
        self._log("Fetching image with id '%s'", id)
        # Just convert the SDK image to a DTO image
        return self._from_api_image(self._connection.image.images.get(id))
def _from_api_flavor(self, api_flavor):
"""
Converts an OpenStack API flavor object into a :py:class:`.dto.Size`.
"""
return dto.Size(
api_flavor.id,
api_flavor.name,
api_flavor.vcpus,
api_flavor.ram,
api_flavor.disk
)
    @convert_exceptions
    def sizes(self):
        """
        See :py:meth:`.base.ScopedSession.sizes`.
        """
        self._log("Fetching available flavors")
        # Disabled flavors are hidden from the returned list
        flavors = tuple(
            self._from_api_flavor(flavor)
            for flavor in self._connection.compute.flavors.all()
            if not flavor.is_disabled
        )
        self._log("Found %s flavors", len(flavors))
        return flavors
    @convert_exceptions
    def find_size(self, id):
        """
        See :py:meth:`.base.ScopedSession.find_size`.
        """
        self._log("Fetching flavor with id '%s'", id)
        return self._from_api_flavor(self._connection.compute.flavors.get(id))
    def _tagged_network(self, net_type):
        """
        Returns the first network with the given tag, or None if there is not one.

        ``net_type`` is "internal" or "external"; the corresponding tag is
        "portal-internal" / "portal-external".
        """
        tag = "portal-{}".format(net_type)
        network = next(
            # Consider all the networks available to the project, but use ones
            # owned by the project in preference
            itertools.chain(
                self._connection.network.networks.all(tags = tag),
                self._connection.network.networks.all(tags = tag, project_id = None),
            ),
            None
        )
        if network:
            self._log("Using tagged %s network '%s'", net_type, network.name)
        else:
            # Not necessarily an error - callers fall back to other strategies
            self._log("Failed to find tagged %s network.", net_type, level = logging.WARN)
        return network
    def _templated_network(self, template, net_type):
        """
        Returns the network specified by the template, after interpolating with the tenant name.
        If the network does not exist, that is a config error and an exception is raised.
        """
        # The template is expected to contain a "{tenant_name}" placeholder
        net_name = template.format(tenant_name = self._tenancy.name)
        network = next(
            # Consider all the networks available to the project, but use ones
            # owned by the project in preference
            itertools.chain(
                self._connection.network.networks.all(name = net_name),
                self._connection.network.networks.all(name = net_name, project_id = None),
            ),
            None
        )
        if network:
            self._log("Found %s network '%s' using template.", net_type, network.name)
            return network
        else:
            # Unlike the tagged lookup, a template that resolves to a missing
            # network indicates a misconfiguration, so this is an error
            self._log(
                "Failed to find %s network '%s' from template.",
                net_type,
                net_name,
                level = logging.ERROR
            )
            raise errors.InvalidOperationError("Could not find {} network.".format(net_type))
    def _tenant_network(self, create_network = False):
        """
        Returns the tenant internal network.
        If create_network = True then an attempt is made to auto-create the networking.
        If this fails then an exception is raised.
        If create_network = False then None is returned when the network is not found.
        """
        # First, try to find a network that is tagged as the portal internal network
        tagged_network = self._tagged_network("internal")
        if tagged_network:
            return tagged_network
        # Next, attempt to use the name template
        if self._internal_net_template:
            return self._templated_network(self._internal_net_template, "internal")
        # If we get to here and are not creating a network, return
        if not create_network:
            return None
        if self._create_internal_net:
            # Unfortunately, the tags cannot be set in the POST request
            self._log("Creating internal network")
            network = self._connection.network.networks.create(name = "portal-internal")
            network._update_tags(["portal-internal"])
            # Create a subnet for the network
            self._log("Creating subnet for network '%s'", network.name)
            subnet = self._connection.network.subnets.create(
                name = "portal-internal",
                network_id = network.id,
                ip_version = 4,
                cidr = self._internal_net_cidr
            )
            # If we can find an external network, create a router that links the two
            # A missing external network is tolerated here - the internal network
            # is still usable, just without outbound connectivity
            try:
                external_network = self._external_network()
            except errors.InvalidOperationError:
                self._log(
                    "Failed to find external network",
                    level = logging.WARN,
                    exc_info = True
                )
            else:
                self._log("Creating tenant router")
                router = self._connection.network.routers.create(
                    name = "portal-router",
                    external_gateway_info = dict(network_id = external_network.id)
                )
                self._log("Attaching router to network '%s'", network.name)
                router._add_interface(subnet_id = subnet.id)
            return network
        else:
            raise errors.InvalidOperationError("Could not find internal network.")
    def _external_network(self):
        """
        Returns the external network that connects the tenant router to the outside world.

        Raises :py:class:`.errors.InvalidOperationError` when no network, or
        more than one candidate network, can be determined.
        """
        # First, try to find a network that is tagged as the portal external network
        tagged_network = self._tagged_network("external")
        if tagged_network:
            return tagged_network
        # Next, attempt to use the name template
        if self._external_net_template:
            return self._templated_network(self._external_net_template, "external")
        # If there is exactly one external network available, use that
        params = { "router:external": True }
        networks = (
            list(self._connection.network.networks.all(**params)) +
            list(self._connection.network.networks.all(**params, project_id = None))
        )
        if len(networks) == 1:
            return networks[0]
        elif len(networks) > 1:
            # Ambiguous - refuse to guess between multiple external networks
            raise errors.InvalidOperationError("Multiple external networks found.")
        else:
            raise errors.InvalidOperationError("Could not find external network.")
    def _get_or_create_keypair(self, ssh_key):
        """
        Returns a Nova keypair for the given SSH key.
        """
        # Keypairs are immutable, i.e. once created cannot be changed
        # We create keys with names of the form "<username>-<truncated fingerprint>",
        # which allows for us to recognise when a user has changed their key and create
        # a new one
        # md5 is used only as a short non-security fingerprint of the key material
        fingerprint = hashlib.md5(base64.b64decode(ssh_key.split()[1])).hexdigest()
        key_name = "{username}-{fingerprint}".format(
            # Sanitise the username by replacing non-alphanumerics with -
            username = sanitise_username(self._username),
            # Truncate the fingerprint to 8 characters
            fingerprint = fingerprint[:8]
        )
        try:
            # We need to force a fetch so that the keypair is resolved
            return self._connection.compute.keypairs.get(key_name, force = True)
        except rackit.NotFound:
            return self._connection.compute.keypairs.create(
                name = key_name,
                public_key = ssh_key
            )
_POWER_STATES = {
0: "Unknown",
1: "Running",
3: "Paused",
4: "Shut down",
6: "Crashed",
7: "Suspended",
}
    def _from_api_server(self, api_server, get_tenant_network):
        """
        Returns a machine DTO for the given API server representation.

        ``get_tenant_network`` is a zero-argument callable returning the tenant
        network (lazily evaluated so the network is only fetched when needed).
        """
        status = api_server.status
        fault = api_server.fault.get("message", None)
        task = api_server.task_state
        # Function to get the first IP of a particular type for a machine
        # We prefer to get an IP on the specified tenant network, but if the machine is
        # not connected to that network we just return the first IP
        def ip_of_type(ip_type):
            # Record at most one IPv4 address of the requested type per network
            addresses_of_type = {}
            for net, addresses in api_server.addresses.items():
                for address in addresses:
                    if address["version"] == 4 and address["OS-EXT-IPS:type"] == ip_type:
                        addresses_of_type[net] = address["addr"]
                        break
            # If the machine has more than one IP, attempt to select the one on the tenant net
            if len(addresses_of_type) > 1:
                tenant_network = get_tenant_network()
                if tenant_network and tenant_network.name in addresses_of_type:
                    return addresses_of_type[tenant_network.name]
            # Otherwise just return the first one
            return next(iter(addresses_of_type.values()), None)
        return dto.Machine(
            api_server.id,
            api_server.name,
            # image/flavor may be absent, e.g. for boot-from-volume servers
            getattr(api_server.image, "id", None),
            getattr(api_server.flavor, "id", None),
            dto.MachineStatus(
                getattr(dto.MachineStatusType, status, dto.MachineStatusType.OTHER),
                status,
                _replace_resource_names(fault) if fault else None
            ),
            self._POWER_STATES[api_server.power_state],
            task.capitalize() if task else None,
            ip_of_type("fixed"),
            ip_of_type("floating"),
            tuple(v["id"] for v in api_server.attached_volumes),
            # Return only the metadata items with the specified prefix
            {
                key.removeprefix(self._metadata_prefix): value
                for key, value in api_server.metadata.items()
                if key.startswith(self._metadata_prefix)
            },
            api_server.user_id,
            dateutil.parser.parse(api_server.created)
        )
    @convert_exceptions
    def machines(self):
        """
        See :py:meth:`.base.ScopedSession.machines`.
        """
        self._log("Fetching available servers")
        api_servers = tuple(self._connection.compute.servers.all())
        self._log("Found %s servers", len(api_servers))
        # Note that this will (a) only load the network if required and (b)
        # reuse the network once loaded
        get_tenant_network = Lazy(self._tenant_network)
        return tuple(self._from_api_server(s, get_tenant_network) for s in api_servers)
    @convert_exceptions
    def find_machine(self, id):
        """
        See :py:meth:`.base.ScopedSession.find_machine`.
        """
        self._log("Fetching server with id '%s'", id)
        server = self._connection.compute.servers.get(id)
        # Don't discover the tenant network unless the server is found
        get_tenant_network = Lazy(self._tenant_network)
        return self._from_api_server(server, get_tenant_network)
    @convert_exceptions
    def fetch_logs_for_machine(self, machine):
        """
        See :py:meth:`.base.ScopedSession.fetch_logs_for_machine`.
        """
        # Accept either a DTO or a raw server id
        machine = machine.id if isinstance(machine, dto.Machine) else machine
        self._log("Fetching logs for machine '%s'", machine)
        logs = self._connection.compute.servers.get(machine).logs()
        # Split the logs into lines before returning them
        return logs.splitlines()
    @convert_exceptions
    def create_machine(self, name, image, size, ssh_key = None, metadata = None, userdata = None):
        """
        See :py:meth:`.base.ScopedSession.create_machine`.
        """
        # Start building the server params
        params = dict(name = name)
        # If an id was given, resolve it to an image
        if not isinstance(image, dto.Image):
            try:
                image = self.find_image(image)
            except errors.ObjectNotFoundError:
                raise errors.BadInputError("Invalid image provided.")
        params.update(image_id = str(image.id))
        # Accept either a DTO or a raw flavor id for the size
        size = size.id if isinstance(size, dto.Size) else size
        params.update(flavor_id = size)
        self._log("Creating machine '%s' (image: %s, size: %s)", name, image.name, size)
        # Get the networks to use
        # Always use the tenant network, creating it if required
        params.update(networks = [{ "uuid": self._tenant_network(True).id }])
        # If the image asks for the backdoor network, attach it
        if image.metadata.get(self._metadata_prefix + "private_if"):
            if not self._az_backdoor_net_map:
                raise errors.ImproperlyConfiguredError(
                    "Backdoor network required by image but not configured."
                )
            # Pick an availability zone at random
            # random.choice needs something that supports indexing
            choices = list(self._az_backdoor_net_map.items())
            availability_zone, backdoor_net = random.choice(choices)
            # If the availability zone is "nova" don't specify it, as per the advice
            # in the OpenStack API documentation
            if availability_zone != "nova":
                params.update(availability_zone = availability_zone)
            # Create a port on the backdoor network
            port_params = dict(network_id = backdoor_net)
            # If a vNIC type is specified, add it to the port parameters
            if self._backdoor_vnic_type:
                port_params["binding:vnic_type"] = self._backdoor_vnic_type
            port = self._connection.network.ports.create(port_params)
            params["networks"].append({ "port": port.id })
        # Get the keypair to inject
        if ssh_key:
            keypair = self._get_or_create_keypair(ssh_key)
            params.update(key_name = keypair.name)
        # Build the machine metadata, starting with the tenant name
        machine_metadata = { self._metadata_prefix + "tenant_name": self._tenancy.name }
        # Copy metadata from the image
        machine_metadata.update({
            self._metadata_prefix + key: value
            for key, value in image.metadata.items()
        })
        # Add any provided metadata to the default metadata
        if metadata:
            machine_metadata.update({
                self._metadata_prefix + key: str(value)
                for key, value in metadata.items()
            })
        params.update(metadata = machine_metadata)
        # Add any user data script that was given - it must be base64-encoded
        if userdata:
            # The user data must be base64-encoded
            userdata_b64 = base64.b64encode(userdata.encode()).decode()
            params.update(user_data = userdata_b64)
        server = self._connection.compute.servers.create(params)
        # Re-fetch so the returned DTO reflects the server's actual state
        return self.find_machine(server.id)
@convert_exceptions
def start_machine(self, machine):
"""
See :py:meth:`.base.ScopedSession.start_machine`.
"""
machine = machine.id if isinstance(machine, dto.Machine) else machine
self._log("Starting machine '%s'", machine)
self._connection.compute.servers.get(machine).start()
return self.find_machine(machine)
@convert_exceptions
def stop_machine(self, machine):
"""
See :py:meth:`.base.ScopedSession.stop_machine`.
"""
machine = machine.id if isinstance(machine, dto.Machine) else machine
self._log("Stopping machine '%s'", machine)
self._connection.compute.servers.get(machine).stop()
return self.find_machine(machine)
@convert_exceptions
def restart_machine(self, machine):
"""
See :py:meth:`.base.ScopedSession.restart_machine`.
"""
machine = machine.id if isinstance(machine, dto.Machine) else machine
self._log("Restarting machine '%s'", machine)
self._connection.compute.servers.get(machine).reboot("SOFT")
return self.find_machine(machine)
    @convert_exceptions
    def delete_machine(self, machine):
        """
        See :py:meth:`.base.ScopedSession.delete_machine`.

        Returns the machine's current state, or None once it has gone.
        """
        # Accept either a DTO or a raw server id
        machine = machine.id if isinstance(machine, dto.Machine) else machine
        self._log("Deleting machine '%s'", machine)
        # First, delete any associated ports
        for port in self._connection.network.ports.all(device_id = machine):
            port._delete()
        self._connection.compute.servers.delete(machine)
        # Once the machine is deleted, delete the instance security group
        # (created on demand by add_firewall_rule_to_machine)
        secgroup_name = "instance-{}".format(machine)
        secgroup = self._connection.network.security_groups.find_by_name(secgroup_name)
        if secgroup:
            secgroup._delete()
        # The server may take a while to disappear - report its current state
        try:
            return self.find_machine(machine)
        except errors.ObjectNotFoundError:
            return None
    def _api_rule_is_supported(self, api_rule):
        """
        Returns True when a raw security group rule can be represented as a
        :py:class:`.dto.FirewallRule`.
        """
        # Only consider IPv4 rules for protocols we recognise
        return (
            api_rule["ethertype"] == "IPv4" and
            (
                api_rule["protocol"] is None or
                api_rule["protocol"].upper() in { p.name for p in dto.FirewallRuleProtocol }
            )
        )
    def _from_api_security_group_rule(self, secgroup_names, api_rule):
        """
        Converts a raw security group rule into a :py:class:`.dto.FirewallRule`.

        ``secgroup_names`` maps security group ids to names, used to resolve
        remote-group references.
        """
        params = dict(
            id = api_rule["id"],
            direction = dto.FirewallRuleDirection[api_rule["direction"].upper()],
            # A null protocol means the rule matches any protocol
            protocol = (
                dto.FirewallRuleProtocol[api_rule["protocol"].upper()]
                if api_rule["protocol"] is not None
                else dto.FirewallRuleProtocol.ANY
            )
        )
        if api_rule["port_range_max"]:
            params.update(
                port_range = (
                    api_rule["port_range_min"],
                    api_rule["port_range_max"]
                )
            )
        # The remote end is either another security group or a CIDR
        if api_rule["remote_group_id"]:
            params.update(remote_group = secgroup_names[api_rule["remote_group_id"]])
        else:
            params.update(remote_cidr = api_rule["remote_ip_prefix"] or "0.0.0.0/0")
        return dto.FirewallRule(**params)
    @convert_exceptions
    def fetch_firewall_rules_for_machine(self, machine):
        """
        Returns the firewall groups (and their rules) that apply to a machine.
        """
        # Accept either a DTO or a raw server id
        machine = machine.id if isinstance(machine, dto.Machine) else machine
        # All we get from the machine is security group names
        # This means that we need to load all the security groups to find them
        self._log("Fetching security groups")
        security_groups = list(self._connection.network.security_groups.all())
        # Index the names of the security groups so we can easily resolve them later
        secgroup_names = { s.id: s.name for s in security_groups }
        self._log("Filtering machine security groups for '%s'", machine)
        # Filter the security groups that apply to the machine
        machine = self._connection.compute.servers.get(machine)
        machine_security_groups = [
            group
            for group in security_groups
            if group.name in { sg["name"] for sg in machine.security_groups }
        ]
        # The instance security group is the only editable one
        instance_secgroup = "instance-{}".format(machine.id)
        return [
            dto.FirewallGroup(
                name = group.name,
                editable = group.name == instance_secgroup,
                rules = [
                    self._from_api_security_group_rule(secgroup_names, rule)
                    for rule in group.security_group_rules
                    if self._api_rule_is_supported(rule)
                ]
            )
            for group in machine_security_groups
        ]
    @convert_exceptions
    def add_firewall_rule_to_machine(
        self,
        machine,
        direction,
        protocol,
        port = None,
        remote_cidr = None
    ):
        """
        Adds a rule to the machine's instance security group, creating the
        group (and attaching it to the machine) on first use.
        Returns the machine's updated firewall rules.
        """
        # Accept either a DTO or a raw server id
        machine = machine.id if isinstance(machine, dto.Machine) else machine
        self._log("Finding instance security group for '%s'", machine)
        secgroup_name = "instance-{}".format(machine)
        secgroup = self._connection.network.security_groups.find_by_name(secgroup_name)
        if secgroup:
            self._log("Found existing security group '%s'", secgroup_name)
        else:
            self._log("Creating security group '%s'", secgroup_name)
            secgroup = self._connection.network.security_groups.create(
                name = secgroup_name,
                description = "Instance rules for {}".format(machine)
            )
            # Delete the default rules so the group starts out empty
            for rule in secgroup.security_group_rules:
                self._connection.network.security_group_rules.delete(rule["id"])
            self._connection.compute.servers.get(machine).add_security_group(secgroup.name)
        # Now we have the group, we can add the rule
        params = dict(
            security_group_id = secgroup.id,
            ethertype = "IPv4",
            direction = "ingress" if direction is dto.FirewallRuleDirection.INBOUND else "egress"
        )
        # Omitting the protocol means the rule matches any protocol
        if protocol != dto.FirewallRuleProtocol.ANY:
            params.update(protocol = protocol.name.lower())
        # Only use the port when protocol is UDP or TCP
        if protocol.requires_port() and port:
            params.update(port_range_min = port, port_range_max = port)
        if remote_cidr:
            params.update(remote_ip_prefix = remote_cidr)
        _ = self._connection.network.security_group_rules.create(**params)
        return self.fetch_firewall_rules_for_machine(machine)
@convert_exceptions
def remove_firewall_rule_from_machine(self, machine, rule):
machine = machine.id if isinstance(machine, dto.Machine) else machine
rule = rule.id if isinstance(rule, dto.FirewallRule) else rule
self._connection.network.security_group_rules.delete(rule)
return self.fetch_firewall_rules_for_machine(machine)
    def _from_api_floatingip(self, api_floatingip, ports = None):
        """
        Converts an OpenStack API floatingip object into a :py:class:`.dto.ExternalIp`.

        If ``ports`` is given, it is used as an id -> port cache to avoid one
        API request per floating IP; otherwise the port is fetched on demand.
        """
        if api_floatingip.port_id:
            if ports:
                port = ports[api_floatingip.port_id]
            else:
                port = self._connection.network.ports.get(api_floatingip.port_id)
            # The device that owns the port is the attached machine
            machine_id = port.device_id
        else:
            machine_id = None
        return dto.ExternalIp(
            api_floatingip.id,
            api_floatingip.floating_ip_address,
            machine_id
        )
    @convert_exceptions
    def external_ips(self):
        """
        See :py:meth:`.base.ScopedSession.external_ips`.
        """
        self._log("Fetching floating ips")
        fips = list(self._connection.network.floatingips.all())
        self._log("Found %s floating ips", len(fips))
        # If any floating IPs were found, fetch all the ports in one go and index them
        # by ID so we can locate the attached machines without making one request per port
        if fips:
            self._log("Fetching ports")
            ports = { p.id: p for p in self._connection.network.ports.all() }
        else:
            ports = {}
        return tuple(self._from_api_floatingip(fip, ports) for fip in fips)
    @convert_exceptions
    def allocate_external_ip(self):
        """
        See :py:meth:`.base.ScopedSession.allocate_external_ip`.
        """
        self._log("Allocating new floating ip")
        # Get the external network to allocate IPs on
        extnet = self._external_network()
        # Create a new floating IP on that network
        fip = self._connection.network.floatingips.create(floating_network_id = extnet.id)
        self._log("Allocated new floating ip '%s'", fip.floating_ip_address)
        return self._from_api_floatingip(fip)
    @convert_exceptions
    def find_external_ip(self, ip):
        """
        See :py:meth:`.base.ScopedSession.find_external_ip`.
        """
        self._log("Fetching floating IP with id '%s'", ip)
        fip = self._connection.network.floatingips.get(ip)
        return self._from_api_floatingip(fip)
    @convert_exceptions
    def attach_external_ip(self, ip, machine):
        """
        See :py:meth:`.base.ScopedSession.attach_external_ip`.
        """
        # Accept DTOs or raw ids for both arguments
        machine = machine.id if isinstance(machine, dto.Machine) else machine
        ip = ip.id if isinstance(ip, dto.ExternalIp) else ip
        self._log("Attaching floating ip '%s' to server '%s'", ip, machine)
        # Get the port that attaches the machine to the tenant network
        tenant_network = self._tenant_network()
        if tenant_network:
            port = next(
                self._connection.network.ports.all(
                    device_id = machine,
                    network_id = tenant_network.id
                ),
                None
            )
        else:
            port = None
        # A floating IP can only be bound to a port on the tenant network
        if not port:
            raise errors.InvalidOperationError("Machine is not connected to tenant network.")
        # If there is already a floating IP associated with the port, detach it
        current = self._connection.network.floatingips.find_by_port_id(port.id)
        if current:
            current._update(port_id = None)
        # Find the floating IP instance and associate the floating IP with the port
        fip = self._connection.network.floatingips.get(ip)
        return self._from_api_floatingip(fip._update(port_id = port.id))
@convert_exceptions
def detach_external_ip(self, ip):
"""
See :py:meth:`.base.ScopedSession.detach_external_ip`.
"""
ip = ip.id if isinstance(ip, dto.ExternalIp) else ip
self._log("Detaching floating ip '%s'", ip)
# Find the floating IP instance for the given address
fip = self._connection.network.floatingips.get(ip)
# Remove any association for the floating IP
return self._from_api_floatingip(fip._update(port_id = None))
_VOLUME_STATUSES = {
"creating": dto.VolumeStatus.CREATING,
"available": dto.VolumeStatus.AVAILABLE,
"reserved": dto.VolumeStatus.ATTACHING,
"attaching": dto.VolumeStatus.ATTACHING,
"detaching": dto.VolumeStatus.DETACHING,
"in-use": dto.VolumeStatus.IN_USE,
"deleting": dto.VolumeStatus.DELETING,
"error": dto.VolumeStatus.ERROR,
"error_deleting": dto.VolumeStatus.ERROR,
"error_backing-up": dto.VolumeStatus.ERROR,
"error_restoring": dto.VolumeStatus.ERROR,
"error_extending": dto.VolumeStatus.ERROR,
}
    def _from_api_volume(self, api_volume):
        """
        Converts an OpenStack API volume object into a :py:class:`.dto.Volume`.
        """
        # Work out the volume status
        status = self._VOLUME_STATUSES.get(
            api_volume.status.lower(),
            dto.VolumeStatus.OTHER
        )
        # Only the first attachment (if any) is reported
        try:
            attachment = api_volume.attachments[0]
        except IndexError:
            attachment = None
        return dto.Volume(
            api_volume.id,
            # If there is no name, use part of the ID
            api_volume.name or api_volume.id[:13],
            status,
            api_volume.size,
            attachment["server_id"] if attachment else None,
            attachment["device"] if attachment else None
        )
@convert_exceptions
def volumes(self):
"""
See :py:meth:`.base.ScopedSession.volumes`.
"""
self._log("Fetching available volumes")
volumes = tuple(
self._from_api_volume(v)
for v in self._connection.block_store.volumes.all()
)
self._log("Found %s volumes", len(volumes))
return volumes
@convert_exceptions
def find_volume(self, id):
    """
    See :py:meth:`.base.ScopedSession.find_volume`.
    """
    self._log("Fetching volume with id '%s'", id)
    api_volume = self._connection.block_store.volumes.get(id)
    return self._from_api_volume(api_volume)
@convert_exceptions
def create_volume(self, name, size):
    """
    See :py:meth:`.base.ScopedSession.create_volume`.
    """
    self._log("Creating volume '%s' (size: %s)", name, size)
    created = self._connection.block_store.volumes.create(name = name, size = size)
    # Re-fetch so the returned DTO reflects the server-side state
    return self.find_volume(created.id)
@convert_exceptions
def delete_volume(self, volume):
    """
    See :py:meth:`.base.ScopedSession.delete_volume`.

    Returns the refreshed volume, or None once it is gone.
    """
    if not isinstance(volume, dto.Volume):
        volume = self.find_volume(volume)
    # Deletion is only permitted from a stable, unattached state
    if volume.status not in [dto.VolumeStatus.AVAILABLE, dto.VolumeStatus.ERROR]:
        raise errors.InvalidOperationError(
            "Cannot delete volume with status {}.".format(volume.status.name)
        )
    self._log("Deleting volume '%s'", volume.id)
    self._connection.block_store.volumes.delete(volume.id)
    try:
        return self.find_volume(volume.id)
    except errors.ObjectNotFoundError:
        # The volume disappeared before we could re-fetch it
        return None
@convert_exceptions
def attach_volume(self, volume, machine):
    """
    See :py:meth:`.base.ScopedSession.attach_volume`.
    """
    if not isinstance(volume, dto.Volume):
        volume = self.find_volume(volume)
    if isinstance(machine, dto.Machine):
        machine = machine.id
    # Nothing to do when already attached to the requested machine
    if volume.machine_id == machine:
        return volume
    # Only an available volume may be attached
    if volume.status != dto.VolumeStatus.AVAILABLE:
        raise errors.InvalidOperationError(
            "Volume must be AVAILABLE before attaching."
        )
    self._log("Attaching volume '%s' to server '%s'", volume.id, machine)
    server = self._connection.compute.servers.get(machine)
    server.volume_attachments.create(volume_id = volume.id)
    # Force-refresh the cached volume so the new attachment is visible
    self._connection.block_store.volumes.get(volume.id, force = True)
    return self.find_volume(volume.id)
@convert_exceptions
def detach_volume(self, volume):
    """
    See :py:meth:`.base.ScopedSession.detach_volume`.
    """
    if not isinstance(volume, dto.Volume):
        volume = self.find_volume(volume)
    # Already detached - nothing to do
    if not volume.machine_id:
        return volume
    self._log("Detaching volume '%s' from '%s'", volume.id, volume.machine_id)
    server = self._connection.compute.servers.get(volume.machine_id)
    attachment = server.volume_attachments.find_by_volume_id(volume.id, as_params = False)
    attachment._delete()
    # Force-refresh the cached volume so the detachment is visible
    self._connection.block_store.volumes.get(volume.id, force = True)
    return self.find_volume(volume.id)
@property
def cluster_manager(self):
    """
    Returns the cluster manager for the tenancy.

    Raises UnsupportedOperationError when no cluster engine is
    configured.
    """
    # Created lazily on first access and cached on the instance
    if not hasattr(self, "_cluster_manager"):
        engine = self._cluster_engine
        self._cluster_manager = (
            engine.create_manager(self._username, self._tenancy)
            if engine
            else None
        )
    # No manager means clusters are not supported
    if not self._cluster_manager:
        raise errors.UnsupportedOperationError(
            "Clusters are not supported for this tenancy."
        )
    return self._cluster_manager
def cluster_credential(self):
    """
    See :py:meth:`.base.ScopedSession.cluster_credential`.
    """
    # The credential re-uses the session's current OpenStack token
    conn = self._connection
    return Credential(
        type = "openstack_token",
        data = dict(
            auth_url = conn.auth_url,
            project_id = conn.project_id,
            token = conn.token,
            verify_ssl = conn.verify
        )
    )
def cluster_parameters(self):
    """
    See :py:meth:`.base.ScopedSession.cluster_parameters`.
    """
    # Tell the cluster engine which networks to deploy onto
    external = self._external_network()
    internal = self._tenant_network(True)
    return dict(
        cluster_floating_network = external.name,
        cluster_network = internal.name
    )
def cluster_modify(self, cluster):
    """
    See :py:meth:`.base.ScopedSession.cluster_modify`.

    Post-processes a cluster before returning it to the caller:
    strips the parameters injected by :py:meth:`cluster_parameters`,
    merges in any tags from the corresponding orchestration stack and
    rewrites known OpenStack quota errors into friendlier messages.
    """
    # Remove injected parameters from the cluster params
    params = {
        k: v
        for k, v in cluster.parameter_values.items()
        if k not in {"cluster_floating_network", "cluster_network"}
    }
    # Add any tags attached to the stack
    try:
        stack = self._connection.orchestration.stacks.find_by_stack_name(cluster.name)
    except (api.ServiceNotSupported, rackit.NotFound):
        # No orchestration service or no matching stack - keep tags as-is
        tags = cluster.tags
    else:
        tags = list(cluster.tags)
        if stack:
            tags.extend(stack.tags or [])
    original_error = (cluster.error_message or "").lower()
    # Convert quota-related error messages based on known OpenStack errors
    if any(m in original_error for m in {"quota exceeded", "exceedsavailablequota"}):
        if "floatingip" in original_error:
            # Quota errors mentioning floating IPs get a specific message
            error_message = (
                "Could not find an external IP for deployment. "
                "Please ensure an external IP is available and try again."
            )
        else:
            error_message = (
                "Requested resources exceed at least one quota. "
                "Please check your tenancy quotas and try again."
            )
    elif cluster.error_message:
        # presumably rewrites raw resource ids into readable names -
        # see _replace_resource_names (defined elsewhere in this module)
        error_message = _replace_resource_names(cluster.error_message)
    else:
        error_message = None
    return dataclasses.replace(
        cluster,
        parameter_values = params,
        tags = tags,
        error_message = error_message
    )
@convert_exceptions
def close(self):
    """
    See :py:meth:`.base.ScopedSession.close`.
    """
    # Close the underlying API connection first ...
    self._connection.close()
    # ... then any lazily created cluster manager
    manager = getattr(self, "_cluster_manager", None)
    if manager:
        manager.close()
| 41.244094 | 99 | 0.60252 |
acf2c31a33045cdd8694f82c79523ae69cfd5a12 | 45,560 | py | Python | ib_insync/ib.py | stnatter/ib_insync | b79d6c4c275dacd83c7dd8833cc64b9cd1ddf4e5 | [
"BSD-2-Clause"
] | 2 | 2019-01-17T03:31:16.000Z | 2019-02-25T12:15:00.000Z | ib_insync/ib.py | SteffenNa/ib_insync | b79d6c4c275dacd83c7dd8833cc64b9cd1ddf4e5 | [
"BSD-2-Clause"
] | null | null | null | ib_insync/ib.py | SteffenNa/ib_insync | b79d6c4c275dacd83c7dd8833cc64b9cd1ddf4e5 | [
"BSD-2-Clause"
] | null | null | null | import asyncio
import logging
import datetime
import time
from typing import Iterator
from collections.abc import Awaitable # @UnusedImport
from ibapi.account_summary_tags import AccountSummaryTags
from ib_insync.client import Client
from ib_insync.wrapper import Wrapper
from ib_insync.contract import Contract
from ib_insync.ticker import Ticker
from ib_insync.order import Order, LimitOrder, StopOrder, MarketOrder
from ib_insync.objects import *
import ib_insync.util as util
# Public API of this module
__all__ = ['IB']

# Module-level logger for connection and request diagnostics
_logger = logging.getLogger('ib_insync.ib')
def api(f): return f # visual marker for API request methods
class IB:
"""
Provides both a blocking and an asynchronous interface
to the IB Python API, using asyncio networking and event loop.
The IB class offers direct access to the current state, such as
orders, executions, positions, tickers etc. This state is
automatically kept in sync with the TWS/IBG application.
This class has most request methods of EClient, with the
same names and parameters (except for the reqId parameter
which is not needed anymore).
Request methods that return a result come in two versions:
* Blocking: Will block until complete and return the result.
The current state will be kept updated while the request is ongoing;
* Asynchronous: All methods that have the "Async" postfix.
Implemented as coroutines or methods that return a Future and
intended for advanced users.
**The One Rule:**
While some of the request methods are blocking from the perspective
of the user, the framework will still keep spinning in the background
and handle all messages received from TWS/IBG. It is important to
not block the framework from doing its work. If, for example,
the user code spends much time in a calculation, or uses time.sleep()
with a long delay, the framework will stop spinning, messages
accumulate and things may go awry.
The one rule when working with the IB class is therefore that
**user code may not block for too long**.
To be clear, the IB request methods are okay to use and do not
count towards the user operation time, no matter how long the
request takes to finish.
So what is "too long"? That depends on the situation. If, for example,
the timestamp of tick data is to remain accurate within a millisecond,
then the user code must not spend longer than a millisecond. If, on
the other extreme, there is very little incoming data and there
is no desire for accurate timestamps, then the user code can block
for hours.
If a user operation takes a long time then it can be farmed out
to a different process.
Alternatively the operation can be made such that it periodically
calls IB.sleep(0); This will let the framework handle any pending
work and return when finished. The operation should be aware
that the current state may have been updated during the sleep(0) call.
For introducing a delay, never use time.sleep() but use
:py:meth:`.sleep` instead.
"""
def __init__(self):
    # The wrapper decodes incoming messages and maintains the session
    # state; the client manages the outgoing connection to TWS/IBG
    self.wrapper = Wrapper()
    self.client = Client(self.wrapper)

def __del__(self):
    # Best-effort disconnect when the instance is garbage collected
    self.disconnect()

def __enter__(self):
    # Support use as a context manager: ``with IB() as ib: ...``
    return self

def __exit__(self, *_exc):
    # Always disconnect when leaving the ``with`` block,
    # also when an exception was raised
    self.disconnect()

def __repr__(self):
    # One-line summary of the current connection state
    conn = (f'connected to {self.client.host}:'
            f'{self.client.port} clientId={self.client.clientId}' if
            self.client.isConnected() else 'not connected')
    return f'<{self.__class__.__name__} {conn}>'
def connect(self, host: str, port: int, clientId: int, timeout: float=2):
    """
    Connect to a TWS or IB gateway application running at host:port.
    After the connect the client is immediately ready to serve requests.

    :param host: Hostname or IP address of the TWS/IBG application.
    :param port: API port as configured in TWS/IBG.
    :param clientId: Client id; must be unique per connected client.
    :param timeout: Seconds to wait for the connection to come up.

    This method is blocking.
    """
    self.run(self.connectAsync(host, port, clientId, timeout))
    # Return self so calls can be chained: ib = IB().connect(...)
    return self

def disconnect(self):
    """
    Disconnect from a TWS or IB gateway application.
    This will clear all session state.
    """
    # Clear the session state first so it is reset even when
    # not currently connected
    self.wrapper.reset()
    if not self.client.isConnected():
        return
    # Log a summary of the session traffic before closing the socket
    stats = self.client.connectionStats()
    _logger.info(
        f'Disconnecting from {self.client.host}:{self.client.port}, '
        f'{util.formatSI(stats.numBytesSent)}B sent '
        f'in {stats.numMsgSent} messages, '
        f'{util.formatSI(stats.numBytesRecv)}B received '
        f'in {stats.numMsgRecv} messages, '
        f'session time {util.formatSI(stats.duration)}s.')
    self.client.disconnect()
@staticmethod
def run(*awaitables: Awaitable):
    """
    By default run the event loop forever.

    When awaitables (like Tasks, Futures or coroutines) are given then
    run the event loop until each has completed and return their results.
    """
    loop = asyncio.get_event_loop()
    if not awaitables:
        result = loop.run_forever()
    else:
        if len(awaitables) == 1:
            future = awaitables[0]
        else:
            # Combine the awaitables; results keep the given order
            future = asyncio.gather(*awaitables)
        result = util.syncAwait(future)
    return result

@staticmethod
def sleep(secs: float=0.02) -> bool:
    """
    Wait for the given amount of seconds while everything still keeps
    processing in the background. Never use time.sleep().

    Always returns True (handy in boolean expressions).
    """
    IB.run(asyncio.sleep(secs))
    return True
@staticmethod
def timeRange(start: datetime.time, end: datetime.time,
        step: float) -> Iterator[datetime.datetime]:
    """
    Iterator that waits periodically until certain time points are
    reached while yielding those time points.

    The start and end parameters can be specified as
    datetime.datetime, or as datetime.time in which case today
    is used as the date.

    The step parameter is the number of seconds of each period.
    """
    assert step > 0
    if isinstance(start, datetime.time):
        start = datetime.datetime.combine(datetime.date.today(), start)
    if isinstance(end, datetime.time):
        end = datetime.datetime.combine(datetime.date.today(), end)
    delta = datetime.timedelta(seconds=step)
    t = start
    # Skip time points that already lie in the past. Take "now" in
    # start's own timezone: comparing naive and aware datetimes raises
    # TypeError, and waitUntil below applies the same convention.
    while t < datetime.datetime.now(t.tzinfo):
        t += delta
    while t <= end:
        IB.waitUntil(t)
        yield t
        t += delta
@staticmethod
def waitUntil(t: datetime.time) -> bool:
    """
    Wait until the given time t is reached.
    The time can be specified as datetime.datetime,
    or as datetime.time in which case today is used as the date.

    Always returns True.
    """
    if isinstance(t, datetime.time):
        t = datetime.datetime.combine(datetime.date.today(), t)
    # Use t's own timezone so naive and aware datetimes both work
    now = datetime.datetime.now(t.tzinfo)
    secs = (t - now).total_seconds()
    IB.run(asyncio.sleep(secs))
    return True

def waitOnUpdate(self) -> bool:
    """
    Wait on any new update to arrive from the network.

    Always returns True.
    """
    self.run(self.wrapper.updateEvent.wait())
    return True

def loopUntil(self, condition=None, timeout: float=0) -> Iterator:
    """
    Iterate until condition is met, with optional timeout in seconds.
    The given optional condition is tested after every network packet.
    The yielded value is that of the condition or False when timed out.
    """
    endTime = time.time() + timeout
    while True:
        test = condition and condition()
        if test:
            # Condition met: yield the truthy value one last time
            yield test
            return
        elif timeout and time.time() > endTime:
            # Timed out: signal with a final False
            yield False
            return
        else:
            yield test
        if timeout:
            try:
                # Wait for the next update, but no longer than the
                # time remaining until the deadline
                self.run(asyncio.wait_for(
                    self.wrapper.updateEvent.wait(),
                    endTime - time.time()))
            except asyncio.TimeoutError:
                pass
        else:
            self.waitOnUpdate()
def setCallback(self, eventName, callback):
    """
    Register an optional callback to be invoked after an event.
    Available events::

        * updated()
        * pendingTickers(set(pendingTickers))
        * orderStatus(Trade)
        * execDetails(Trade, Fill)
        * commissionReport(Trade, Fill, CommissionReport)
        * updatePortfolio(PortfolioItem)
        * position(Position)
        * tickNews(NewsTick)
        * error(errorCode, errorString)

    Supplying None as the callback unsets it.
    """
    # The wrapper does the actual callback bookkeeping
    self.wrapper.setCallback(eventName, callback)
def managedAccounts(self) -> [str]:
    """
    List of account names.
    """
    # Return a copy so callers cannot mutate the session state
    return [account for account in self.wrapper.accounts]
def accountValues(self) -> [AccountValue]:
    """
    List of account values for the default account.
    """
    # The first managed account is the default account
    account = self.wrapper.accounts[0]
    return [av for av in self.wrapper.accountValues.values()
            if av.account == account]

def accountSummary(self, account: str='') -> [AccountValue]:
    """
    List of account values for the given account,
    or of all accounts if account is left blank.

    This method is blocking on first run, non-blocking after that.
    """
    if not self.wrapper.acctSummary:
        # loaded on demand since it takes ca. 250 ms
        self.reqAccountSummary()
    if account:
        return [v for v in self.wrapper.acctSummary.values()
                if v.account == account]
    else:
        return list(self.wrapper.acctSummary.values())

def portfolio(self) -> [PortfolioItem]:
    """
    List of portfolio items of the default account.
    """
    account = self.wrapper.accounts[0]
    return [v for v in self.wrapper.portfolio[account].values()]

def positions(self, account: str='') -> [Position]:
    """
    List of positions for the given account,
    or of all accounts if account is left blank.
    """
    if account:
        return list(self.wrapper.positions[account].values())
    else:
        # Flatten the per-account position dicts into a single list
        return [v for d in self.wrapper.positions.values()
                for v in d.values()]

def trades(self) -> [Trade]:
    """
    List of all order trades from this session.
    """
    return list(self.wrapper.trades.values())

def openTrades(self) -> [Trade]:
    """
    List of all open order trades.
    """
    return [v for v in self.wrapper.trades.values()
            if v.orderStatus.status in OrderStatus.ActiveStates]

def orders(self) -> [Order]:
    """
    List of all orders from this session.
    """
    return list(trade.order
            for trade in self.wrapper.trades.values())

def openOrders(self) -> [Order]:
    """
    List of all open orders.
    """
    return [trade.order for trade in self.wrapper.trades.values()
            if trade.orderStatus.status in OrderStatus.ActiveStates]

def fills(self) -> [Fill]:
    """
    List of all fills from this session.
    """
    return list(self.wrapper.fills.values())

def executions(self) -> [Execution]:
    """
    List of all executions from this session.
    """
    return list(fill.execution for fill in self.wrapper.fills.values())

def ticker(self, contract: Contract) -> Ticker:
    """
    Get ticker of the given contract. It must have been requested before
    with reqMktData with the same contract object. The ticker may not be
    ready yet if called directly after :py:meth:`.reqMktData`.

    Returns None when there is no ticker for the contract.
    """
    # Tickers are keyed on the identity of the contract object,
    # not on contract equality
    return self.wrapper.tickers.get(id(contract))

def tickers(self) -> [Ticker]:
    """
    Get a list of all tickers.
    """
    return list(self.wrapper.tickers.values())

def pendingTickers(self) -> [Ticker]:
    """
    Get a list of all tickers that have pending ticks or domTicks.
    """
    return list(self.wrapper.pendingTickers)
def realtimeBars(self):
    """
    Get a list of all live updated bars. These can be 5 second realtime
    bars or live updated historical bars.
    """
    liveBars = self.wrapper.reqId2Bars
    return [bars for bars in liveBars.values()]
def newsTicks(self) -> [NewsTick]:
    """
    List of ticks with headline news.
    The article itself can be retrieved with :py:meth:`.reqNewsArticle`.
    """
    # Note: this returns the live list, not a copy
    return self.wrapper.newsTicks

def newsBulletins(self) -> [NewsBulletin]:
    """
    List of IB news bulletins.
    """
    return list(self.wrapper.newsBulletins.values())
def reqTickers(self, *contracts: [Contract],
        regulatorySnapshot: bool=False) -> [Ticker]:
    """
    Request and return a list of snapshot tickers for the given contracts.
    The list is returned when all tickers are ready.

    This method is blocking.
    """
    # Delegates to the async variant and blocks until done
    return self.run(self.reqTickersAsync(*contracts,
            regulatorySnapshot=regulatorySnapshot))

def qualifyContracts(self, *contracts: [Contract]) -> [Contract]:
    """
    Fully qualify the given contracts in-place. This will fill in
    the missing fields in the contract, especially the conId.

    Returns a list of contracts that have been successfully qualified.

    This method is blocking.
    """
    return self.run(self.qualifyContractsAsync(*contracts))
def bracketOrder(self, action: str, quantity: float,
        limitPrice: float, takeProfitPrice: float,
        stopLossPrice: float) -> BracketOrder:
    """
    Create a limit order that is bracketed by a take-profit order and
    a stop-loss order. Submit the bracket like:

    .. code-block:: python

        for o in bracket:
            ib.placeOrder(contract, o)

    https://interactivebrokers.github.io/tws-api/bracket_order.html
    """
    assert action in ('BUY', 'SELL')
    reverseAction = 'BUY' if action == 'SELL' else 'SELL'
    # transmit=False on the first two orders keeps them pending at TWS
    # until the final order (transmit=True) arrives, so the whole
    # bracket becomes active atomically (see the linked TWS docs)
    parent = LimitOrder(
        action, quantity, limitPrice,
        orderId=self.client.getReqId(),
        transmit=False)
    takeProfit = LimitOrder(
        reverseAction, quantity, takeProfitPrice,
        orderId=self.client.getReqId(),
        transmit=False,
        parentId=parent.orderId)
    stopLoss = StopOrder(
        reverseAction, quantity, stopLossPrice,
        orderId=self.client.getReqId(),
        transmit=True,
        parentId=parent.orderId)
    return BracketOrder(parent, takeProfit, stopLoss)

def oneCancelsAll(self, orders: [Order],
        ocaGroup: str, ocaType: int) -> [Order]:
    """
    Place the trades in the same OCA group.

    The orders are modified in-place and also returned.

    https://interactivebrokers.github.io/tws-api/oca.html
    """
    for o in orders:
        o.ocaGroup = ocaGroup
        o.ocaType = ocaType
    return orders

def whatIfOrder(self, contract: Contract, order: Order) -> OrderState:
    """
    Retrieve commission and margin impact without actually
    placing the order. The given order will not be modified in any way.

    This method is blocking.
    """
    return self.run(self.whatIfOrderAsync(contract, order))
@api
def placeOrder(self, contract: Contract, order: Order) -> Trade:
    """
    Place a new order or modify an existing order.
    Returns an Trade that is kept live updated with
    status changes, fills, etc.
    """
    # Reuse the order's id when modifying, allocate a fresh one otherwise
    orderId = order.orderId or self.client.getReqId()
    self.client.placeOrder(orderId, contract, order)
    now = datetime.datetime.now(datetime.timezone.utc)
    # Normalize order-like objects into a real Order instance
    if not isinstance(order, Order):
        order = Order(**order.__dict__)
    trade = self.wrapper.trades.get(orderId)
    if trade:
        # this is a modification of an existing order
        assert trade.orderStatus.status in OrderStatus.ActiveStates
        logEntry = TradeLogEntry(now,
                trade.orderStatus.status, 'Modify')
        trade.log.append(logEntry)
        _logger.info(f'placeOrder: Modify order {trade}')
    else:
        # this is a new order
        order.orderId = orderId
        orderStatus = OrderStatus(status=OrderStatus.PendingSubmit)
        logEntry = TradeLogEntry(now, orderStatus.status, '')
        trade = Trade(
                contract, order, orderStatus, [], [logEntry])
        self.wrapper.trades[orderId] = trade
        _logger.info(f'placeOrder: New order {trade}')
    return trade

@api
def cancelOrder(self, order: Order) -> Trade:
    """
    Cancel the order and return the Trade it belongs to.

    Returns None when the order is not known.
    """
    self.client.cancelOrder(order.orderId)
    now = datetime.datetime.now(datetime.timezone.utc)
    trade = self.wrapper.trades.get(order.orderId)
    if trade:
        # Only log a pending-cancel entry for still-active trades
        if trade.orderStatus.status in OrderStatus.ActiveStates:
            logEntry = TradeLogEntry(now, OrderStatus.PendingCancel, '')
            trade.log.append(logEntry)
        _logger.info(f'cancelOrder: {trade}')
    else:
        _logger.error(f'cancelOrder: Unknown orderId {order.orderId}')
    return trade
@api
def reqGlobalCancel(self) -> None:
    """
    Cancel all active trades including those placed by other
    clients or TWS/IB gateway.
    """
    self.client.reqGlobalCancel()
    _logger.info(f'reqGlobalCancel')

@api
def reqAccountUpdates(self) -> None:
    """
    This is called at startup - no need to call again.

    Request account and portfolio values of the default account
    and keep updated. Returns when both account values and portfolio
    are filled.

    This method is blocking.
    """
    self.run(self.reqAccountUpdatesAsync())

@api
def reqAccountSummary(self) -> None:
    """
    It is recommended to use :py:meth:`.accountSummary` instead.

    Request account values for all accounts and keep them updated.
    Returns when account summary is filled.

    This method is blocking.
    """
    self.run(self.reqAccountSummaryAsync())

@api
def reqOpenOrders(self) -> [Order]:
    """
    Request and return a list a list of open orders.

    This method can give stale information where a new open order is not
    reported or an already filled or canceled order is reported as open.
    It is recommended to use the more reliable and much faster
    :py:meth:`.openTrades` or :py:meth:`.openOrders` methods instead.

    This method is blocking.
    """
    return self.run(self.reqOpenOrdersAsync())

@api
def reqExecutions(self,
        execFilter: ExecutionFilter=None) -> [Fill]:
    """
    It is recommended to use :py:meth:`.fills` or
    :py:meth:`.executions` instead.

    Request and return a list a list of fills.

    This method is blocking.
    """
    return self.run(self.reqExecutionsAsync(execFilter))

@api
def reqPositions(self) -> [Position]:
    """
    It is recommended to use :py:meth:`.positions` instead.

    Request and return a list of positions for all accounts.

    This method is blocking.
    """
    return self.run(self.reqPositionsAsync())

@api
def reqContractDetails(self, contract: Contract) -> [ContractDetails]:
    """
    Get a list of contract details that match the given contract.
    If the returned list is empty then the contract is not known;
    If the list has multiple values then the contract is ambiguous.

    The fully qualified contract is available in the the
    ContractDetails.summary attribute.

    This method is blocking.

    https://interactivebrokers.github.io/tws-api/contract_details.html
    """
    return self.run(self.reqContractDetailsAsync(contract))
@api
def reqMatchingSymbols(self, pattern: str) -> [ContractDescription]:
    """
    Request contract descriptions of contracts that match the given
    pattern.

    This method is blocking.

    https://interactivebrokers.github.io/tws-api/matching_symbols.html
    """
    return self.run(self.reqMatchingSymbolsAsync(pattern))

@api
def reqRealTimeBars(self, contract, barSize, whatToShow,
        useRTH, realTimeBarsOptions=None) -> [RealTimeBar]:
    """
    Request realtime 5 second bars.

    Returns a live-updated list of bars; cancel the subscription
    with :py:meth:`.cancelRealTimeBars`.

    https://interactivebrokers.github.io/tws-api/realtime_bars.html
    """
    reqId = self.client.getReqId()
    bars = self.wrapper.startLiveBars(reqId)
    self.client.reqRealTimeBars(reqId, contract, barSize, whatToShow,
            useRTH, realTimeBarsOptions)
    return bars

@api
def cancelRealTimeBars(self, bars):
    """
    Cancel the realtime bars subscription.

    The bars object must be the one returned by
    :py:meth:`.reqRealTimeBars`.
    """
    reqId = self.wrapper.endLiveBars(bars)
    if reqId:
        self.client.cancelRealTimeBars(reqId)
    else:
        _logger.error('cancelRealTimeBars: No reqId found')

@api
def reqHistoricalData(self, contract: Contract, endDateTime: object,
        durationStr: str, barSizeSetting: str,
        whatToShow: str, useRTH: bool,
        formatDate: int=1, keepUpToDate: bool=False,
        chartOptions=None) -> [BarData]:
    """
    The endDateTime can be set to '' to indicate the current time,
    or it can be given as a datetime.date or datetime.datetime,
    or it can be given as a string in 'yyyyMMdd HH:mm:ss' format.

    If formatDate=2 is used for an intraday request the returned date
    field will be a timezone-aware datetime.datetime with UTC timezone.

    This method is blocking.

    https://interactivebrokers.github.io/tws-api/historical_bars.html
    """
    return self.run(self.reqHistoricalDataAsync(contract, endDateTime,
            durationStr, barSizeSetting, whatToShow,
            useRTH, formatDate, keepUpToDate, chartOptions))

@api
def cancelHistoricalData(self, bars):
    """
    Cancel the update subscription for the historical bars.

    Only relevant when the bars were requested with keepUpToDate=True.
    """
    reqId = self.wrapper.endLiveBars(bars)
    if reqId:
        self.client.cancelHistoricalData(reqId)
    else:
        _logger.error('cancelHistoricalData: No reqId found')

@api
def reqHistoricalTicks(self, contract, startDateTime, endDateTime,
        numberOfTicks, whatToShow, useRth, ignoreSize, miscOptions):
    """
    Request historical ticks.

    This method is blocking.

    https://interactivebrokers.github.io/tws-api/historical_time_and_sales.html
    """
    return self.run(self.reqHistoricalTicksAsync(contract,
            startDateTime, endDateTime, numberOfTicks, whatToShow, useRth,
            ignoreSize, miscOptions))
@api
def reqMarketDataType(self, marketDataType: int):
    """
    Set the market data type for all subsequent market data requests.

    marketDataType:

    * 1 = Live
    * 2 = Frozen
    * 3 = Delayed
    * 4 = Delayed frozen

    https://interactivebrokers.github.io/tws-api/market_data_type.html
    """
    self.client.reqMarketDataType(marketDataType)

@api
def reqHeadTimeStamp(self, contract: Contract, whatToShow: str,
        useRTH: bool, formatDate: int=1) -> datetime.datetime:
    """
    Get the datetime of earliest available historical data for the contract.

    If formatDate=2 then the result is returned as a
    timezone-aware datetime.datetime with UTC timezone.

    This method is blocking.
    """
    return self.run(self.reqHeadTimeStampAsync(contract, whatToShow,
            useRTH, formatDate))

@api
def reqMktData(self, contract: Contract, genericTickList: str,
        snapshot: bool, regulatorySnapshot: bool,
        mktDataOptions=None) -> Ticker:
    """
    Subscribe to tick data or request a snapshot.
    Returns the Ticker that holds the market data. The ticker will
    initially be empty and gradually (after a couple of seconds)
    be filled.

    https://interactivebrokers.github.io/tws-api/md_request.html
    """
    reqId = self.client.getReqId()
    # The ticker is registered before the request so no tick is missed
    ticker = self.wrapper.startTicker(reqId, contract)
    self.client.reqMktData(reqId, contract, genericTickList,
            snapshot, regulatorySnapshot, mktDataOptions)
    return ticker

def cancelMktData(self, contract: Contract):
    """
    Unsubscribe tick data for the given contract.
    The contract object must be the same as used to subscribe with.
    """
    ticker = self.ticker(contract)
    reqId = self.wrapper.endTicker(ticker)
    if reqId:
        self.client.cancelMktData(reqId)
    else:
        _logger.error('cancelMktData: '
                f'No reqId found for contract {contract}')
@api
def reqMktDepthExchanges(self):
    """
    Get those exchanges that have have multiple market makers
    (and have ticks returned with marketMaker info).

    This method is blocking.
    """
    return self.run(self.reqMktDepthExchangesAsync())

@api
def reqMktDepth(self, contract: Contract, numRows: int=5,
        mktDepthOptions=None) -> Ticker:
    """
    Subscribe to market depth data for the given contract.

    Returns the Ticker that will hold the market depth data;
    unsubscribe with :py:meth:`.cancelMktDepth`.
    """
    reqId = self.client.getReqId()
    ticker = self.wrapper.startTicker(reqId, contract, isMktDepth=True)
    self.client.reqMktDepth(reqId, contract, numRows, mktDepthOptions)
    return ticker

@api
def cancelMktDepth(self, contract: Contract):
    """
    Unsubscribe market depth data for the given contract.
    The contract object must be the same as used to subscribe with.
    """
    ticker = self.ticker(contract)
    reqId = self.wrapper.endTicker(ticker, isMktDepth=True)
    if reqId:
        self.client.cancelMktDepth(reqId)
    else:
        _logger.error('cancelMktDepth: '
                f'No reqId found for contract {contract}')

@api
def reqHistogramData(self, contract: Contract,
        useRTH: bool, period: str) -> [HistogramData]:
    """
    Get histogram data of the contract over the period.

    This method is blocking.

    https://interactivebrokers.github.io/tws-api/histograms.html
    """
    return self.run(self.reqHistogramDataAsync(
            contract, useRTH, period))

@api
def reqFundamentalData(self, contract: Contract, reportType: str,
        fundamentalDataOptions=None) -> str:
    """
    Get Reuters' fundamental data of the contract in XML format.

    This method is blocking.

    https://interactivebrokers.github.io/tws-api/reuters_fundamentals.html
    """
    return self.run(self.reqFundamentalDataAsync(contract, reportType,
            fundamentalDataOptions))

@api
def reqScannerData(self, subscription: ScannerSubscription,
        scannerSubscriptionOptions=None) -> [ScanData]:
    """
    Do a market scan.

    This method is blocking.

    https://interactivebrokers.github.io/tws-api/market_scanners.html
    """
    return self.run(self.reqScannerSubscriptionAsync(
            subscription, scannerSubscriptionOptions))

@api
def reqScannerParameters(self) -> str:
    """
    Requests an XML list of scanner parameters.

    This method is blocking.
    """
    return self.run(self.reqScannerParametersAsync())
@api
def calculateImpliedVolatility(self, contract: Contract,
        optionPrice: float, underPrice: float,
        implVolOptions=None) -> OptionComputation:
    """
    Calculate the volatility given the option price.

    This method is blocking.

    https://interactivebrokers.github.io/tws-api/option_computations.html
    """
    return self.run(self.calculateImpliedVolatilityAsync(
            contract, optionPrice, underPrice, implVolOptions))

@api
def calculateOptionPrice(self, contract: Contract,
        volatility: float, underPrice: float,
        optPrcOptions=None) -> OptionComputation:
    """
    Calculate the option price given the volatility.

    This method is blocking.

    https://interactivebrokers.github.io/tws-api/option_computations.html
    """
    return self.run(self.calculateOptionPriceAsync(
            contract, volatility, underPrice, optPrcOptions))

@api
def reqSecDefOptParams(self, underlyingSymbol: str,
        futFopExchange: str, underlyingSecType: str,
        underlyingConId: str) -> [OptionChain]:
    """
    Get the option chain.

    This method is blocking.

    https://interactivebrokers.github.io/tws-api/options.html
    """
    return self.run(self.reqSecDefOptParamsAsync(underlyingSymbol,
            futFopExchange, underlyingSecType, underlyingConId))

@api
def exerciseOptions(self, contract, exerciseAction, exerciseQuantity,
        account, override):
    """
    Exercise an options contract; the request id is allocated
    internally and no result is returned.

    https://interactivebrokers.github.io/tws-api/options.html
    """
    reqId = self.client.getReqId()
    self.client.exerciseOptions(reqId, contract, exerciseAction,
            exerciseQuantity, account, override)
@api
def reqNewsProviders(self) -> [NewsProvider]:
    """
    Get a list of news providers.

    This method is blocking.
    """
    return self.run(self.reqNewsProvidersAsync())

@api
def reqNewsArticle(self, providerCode: str, articleId: str):
    """
    Get the body of a news article.

    This method is blocking.

    https://interactivebrokers.github.io/tws-api/news.html
    """
    return self.run(self.reqNewsArticleAsync(providerCode, articleId))

@api
def reqHistoricalNews(self, conId: int, providerCodes: str,
        startDateTime: str, endDateTime: str, totalResults: int,
        historicalNewsOptions=None) -> HistoricalNews:
    """
    Get historical news headline.

    This method is blocking.
    """
    return self.run(self.reqHistoricalNewsAsync(conId, providerCodes,
            startDateTime, endDateTime, totalResults,
            historicalNewsOptions))

@api
def reqNewsBulletins(self, allMessages: bool):
    """
    Subscribe to IB news bulletins. If allMessages=True then fetch
    all messages for the day.
    """
    self.client.reqNewsBulletins(allMessages)

@api
def cancelNewsBulletins(self):
    """
    Cancel subscription to IB news bulletins.
    """
    self.client.cancelNewsBulletins()

@api
def requestFA(self, faDataType: int) -> str:
    """
    Request financial-advisor configuration data.

    faDataType:

    * 1 = Groups;
    * 2 = Profiles;
    * 3 = Account Aliases.

    This method is blocking.

    https://interactivebrokers.github.io/tws-api/financial_advisor_methods_and_orders.html
    """
    return self.run(self.requestFAAsync(faDataType))

@api
def replaceFA(self, faDataType: int, xml: str):
    """
    Replace financial-advisor configuration data with the given XML.

    https://interactivebrokers.github.io/tws-api/financial_advisor_methods_and_orders.html
    """
    self.client.replaceFA(faDataType, xml)
# now entering the parallel async universe

async def connectAsync(self, host, port, clientId, timeout=2):
    """
    Async version of :py:meth:`.connect`; connects and then
    synchronizes account values, positions and executions.
    """
    self.wrapper.clientId = clientId
    await self.client.connectAsync(host, port, clientId, timeout)
    # Fetch the initial state concurrently
    await asyncio.gather(
        self.reqAccountUpdatesAsync(),
        self.reqPositionsAsync(),
        self.reqExecutionsAsync())
    _logger.info('Synchronization complete')

async def qualifyContractsAsync(self, *contracts):
    """
    Async version of :py:meth:`.qualifyContracts`; fills in missing
    contract fields in-place and returns the successfully qualified
    contracts.
    """
    detailsLists = await asyncio.gather(
        *(self.reqContractDetailsAsync(c) for c in contracts))
    result = []
    for contract, detailsList in zip(contracts, detailsLists):
        if not detailsList:
            # Unknown contracts are logged and left out of the result
            _logger.error(f'Unknown contract: {contract}')
        elif len(detailsList) > 1:
            # Ambiguous contracts are logged and left out of the result
            possibles = [details.summary for details in detailsList]
            _logger.error(f'Ambiguous contract: {contract}, '
                    f'possibles are {possibles}')
        else:
            details = detailsList[0]
            contract.update(**details.summary.dict())
            result.append(contract)
    return result
async def reqTickersAsync(self, *contracts, regulatorySnapshot=False):
    """Request a market-data snapshot for every contract, wait for all
    of them to arrive and return the corresponding tickers."""
    futures = []
    for contract in contracts:
        reqId = self.client.getReqId()
        future = self.wrapper.startReq(reqId)
        futures.append(future)
        self.wrapper.startTicker(reqId, contract)
        # NOTE(review): the positional True presumably requests snapshot
        # mode -- confirm against EClient.reqMktData.
        self.client.reqMktData(reqId, contract, '',
                True, regulatorySnapshot, [])
    await asyncio.gather(*futures)
    return [self.ticker(c) for c in contracts]
def whatIfOrderAsync(self, contract, order):
    """Place a what-if copy of ``order`` and return the pending future."""
    # Work on a copy so the caller's order object is left untouched.
    probeOrder = Order(**order.dict()).update(whatIf=True)
    reqId = self.client.getReqId()
    pending = self.wrapper.startReq(reqId)
    self.client.placeOrder(reqId, contract, probeOrder)
    return pending
def reqAccountUpdatesAsync(self):
    """Subscribe to account value updates for the default account.

    Returns a future that completes once the initial snapshot of
    account values has been received.
    """
    # The first account returned by the API is treated as the default;
    # assumes at least one account is available -- TODO confirm.
    defaultAccount = self.client.getAccounts()[0]
    future = self.wrapper.startReq('accountValues')
    self.client.reqAccountUpdates(True, defaultAccount)
    return future
def reqAccountSummaryAsync(self):
    """Request a summary of all tags for all accounts; returns the
    pending future."""
    reqId = self.client.getReqId()
    future = self.wrapper.startReq(reqId)
    self.client.reqAccountSummary(reqId, groupName='All',
            tags=AccountSummaryTags.AllTags)
    return future
def reqOpenOrdersAsync(self):
    """Issue an open-orders request; returns the pending future."""
    # Register a pending request future, then issue the API call.
    future = self.wrapper.startReq('openOrders')
    self.client.reqOpenOrders()
    return future
def reqExecutionsAsync(self, execFilter=None):
    """Request executions, optionally restricted by ``execFilter``.

    Returns the pending future.
    """
    # Use an identity check so a caller-supplied filter that happens to
    # evaluate falsy is not silently replaced by a default filter
    # (the previous `execFilter or ExecutionFilter()` did exactly that).
    if execFilter is None:
        execFilter = ExecutionFilter()
    reqId = self.client.getReqId()
    future = self.wrapper.startReq(reqId)
    self.client.reqExecutions(reqId, execFilter)
    return future
def reqPositionsAsync(self):
    """Issue a positions request; returns the pending future."""
    # Register the request future before the call so no callback is missed.
    pending = self.wrapper.startReq('positions')
    self.client.reqPositions()
    return pending
def reqContractDetailsAsync(self, contract):
    """Request contract details; returns the pending future."""
    reqId = self.client.getReqId()
    future = self.wrapper.startReq(reqId)
    self.client.reqContractDetails(reqId, contract)
    return future
async def reqMatchingSymbolsAsync(self, pattern):
    """Look up contract descriptions matching ``pattern``.

    Returns the matching symbols, or None if no answer arrives
    within 4 seconds.
    """
    reqId = self.client.getReqId()
    future = self.wrapper.startReq(reqId)
    self.client.reqMatchingSymbols(reqId, pattern)
    try:
        await asyncio.wait_for(future, 4)
        return future.result()
    except asyncio.TimeoutError:
        _logger.error('reqMatchingSymbolsAsync: Timeout')
        # Make the timeout result explicit instead of falling off the end.
        return None
def reqHistoricalDataAsync(self, contract, endDateTime,
        durationStr, barSizeSetting, whatToShow, useRTH,
        formatDate=1, keepUpToDate=False, chartOptions=None):
    """Request historical bars; returns the pending future."""
    reqId = self.client.getReqId()
    future = self.wrapper.startReq(reqId)
    if keepUpToDate:
        # Keep streaming live bar updates after the historical download.
        self.wrapper.startLiveBars(reqId, historical=True)
    end = util.formatIBDatetime(endDateTime)
    self.client.reqHistoricalData(reqId, contract, end,
            durationStr, barSizeSetting, whatToShow,
            useRTH, formatDate, keepUpToDate, chartOptions)
    return future
def reqHistoricalTicksAsync(self, contract, startDateTime, endDateTime,
        numberOfTicks, whatToShow, useRth, ignoreSize, miscOptions):
    """Request historical ticks; returns the pending future."""
    reqId = self.client.getReqId()
    future = self.wrapper.startReq(reqId)
    # Convert both endpoints to the datetime format the API expects.
    start = util.formatIBDatetime(startDateTime)
    end = util.formatIBDatetime(endDateTime)
    self.client.reqHistoricalTicks(reqId, contract, start, end,
            numberOfTicks, whatToShow, useRth,
            ignoreSize, miscOptions)
    return future
def reqHeadTimeStampAsync(self, contract, whatToShow,
        useRTH, formatDate):
    """Request the earliest available data timestamp; returns the
    pending future."""
    reqId = self.client.getReqId()
    future = self.wrapper.startReq(reqId)
    self.client.reqHeadTimeStamp(reqId, contract, whatToShow,
            useRTH, formatDate)
    return future
def reqMktDepthExchangesAsync(self):
    """Request the exchanges offering market depth; returns the
    pending future."""
    future = self.wrapper.startReq('mktDepthExchanges')
    self.client.reqMktDepthExchanges()
    return future
def reqHistogramDataAsync(self, contract, useRTH, period):
    """Request histogram data for a contract; returns the pending future."""
    reqId = self.client.getReqId()
    future = self.wrapper.startReq(reqId)
    self.client.reqHistogramData(reqId, contract, useRTH, period)
    return future
def reqFundamentalDataAsync(self, contract, reportType,
        fundamentalDataOptions=None):
    """Request fundamental data of the given report type; returns the
    pending future."""
    reqId = self.client.getReqId()
    future = self.wrapper.startReq(reqId)
    self.client.reqFundamentalData(reqId, contract, reportType,
            fundamentalDataOptions)
    return future
async def reqScannerSubscriptionAsync(self, subscription,
        scannerSubscriptionOptions=None):
    """Run a scanner once: subscribe, await the first result set,
    then cancel the subscription and return the result."""
    reqId = self.client.getReqId()
    future = self.wrapper.startReq(reqId)
    self.client.reqScannerSubscription(reqId, subscription,
            scannerSubscriptionOptions)
    await future
    # One-shot semantics: cancel as soon as a result has arrived.
    self.client.cancelScannerSubscription(reqId)
    return future.result()
def reqScannerParametersAsync(self):
    """Request the scanner parameters XML; returns the pending future."""
    future = self.wrapper.startReq('scannerParams')
    self.client.reqScannerParameters()
    return future
async def calculateImpliedVolatilityAsync(self, contract, optionPrice,
        underPrice, implVolOptions):
    """Compute the implied volatility of an option.

    Returns the result, or None if no answer arrives within 4 seconds.
    """
    reqId = self.client.getReqId()
    future = self.wrapper.startReq(reqId)
    self.client.calculateImpliedVolatility(reqId, contract, optionPrice,
            underPrice, implVolOptions)
    try:
        await asyncio.wait_for(future, 4)
        return future.result()
    except asyncio.TimeoutError:
        _logger.error('calculateImpliedVolatilityAsync: Timeout')
        return
    finally:
        # Always cancel the calculation, whether it completed or timed out.
        self.client.cancelCalculateImpliedVolatility(reqId)
async def calculateOptionPriceAsync(self, contract, volatility,
        underPrice, optPrcOptions):
    """Compute an option price from volatility and underlying price.

    Returns the result, or None if no answer arrives within 4 seconds.
    """
    reqId = self.client.getReqId()
    future = self.wrapper.startReq(reqId)
    self.client.calculateOptionPrice(reqId, contract, volatility,
            underPrice, optPrcOptions)
    try:
        await asyncio.wait_for(future, 4)
        return future.result()
    except asyncio.TimeoutError:
        _logger.error('calculateOptionPriceAsync: Timeout')
        return
    finally:
        # Always cancel the calculation, whether it completed or timed out.
        self.client.cancelCalculateOptionPrice(reqId)
def reqSecDefOptParamsAsync(self, underlyingSymbol,
        futFopExchange, underlyingSecType, underlyingConId):
    """Request option chain parameters for an underlying; returns the
    pending future."""
    reqId = self.client.getReqId()
    future = self.wrapper.startReq(reqId)
    self.client.reqSecDefOptParams(reqId, underlyingSymbol,
            futFopExchange, underlyingSecType, underlyingConId)
    return future
def reqNewsProvidersAsync(self):
    """Request the list of subscribed news providers; returns the
    pending future."""
    future = self.wrapper.startReq('newsProviders')
    self.client.reqNewsProviders()
    return future
def reqNewsArticleAsync(self, providerCode, articleId):
    """Request the body of a news article; returns the pending future."""
    reqId = self.client.getReqId()
    future = self.wrapper.startReq(reqId)
    self.client.reqNewsArticle(reqId, providerCode, articleId)
    return future
async def reqHistoricalNewsAsync(self, conId, providerCodes,
        startDateTime, endDateTime, totalResults,
        _historicalNewsOptions=None):
    """Fetch historical news headlines.

    Returns the headlines, or None if no answer arrives within
    4 seconds. ``_historicalNewsOptions`` is accepted for interface
    compatibility but not forwarded (see note below).
    """
    reqId = self.client.getReqId()
    future = self.wrapper.startReq(reqId)
    # API does not take historicalNewsOptions parameter
    self.client.reqHistoricalNews(reqId, conId, providerCodes,
            startDateTime, endDateTime, totalResults)
    try:
        await asyncio.wait_for(future, 4)
        return future.result()
    except asyncio.TimeoutError:
        _logger.error('reqHistoricalNewsAsync: Timeout')
async def requestFAAsync(self, faDataType):
    """Request financial-advisor configuration data.

    Returns the result, or None if no answer arrives within 4 seconds.
    """
    future = self.wrapper.startReq('requestFA')
    self.client.requestFA(faDataType)
    try:
        await asyncio.wait_for(future, 4)
        return future.result()
    except asyncio.TimeoutError:
        _logger.error('requestFAAsync: Timeout')
if __name__ == '__main__':
    # Manual smoke tests; requires a running TWS/gateway listening on
    # 127.0.0.1:7497. Flip an `if 0:` to `if 1:` to run that section.
    # import uvloop
    # asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
    from ib_insync.contract import Stock, Forex, Index, Option, Future, CFD
    asyncio.get_event_loop().set_debug(True)
    util.logToConsole(logging.INFO)
    ib = IB()
    ib.connect('127.0.0.1', 7497, clientId=21)

    # Sample contracts used by the sections below.
    aex = Index('EOE', 'FTA')
    eurusd = Forex('EURUSD')
    intc = Stock('INTC', 'SMART', 'USD', primaryExchange='NASDAQ')
    amd = Stock('AMD', 'SMART', 'USD')
    aapl = Stock('AAPL', 'SMART', 'USD')
    tsla = Stock('TSLA', 'SMART', 'USD')
    spy = Stock('SPY', 'ARCA')
    wrongContract = Forex('lalala')
    option = Option('EOE', '20171215', 490, 'P', 'FTA', multiplier=100)

    if 0:
        # Contract details and qualification, including a bad contract.
        cds = ib.reqContractDetails(aex)
        print(cds)
        cd = cds[0]
        print(cd)
        conId = cd.summary.conId
        ib.qualifyContracts(aex, eurusd, intc)
        print(aex, eurusd, intc)
        print(ib.reqContractDetails(wrongContract))
    if 0:
        # Scanner subscription and parameters.
        sub = ScannerSubscription(instrument='FUT.US',
                locationCode='FUT.GLOBEX', scanCode='TOP_PERC_GAIN')
        print(ib.reqScannerData(sub, []))
        print(len(ib.reqScannerParameters()))
    if 0:
        # Option analytics.
        print(ib.calculateImpliedVolatility(option,
                optionPrice=6.1, underPrice=525))
        print(ib.calculateOptionPrice(option,
                volatility=0.14, underPrice=525))
    if 0:
        ib.qualifyContracts(amd)
        ticker = ib.reqTickers(amd)
        print(ticker)
    if 0:
        # Option chain parameters.
        ib.qualifyContracts(aex)
        chains = ib.reqSecDefOptParams(aex.symbol, '', aex.secType, aex.conId)
        chain = next(c for c in chains if c.tradingClass == 'AEX')
        print(chain)
    if 0:
        # Historical bars (one-shot).
        print(ib.reqContractDetails(aapl))
        bars = ib.reqHistoricalData(
                aapl, '', '1 D', '1 hour', 'MIDPOINT', False, 1, False, None)
        print(len(bars))
        print(bars[0])
    if 0:
        # Historical bars with keepUpToDate streaming.
        bars = ib.reqHistoricalData(
                aapl, '', '1 D', '1 hour', 'MIDPOINT', False, 1, True, None)
        prevBar = None
        while ib.waitOnUpdate():
            currBar = bars[-1] if bars else None
            if prevBar != currBar:
                prevBar = currBar
                print(currBar)
    if 0:
        ib.reqMktData(tsla, '165,233', False, False, None)
        ib.sleep(20000)
    if 0:
        # Delayed market data type.
        ib.reqMarketDataType(2)
        print(ib.reqTickers(amd))
        print(ib.reqTickers(eurusd))
        print(ib.reqTickers(amd, eurusd, aex))
    if 0:
        m = ib.reqMatchingSymbols('Intel')
        print(m)
    if 0:
        print(ib.requestFA(1))
    if 0:
        print(ib.reqHeadTimeStamp(intc, 'TRADES', True, 1))
    if 0:
        print(ib.reqFundamentalData(intc, 'ReportsFinSummary'))
    if 0:
        # News providers, historical headlines and article body.
        newsProviders = ib.reqNewsProviders()
        print(newsProviders)
        codes = '+'.join(np.code for np in newsProviders)
        ib.qualifyContracts(intc)
        headlines = ib.reqHistoricalNews(intc.conId, codes, "", "", 10)
        latest = headlines[0]
        print(latest)
        article = ib.reqNewsArticle(latest.providerCode, latest.articleId)
        print(article)
    if 0:
        ib.reqNewsBulletins(True)
        ib.sleep(5)
        print(ib.newsBulletins())
    if 0:
        # Market depth stream.
        ticker = ib.reqMktDepth(eurusd, 5)
        while ib.sleep(5):
            print([d.price for d in ticker.domBids],
                  [d.price for d in ticker.domAsks])
    if 0:
        order = MarketOrder('BUY', 100)
        state = ib.whatIfOrder(amd, order)
        print(state)
    if 0:
        # Historical ticks.
        start = datetime.datetime(2017, 7, 24, 16, 0, 0)
        end = ''
        ticks = ib.reqHistoricalTicks(
                eurusd, start, end, 100, 'MIDPOINT', True, False, [])
        print(ticks)
    if 0:
        start = datetime.time(10, 10, 10)
        end = datetime.time(14, 13)
        for t in ib.timeRange(start, end, 5):
            print(t)
    if 0:
        histo = ib.reqHistogramData(amd, True, '1 week')
        print(histo)

    # ib.disconnect()
| 35.565964 | 94 | 0.616264 |
acf2c37552fc2d5be87d77f12c7e16ca2a441209 | 408 | py | Python | pipe_anchorages/transforms/source.py | GlobalFishingWatch/anchorages_pipeline | 88764545b693bfb65fc7a7f62a344fb2afbc3d97 | [
"Apache-2.0"
] | 3 | 2017-12-22T10:19:15.000Z | 2020-04-20T10:28:43.000Z | pipe_anchorages/transforms/source.py | GlobalFishingWatch/anchorages_pipeline | 88764545b693bfb65fc7a7f62a344fb2afbc3d97 | [
"Apache-2.0"
] | 32 | 2017-12-06T13:01:46.000Z | 2022-03-30T22:52:04.000Z | pipe_anchorages/transforms/source.py | GlobalFishingWatch/anchorages_pipeline | 88764545b693bfb65fc7a7f62a344fb2afbc3d97 | [
"Apache-2.0"
] | 3 | 2018-01-21T14:07:58.000Z | 2021-07-28T16:02:20.000Z |
from apache_beam import PTransform
from apache_beam import io
class QuerySource(PTransform):
    """Beam PTransform that reads rows from BigQuery via a SQL query."""

    def __init__(self, query, use_standard_sql=False):
        # `use_standard_sql` selects standard vs legacy BigQuery SQL dialect.
        self.query = query
        self.use_standard_sql = use_standard_sql

    def expand(self, xs):
        # Attach a BigQuery read to the pipeline rooted at `xs`.
        return (
            xs
            | io.Read(io.gcp.bigquery.BigQuerySource(query=self.query, use_standard_sql=self.use_standard_sql))
        )
| 25.5 | 111 | 0.678922 |
acf2c3ec09b5d5cdc1a43e323aa83acda8b1b837 | 5,356 | py | Python | python/paddle/fluid/dygraph/dygraph_to_static/basic_api_transformer.py | zmxdream/Paddle | 04f042a5d507ad98f7f2cfc3cbc44b06d7a7f45c | [
"Apache-2.0"
] | 17,085 | 2016-11-18T06:40:52.000Z | 2022-03-31T22:52:32.000Z | python/paddle/fluid/dygraph/dygraph_to_static/basic_api_transformer.py | zmxdream/Paddle | 04f042a5d507ad98f7f2cfc3cbc44b06d7a7f45c | [
"Apache-2.0"
] | 29,769 | 2016-11-18T06:35:22.000Z | 2022-03-31T16:46:15.000Z | python/paddle/fluid/dygraph/dygraph_to_static/basic_api_transformer.py | zmxdream/Paddle | 04f042a5d507ad98f7f2cfc3cbc44b06d7a7f45c | [
"Apache-2.0"
] | 4,641 | 2016-11-18T07:43:33.000Z | 2022-03-31T15:15:02.000Z | # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import astor
from paddle.utils import gast
from paddle.fluid.dygraph.dygraph_to_static.static_analysis import AstNodeWrapper
from paddle.fluid.dygraph.dygraph_to_static import utils
class BasicApiTransformer(gast.NodeTransformer):
    """
    Class to transform basic API from dygraph to static graph.
    """

    def __init__(self, wrapper_root):
        assert isinstance(
            wrapper_root, AstNodeWrapper
        ), "Input non-AstNodeWrapper node for the initialization of BasicApiTransformer."
        self.wrapper_root = wrapper_root
        self.root = wrapper_root.node
        # Maps assigned target source string -> the dygraph-class Call node
        # that created it, so later forward calls can be rewritten.
        self.class_node_dict = {}

    def transform(self):
        """Run the transformation and return the (mutated) wrapper root."""
        to_tensor_transformer = ToTensorTransformer(self.root)
        to_tensor_transformer.transform()
        self.visit(self.root)
        return self.wrapper_root

    def visit_Assign(self, node):
        # If the assignment instantiates a dygraph layer class, record it
        # and remove the statement (returning None drops the node).
        if self._update_class_node_dict(node):
            return None

        for child_node in gast.walk(node.value):
            if isinstance(child_node, gast.Call):
                self._visit_Call(child_node)
        return node

    def visit_Expr(self, node):
        value_node = node.value
        for child_node in gast.walk(value_node):
            if isinstance(child_node, gast.Call):
                # TODO(liym27):
                # Considers that a dygraph api which modifies the input or has a output.
                if utils.is_dygraph_api(child_node):
                    return
                else:
                    self._visit_Call(child_node)
        return node

    def _visit_Call(self, node):
        """Rewrite a call to a recorded dygraph layer into its static form."""
        assert isinstance(node, gast.Call)
        func_name = astor.to_source(gast.gast_to_ast(node.func))

        if self._is_dygraph_forward(func_name):
            class_node = self._get_class_node(func_name)
            static_node = utils.to_static_ast(node, class_node)
            return static_node
        else:
            return node

    def _is_dygraph_forward(self, func_id):
        # True when `func_id` names a variable previously bound to a
        # dygraph layer instance.
        return func_id in self.class_node_dict

    def _get_class_node(self, func_id):
        return self.class_node_dict[func_id]

    def _update_class_node_dict(self, node):
        """Record `target = DygraphClass(...)` assignments.

        Returns True when the assignment was recorded (and should be
        removed from the tree), False otherwise.
        """
        assert isinstance(node, gast.Assign)
        node_value = node.value
        if isinstance(node_value, gast.Call):
            if is_to_variable(node_value):
                return False

            if utils.is_dygraph_api(node_value):
                dygraph_api = node_value.func.attr
                if not utils.dygraph_class_to_static_api.get(dygraph_api):
                    return False

                utils.update_args_of_func(node_value, node_value, "__init__")
                target_str = astor.to_source(gast.gast_to_ast(node.targets[0]))
                self.class_node_dict[target_str] = node_value
                return True
        # TODO: node.value is not dygraph class
        return False
class ToTensorTransformer(gast.NodeTransformer):
    """
    Class to transform paddle.to_tensor and paddle.to_variable to paddle.assign
    """

    def __init__(self, node):
        assert isinstance(
            node, gast.AST
        ), "Input non-gast.AST node for the initialization of ToTensorTransformer."
        self.root = node

    def transform(self):
        """Rewrite the tree in-place and return it."""
        self.visit(self.root)
        return self.root

    def visit_Call(self, node):
        assert isinstance(node, gast.Call)
        if is_to_variable(node):
            node = to_assign_node(node)
        # Continue into the (possibly rewritten) call's children.
        self.generic_visit(node)
        return node
def is_to_variable(node):
    """Whether `node` calls dygraph `to_variable` or paddle `to_tensor`."""
    assert isinstance(node, gast.Call)
    func_source = utils.ast_to_source_code(node.func).strip()
    # Check the dygraph namespace first, then the paddle namespace.
    for checker, suffix in ((utils.is_dygraph_api, "to_variable"),
                            (utils.is_paddle_api, "to_tensor")):
        if checker(node):
            return func_source.endswith(suffix)
    return False
def to_assign_node(node):
    """Rewrite a `to_variable`/`to_tensor` call node into `paddle.assign`.

    NOTE:
    1. `to_variable` supports dtypes {float16, float32, float64, int16,
       int32, int64, uint8, uint16} while `assign` only supports
       {float32, float64, int32, int64, bool};
    2. If the input of `assign` is numpy.ndarray, its size cannot be
       greater than 1024 * 1024.
    """
    assert isinstance(node, gast.Call)
    node.func = gast.parse('paddle.assign').body[0].value
    if node.args:
        # Positional form: keep only the first (tensor) argument.
        node.args = node.args[:1]
        node.keywords = []
        return node
    # Keyword form: rename the data keyword to `x` and keep only it.
    for kw in node.keywords:
        if kw.arg in ('value', 'data'):
            kw.arg = 'x'
            node.keywords = [kw]
            node.args = []
            break
    return node
| 33.685535 | 112 | 0.648992 |
acf2c413903f7929c5c4a8152dff8564c23901eb | 3,605 | py | Python | telnyx-python/telnyx/api_resources/abstract/nested_resource_class_methods.py | team-telnyx/telnyx-2fa | 6b8acb6703dc9458176c97d7fa6d1fd9c303bfbd | [
"MIT"
] | null | null | null | telnyx-python/telnyx/api_resources/abstract/nested_resource_class_methods.py | team-telnyx/telnyx-2fa | 6b8acb6703dc9458176c97d7fa6d1fd9c303bfbd | [
"MIT"
] | 3 | 2020-03-24T18:09:34.000Z | 2021-02-02T22:37:30.000Z | telnyx-python/telnyx/api_resources/abstract/nested_resource_class_methods.py | mgwilliams/telnyx-2fa | 49b794c05f42bc6d1c27f722e0d09da6654ad8d2 | [
"MIT"
] | 1 | 2020-01-24T17:39:37.000Z | 2020-01-24T17:39:37.000Z | from __future__ import absolute_import, division, print_function
from telnyx import api_requestor, util
from telnyx.six.moves.urllib.parse import quote_plus
def nested_resource_class_methods(resource, path=None, operations=None):
    """Class decorator factory that adds helpers for a nested REST resource.

    For each name in ``operations`` ("create", "retrieve", "update",
    "delete", "list") a classmethod like ``create_<resource>`` is attached
    to the decorated class, along with ``<resource>s_url`` and
    ``<resource>s_request`` building-block classmethods.
    """
    if path is None:
        # Default URL path segment is the pluralized resource name.
        path = "%ss" % resource
    if operations is None:
        raise ValueError("operations list required")

    def wrapper(cls):
        def nested_resource_url(cls, id, nested_id=None):
            # Build /<class_url>/<id>/<path>[/<nested_id>] with safe quoting.
            parts = [cls.class_url()]
            if id is not None:
                parts.append(quote_plus(id, safe=util.telnyx_valid_id_parts))
            parts.append(quote_plus(path, safe="/"))
            if nested_id is not None:
                parts.append(quote_plus(nested_id, safe=util.telnyx_valid_id_parts))
            return "/".join(parts)

        resource_url_method = "%ss_url" % resource
        setattr(cls, resource_url_method, classmethod(nested_resource_url))

        def nested_resource_request(cls, method, url, api_key=None, **params):
            # Perform the HTTP request and wrap the response in SDK objects.
            requestor = api_requestor.APIRequestor(api_key)
            response, api_key = requestor.request(method, url, params)
            return util.convert_to_telnyx_object(response, api_key)

        resource_request_method = "%ss_request" % resource
        setattr(cls, resource_request_method, classmethod(nested_resource_request))

        for operation in operations:
            if operation == "create":

                def create_nested_resource(cls, id, **params):
                    url = getattr(cls, resource_url_method)(id)
                    return getattr(cls, resource_request_method)("post", url, **params)

                create_method = "create_%s" % resource
                setattr(cls, create_method, classmethod(create_nested_resource))

            elif operation == "retrieve":

                def retrieve_nested_resource(cls, id, nested_id, **params):
                    url = getattr(cls, resource_url_method)(id, nested_id)
                    return getattr(cls, resource_request_method)("get", url, **params)

                retrieve_method = "retrieve_%s" % resource
                setattr(cls, retrieve_method, classmethod(retrieve_nested_resource))

            elif operation == "update":

                def modify_nested_resource(cls, id, nested_id, **params):
                    url = getattr(cls, resource_url_method)(id, nested_id)
                    return getattr(cls, resource_request_method)("post", url, **params)

                modify_method = "modify_%s" % resource
                setattr(cls, modify_method, classmethod(modify_nested_resource))

            elif operation == "delete":

                def delete_nested_resource(cls, id, nested_id, **params):
                    url = getattr(cls, resource_url_method)(id, nested_id)
                    return getattr(cls, resource_request_method)(
                        "delete", url, **params
                    )

                delete_method = "delete_%s" % resource
                setattr(cls, delete_method, classmethod(delete_nested_resource))

            elif operation == "list":

                def list_nested_resources(cls, id, **params):
                    url = getattr(cls, resource_url_method)(id)
                    return getattr(cls, resource_request_method)("get", url, **params)

                list_method = "list_%ss" % resource
                setattr(cls, list_method, classmethod(list_nested_resources))

            else:
                raise ValueError("Unknown operation: %s" % operation)

        return cls

    return wrapper
| 40.965909 | 87 | 0.611096 |
acf2c43325c963ee5631d137435cb9cb947800f1 | 6,614 | py | Python | Contents/Code/__init__.py | aboroday/JSONAgent.bundle | ab13287b041a94f527131640d67512bf586a787d | [
"MIT"
] | null | null | null | Contents/Code/__init__.py | aboroday/JSONAgent.bundle | ab13287b041a94f527131640d67512bf586a787d | [
"MIT"
] | null | null | null | Contents/Code/__init__.py | aboroday/JSONAgent.bundle | ab13287b041a94f527131640d67512bf586a787d | [
"MIT"
] | null | null | null | # coding=utf-8
"""
JSONMovieAgent
"""
from utils import Mediafile, open_file, load_json_metadata
from logging import PlexLogAdapter as log
# PLEX API
# Short local aliases for the Plex plugin framework globals.
preferences = Prefs  # plugin preference store (e.g. preferences['debug'])
load_file = Core.storage.load  # framework file loader
PlexAgent = Agent.Movies  # base class for movie metadata agents
MediaProxy = Proxy.Media
Metadata = MetadataSearchResult
Trailer = TrailerObject
class JSONAgent(PlexAgent):
    """
    A Plex Metadata Agent for Movies.

    Uses JSON files as the metadata source for Plex Movies.
    """
    name = 'JSONAgent'
    ver = '0.0.1'
    primary_provider = True
    languages = [Locale.Language.NoLanguage]
    # Agents whose results may be merged into ours.
    accepts_from = [
        'com.plexapp.agents.localmedia',
        'com.plexapp.agents.opensubtitles',
        'com.plexapp.agents.podnapisi',
        'com.plexapp.agents.subzero'
    ]
    # Agents this agent can contribute metadata to.
    contributes_to = [
        'com.plexapp.agents.themoviedb',
        'com.plexapp.agents.imdb',
        'com.plexapp.agents.none'
    ]

    # search function
    def search(self, results, media, lang):
        """Read the sidecar JSON for the media file and register a search
        result with title, year and a (possibly generated) id."""
        log.debug('# Invoking search function')
        log.info('{plugin} Version: {number}'.format(
            plugin=self.name, number=self.ver))
        log.debug('Plex Server Version: {number}'.format(
            number=Platform.ServerVersion))
        if preferences['debug']:
            log.info('Agent debug logging is enabled!')
        else:
            log.info('Agent debug logging is disabled!')

        mediafile = Mediafile(media.items[0].parts[0].file)
        json_metadata = load_json_metadata(mediafile)

        # Title
        try:
            media.name = json_metadata.get('title')
        except Exception as e:
            log.info('ERROR: No \'title\' tag in JSON. Aborting!')
            log.debug('Exception: {name}'.format(name=e))
            return

        # ID
        try:
            id = json_metadata.get('id').strip()
        except:
            # get() returned None or a non-string; fall back to empty id.
            id = ''
            pass
        if len(id) > 2:
            media.id = id
            log.debug('ID from json: {id}'.format(id=media.id))
        else:
            # if movie id doesn't exist, create
            # one based on hash of title and year
            def ord3(x):
                return '%.3d' % ord(x)
            id = int(''.join(map(ord3, media.name + str(media.year))))
            id = str(abs(hash(int(id))))
            media.id = id
            log.debug('ID generated: {id}'.format(id=media.id))

        results.Append(Metadata(id=media.id, name=media.name, year=media.year, lang=lang, score=100))
        try:
            log.info('Found movie information in JSON file:'
                     ' title = {media.name},'
                     ' id = {media.id}'.format(media=media))
        except:
            pass

    # update function
    def update(self, metadata, media, lang):
        """Copy every recognised field from the sidecar JSON onto the Plex
        metadata object; aborts early when no title is present."""
        log.debug('# Invoking update function')
        log.info('{plugin} Version: {number}'.format(
            plugin=self.name, number=self.ver))
        log.debug('Plex Server Version: {number}'.format(
            number=Platform.ServerVersion))
        if preferences['debug']:
            log.info('Agent debug logging is enabled!')
        else:
            log.info('Agent debug logging is disabled!')

        mediafile = Mediafile(media.items[0].parts[0].file)
        json_metadata = load_json_metadata(mediafile)
        log.debug('metadata: {name}'.format(name=metadata))

        # Title
        if json_metadata.get('title'):
            log.debug('title found: {name}'.format(name=json_metadata.get('title')))
            metadata.title = json_metadata.get('title')
        else:
            log.info('ERROR: No \'title\' tag in JSON. Aborting!')
            return

        # Year
        if json_metadata.get('year'):
            log.debug('year found: {name}'.format(name=json_metadata.get('year')))
            metadata.year = int(json_metadata.get('year'))

        # Summary
        if json_metadata.get('summary'):
            log.debug('summary found: {name}'.format(name=json_metadata.get('summary')))
            metadata.summary = json_metadata.get('summary')

        # Studio
        if json_metadata.get('studio'):
            log.debug('studio found: {name}'.format(name=json_metadata.get('studio')))
            metadata.studio = json_metadata.get('studio')

        # Genre
        if json_metadata.get('genre'):
            try:
                metadata.genres.clear()
                for genre in json_metadata.get('genre'):
                    log.debug('genre found: {name}'.format(name=genre))
                    metadata.genres.add(genre)
            except:
                pass

        # Country.
        if json_metadata.get('country'):
            try:
                metadata.countries.clear()
                for country in json_metadata.get('country'):
                    # Normalize the long-form country name.
                    country = country.replace('United States of America', 'USA')
                    log.debug('country found: {name}'.format(name=country))
                    metadata.countries.add(country)
            except:
                pass

        # Directors.
        if json_metadata.get('director'):
            try:
                metadata.directors.clear()
                for movie_director in json_metadata.get('director'):
                    director = metadata.directors.new()
                    log.debug('director found: {name}'.format(name=movie_director))
                    director.name = movie_director
            except:
                pass

        # Writers.
        if json_metadata.get('writer'):
            try:
                metadata.writers.clear()
                for movie_writer in json_metadata.get('writer'):
                    writer = metadata.writers.new()
                    log.debug('writer found: {name}'.format(name=movie_writer))
                    writer.name = movie_writer
            except:
                pass

        # Tagline.
        if json_metadata.get('tagline'):
            log.debug('tagline found: {name}'.format(name=json_metadata.get('tagline')))
            metadata.tagline = json_metadata.get('tagline')

        # Actors.
        if json_metadata.get('actor'):
            try:
                metadata.roles.clear()
                for movie_role in json_metadata.get('actor'):
                    role = metadata.roles.new()
                    if movie_role.get('role'):
                        role.role = movie_role.get('role')
                    role.name = movie_role.get('name')
                    log.debug('actor found: {name}, role found: {role}'.format(name=role.name, role=role.role))
            except:
                pass

        return metadata
| 33.40404 | 111 | 0.54808 |
acf2c5ac867866629f188238ae7e1cb386ebd11e | 5,849 | py | Python | utils.py | luozn15/FloorplanGAN | 113813c2e857c5cd4e64c92626d359e5746e9eab | [
"MIT"
] | null | null | null | utils.py | luozn15/FloorplanGAN | 113813c2e857c5cd4e64c92626d359e5746e9eab | [
"MIT"
] | null | null | null | utils.py | luozn15/FloorplanGAN | 113813c2e857c5cd4e64c92626d359e5746e9eab | [
"MIT"
] | null | null | null | from tqdm import tqdm
import pandas as pd
import numpy as np
import os
import pickle
from matplotlib import pyplot as plt
import torch
import torch.nn.functional as F
def name_particular_rooms(path, rooms):
    """Cache (as a pickle) the names of layouts containing exactly the
    requested number of each room type.

    ``rooms`` maps room-type index -> required element count. The result
    is written next to ``path`` under names/names_<suffixes>.pkl and the
    function is a no-op when that cache file already exists.
    """
    if rooms is None:  # identity check instead of `rooms == None`
        print('rooms == None')
        return
    suffixes = '_'.join(['{}-{}'.format(k, v) for k, v in rooms.items()])
    file = os.path.join(path, '../', 'names', 'names_{}.pkl'.format(suffixes))
    if os.path.exists(file):
        print('names_{}.pkl exists'.format(suffixes))
        return
    names = os.listdir(path)
    names_ = []
    for name in tqdm(names):
        with open(os.path.join(path, name), 'rb') as pkl_file:
            layout = pickle.load(pkl_file)
        # all(...) replaces `np.prod([...]) > 0`: equivalent (True only
        # when every required count matches) and clearer.
        if all(len(layout[k]) == v for k, v in rooms.items()):
            names_.append(name)
    with open(file, 'wb') as output:
        pickle.dump(names_, output)
    print('find {} layouts satisfying the rooms requirement'.format(len(names_)))
    return
def types_more_than_n(path, n=2000):
    """Cache the names of layouts whose room-count signature occurs more
    than ``n`` times across the dataset.

    The signature of a layout is the per-room-type element count joined
    with underscores, e.g. '1_1_0_2_0_0_0_1_0_1'. The result is written
    next to ``path`` under names/morethan_<n>.pkl and the function is a
    no-op when that cache file already exists.
    """
    file = os.path.join(path, '../', 'names', 'morethan_{}.pkl'.format(str(n)))
    if os.path.exists(file):
        print('morethan_{}.pkl exists'.format(str(n)))
        return
    names = os.listdir(path)
    rooms = range(10)  # (0,1)

    def signature(layout):
        # Room-count signature shared by both passes below.
        return '_'.join(str(len(layout[r])) for r in rooms)

    # First pass: count how often each signature occurs.
    num_types = {}
    for name in tqdm(names):
        with open(os.path.join(path, name), 'rb') as pkl_file:
            layout = pickle.load(pkl_file)
        key = signature(layout)
        num_types[key] = num_types.get(key, 0) + 1
    num_types = pd.DataFrame.from_dict(num_types, orient='index')
    num_types = num_types.sort_values(0, ascending=True)
    # Bug fix: the threshold was hard-coded to 2000, ignoring the `n`
    # parameter entirely.
    types = list(num_types[num_types > n].dropna().index)
    # Second pass: collect the names belonging to the frequent signatures.
    names_ = []
    for name in tqdm(names):
        with open(os.path.join(path, name), 'rb') as pkl_file:
            layout = pickle.load(pkl_file)
        if signature(layout) in types:
            names_.append(name)
    with open(file, 'wb') as output:
        pickle.dump(names_, output)
    print('find {} types - {} layouts out of {} in total'.format(len(types),
                                                                 len(names_), len(names)))
    return
def bounds_check(generated):
    """Penalize box corners that fall outside the unit square [0, 1].

    The last four channels of `generated` are (xc, yc, w, h) per room;
    the returned scalar sums how far each corner coordinate strays
    below 0 or above 1.
    """
    boxes = generated[:, :, -4:]
    xc, yc, w, h = (boxes[:, :, i] for i in range(4))
    corners = (xc - 0.5 * w, yc - 0.5 * h, xc + 0.5 * w, yc + 0.5 * h)

    def violation(c):
        # relu(c) - c penalizes c < 0; relu(1 - c) - (1 - c) penalizes c > 1.
        return (F.relu(c) - c + F.relu(1 - c) - (1 - c)).sum()

    return sum(violation(c) for c in corners)
def negative_wh_check(generated):
    """Penalize negative widths and heights (last two channels)."""
    def negativity(c):
        # relu(c) - c equals -c where c < 0 and 0 elsewhere.
        return (F.relu(c) - c).sum()

    width = generated[:, :, -2]
    height = generated[:, :, -1]
    return negativity(width) + negativity(height)
def get_figure(render):
    """Render a (batch, channel, H, W) room-mask tensor/array as a grid of
    matplotlib subplots: one tinted image per room channel plus a merged
    floorplan column per batch row."""
    # Fixed RGB tint per room-type channel index.
    color_table = {
        0: (210, 121, 98),   # Living room
        1: (238, 216, 98),   # Master room
        2: (83, 103, 52),    # Kitchen
        3: (118, 142, 168),  # Bathroom
        4: (82, 79, 115),    # Dining room
        5: (227, 152, 68),   # Child room
        6: (145, 177, 101),  # Study room
        7: (59, 105, 138),   # Second room
        8: (36, 35, 42),     # Guest room
        9: (221, 209, 212),  # Balcony
    }
    try:
        # Accept torch tensors; numpy arrays fall through unchanged.
        render = render.detach().cpu().numpy()
    except:
        pass
    rendered_size = render.shape[-1]  # NOTE(review): unused below
    batch_size = render.shape[0]
    num_channel = render.shape[1]
    fig = plt.figure(figsize=(18, 18))
    for i in range(batch_size):
        img_stack = []
        for j in range(num_channel):
            # Subplot index: row i, column j (1-based, row-major).
            num = i*(num_channel+1)+j+1
            ax = fig.add_subplot(batch_size, num_channel+1, num)
            #img = render[i,j,:,:].view(rendered_size,rendered_size,1).expand(rendered_size,rendered_size,3)
            img = np.tile(render[i, j, :, :, np.newaxis], (1, 1, 3))
            img *= np.array(color_table[j])
            img = img.astype('int')
            img_stack.append(img)
            ax.imshow(img, cmap='gray', vmin=0, vmax=255)
            ax.axes.xaxis.set_visible(False)
            ax.axes.yaxis.set_visible(False)
            if i == 0:
                ax.set_title(str(j))
        # Extra column: per-pixel max over all channels = merged floorplan.
        ax = fig.add_subplot(batch_size, num_channel+1, num+1)
        img_stack = np.array(img_stack)
        ax.imshow(np.max(img_stack, axis=0), cmap='gray', vmin=0, vmax=255)
        if i == 0:
            ax.set_title('floorplan')
    fig.subplots_adjust(wspace=0.1, hspace=0.1)
    return fig
def draw_table(df):
    """Render a 2-D array/tensor as a matplotlib table figure."""
    if type(df) == torch.Tensor:
        # Move to CPU numpy before plotting.
        df = df.cpu().detach().numpy()
    table = plt.figure(figsize=(16, 2))
    ax = table.add_subplot(111)
    ax.table(np.around(df, decimals=6), loc='center')
    ax.axis('off')
    return table
def geo_covariance_loss(generated, real):
covariance_generated = torch.matmul(
generated[:, :, -4:], generated[:, :, -4:].permute(0, 2, 1))
covariance_real = torch.matmul(
real[:, :, -4:], real[:, :, -4:].permute(0, 2, 1))
crit = nn.BCEWithLogitsLoss()
loss = crit(covariance_generated, covariance_real)
return loss
def ratio_loss(real_images, dataset):
    # NOTE(review): this function appears unfinished -- it computes a
    # masked aspect-ratio tensor but never returns a loss, and the
    # `dataset` argument is unused. Confirm the intended behavior.
    # Rooms count as "present" when their class channels sum above 0.5.
    weight = real_images[0][:, :, :-4].sum(axis=-1) > 0.5
    # Per-room width / height aspect ratio.
    ratio = real_images[0][:, :, -2].div(real_images[0][:, :, -1])
    x = weight*ratio
def print_grad(net):
    """Print the mean gradient of every trainable parameter in `net`."""
    for name, weight in net.named_parameters():
        # print("weight:", weight)  # print the weights to check they change
        if weight.requires_grad:
            # print("weight:", weight.grad)  # print gradients to check they are not lost
            # Printing full gradients is too verbose; print the mean (or
            # extrema) instead. Raises if the gradient is None (e.g.
            # before backward() has run).
            print("{:50} grad:".format(name), weight.grad.mean())
| 33.232955 | 108 | 0.559925 |
acf2c8488bdffc65062c768d4dc4796e6bdede86 | 1,117 | py | Python | ocdskingfisher/maindatabase/migrations/versions/1cf223a50773_check_error.py | patxiworks/kingfisher | d36aa9fbe06a1533a0624d7ee07d4cb985951204 | [
"BSD-3-Clause"
] | null | null | null | ocdskingfisher/maindatabase/migrations/versions/1cf223a50773_check_error.py | patxiworks/kingfisher | d36aa9fbe06a1533a0624d7ee07d4cb985951204 | [
"BSD-3-Clause"
] | null | null | null | ocdskingfisher/maindatabase/migrations/versions/1cf223a50773_check_error.py | patxiworks/kingfisher | d36aa9fbe06a1533a0624d7ee07d4cb985951204 | [
"BSD-3-Clause"
] | null | null | null | """check-error
Revision ID: 1cf223a50773
Revises: 2001817a568d
Create Date: 2018-06-19 15:32:34.200076
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '1cf223a50773'  # id of this migration
down_revision = '2001817a568d'  # migration this one follows
branch_labels = None
depends_on = None
def upgrade():
    """Create the per-release and per-record check-error tables."""
    # Both tables share the same shape: a unique one-to-one foreign key
    # to the parent row plus the stored error text. Build them from one
    # specification instead of two near-identical copies.
    for table_name, parent_column, fk_target in (
            ('release_check_error', 'release_id', 'release.id'),
            ('record_check_error', 'record_id', 'record.id')):
        op.create_table(table_name,
                        sa.Column('id', sa.Integer, primary_key=True),
                        sa.Column(parent_column, sa.Integer,
                                  sa.ForeignKey(fk_target), index=True,
                                  unique=True, nullable=False),
                        sa.Column('error', sa.Text, nullable=False)
                        )
def downgrade():
    """Drop the check-error tables created by upgrade()."""
    for table_name in ('release_check_error', 'record_check_error'):
        op.drop_table(table_name)
| 29.394737 | 96 | 0.59624 |
acf2c86350e03ae69b2bb6e3fc1ad1d366d8620f | 2,971 | py | Python | scrapper.py | yescorihuela/webscraping-challenge | b68e563850b32f25242293a87ab916ef8be67b42 | [
"MIT"
] | null | null | null | scrapper.py | yescorihuela/webscraping-challenge | b68e563850b32f25242293a87ab916ef8be67b42 | [
"MIT"
] | null | null | null | scrapper.py | yescorihuela/webscraping-challenge | b68e563850b32f25242293a87ab916ef8be67b42 | [
"MIT"
] | null | null | null | import re
import requests
import pandas
import time
from bs4 import BeautifulSoup
# Flat scraping script: walks every catalogue page of books.toscrape.com,
# visits each book's detail page, and dumps the collected fields to CSV.
start_time = time.time()
base_url = "http://books.toscrape.com/"

# To get the html contents
r = requests.get(base_url)
c = r.content

# To parse the html
soup = BeautifulSoup(c,"html.parser")

# To extract the first and last page numbers (e.g. "Page 1 of 50" -> ["1", "50"])
paging = soup.select('li[class="current"]')[0].text.strip().split(" ")[1:4:2]
start_page = int(paging[0])
last_page = int(paging[1])

web_content_list = []
n = 1 # To get the number of each appended book
for page_number in range(start_page, last_page + 1):
    # To form and scrap with pagination
    url = base_url + f'/catalogue/page-{page_number}.html'
    r = requests.get(url)
    c = r.content
    soup = BeautifulSoup(c,"html.parser")

    # To extract all the articles present in the current page
    articles = soup.find_all("article",{"class":"product_pod"})

    # Check one by one
    for article in articles:
        # Relative link to the book's detail page
        book_link = article.find('div', {'class': 'image_container'}).select("a")[0].attrs['href']
        book_url = base_url + f'/catalogue/{book_link}'
        book_request = requests.get(book_url)
        book_content = book_request.content
        book_soup = BeautifulSoup(book_content, "html.parser")
        title = book_soup.select("div.col-sm-6.product_main h1")[0].text.strip()
        price = book_soup.select("div.col-sm-6.product_main p.price_color")[0].text.strip()
        price = re.sub("(€|£)", "", price) # Clean price from currency symbols
        stock = book_soup.select("div.col-sm-6.product_main p.instock.availability")[0].text.strip()
        # Strip parentheses, letters and whitespace, leaving only the count digits
        stock = re.sub("([\(\)a-zA-Z\s])", "", stock)
        category = book_soup.find("ul", {'class': 'breadcrumb'}).find_all("li")[2].text.strip()
        # Rebuild an absolute cover URL from the relative ../../ image src
        cover = base_url + "/".join(book_soup.select("div.item.active img")[0].attrs['src'].split("/")[2:])
        table_th = book_soup.find("table", {'class': 'table table-striped'}).find_all("th")
        table_td = book_soup.find("table", {'class': 'table table-striped'}).find_all("td")
        book_data = {}
        book_data['title'] = title
        book_data['price'] = price
        book_data['stock'] = stock
        book_data['category'] = category
        book_data['cover'] = cover
        book_data['title'] = title  # NOTE(review): duplicate assignment of 'title' -- redundant
        # To check table data: product-information rows become extra columns,
        # with header text normalized to snake_case-ish keys
        for header, content in zip(table_th, table_td):
            clean_key = re.sub("([\s\.\(\)])","_", header.text.strip().lower())
            book_data[clean_key] = re.sub("(€|£)", "", content.text.strip())
        web_content_list.append(book_data)
        print(f"Book {n} added...")
        n += 1

df = pandas.DataFrame(web_content_list)
df.to_csv("scrapper_books.csv")

total_duration = time.time() - start_time
print(f'Total duration: {total_duration}')
# These functions are for modularize and clean this solution
def get_site_paging():
    """Placeholder for a future refactor; not implemented yet."""
    pass


def page_content():
    """Placeholder for a future refactor; not implemented yet."""
    pass


def get_book_details():
    """Placeholder for a future refactor; not implemented yet."""
    pass
acf2ca0a071ab90b9e75c0a75148f48b8cd317d9 | 2,470 | py | Python | scripts/dwf/process_data.py | danjampro/huntsman-drp | 9470c03b87991fbe09e194470f28e8b45785c206 | [
"MIT"
] | null | null | null | scripts/dwf/process_data.py | danjampro/huntsman-drp | 9470c03b87991fbe09e194470f28e8b45785c206 | [
"MIT"
] | null | null | null | scripts/dwf/process_data.py | danjampro/huntsman-drp | 9470c03b87991fbe09e194470f28e8b45785c206 | [
"MIT"
] | null | null | null | """Script to periodically query and process new data. WIP not working yet!"""
import time
from queue import Queue
from threading import Thread
from datetime import datetime, timedelta
from huntsman.drp.datatable import RawDataTable
from huntsman.drp.bulter import TemporaryButlerRepository
FILTER_NAMES = ["g_band", "r_band", "luminance"]
def query_latest_files(datatable, interval):
    """Get latest filenames specified by a time interval.

    Args:
        datatable (`huntsman.drp.datatable.RawDataTable`): The raw data table.
        interval (float): The time interval in seconds.

    Returns:
        list of filenames.
    """
    window_end = datetime.utcnow()
    window_start = window_end - timedelta(seconds=interval)
    return datatable.query_column("filename", date_start=window_start,
                                  date_end=window_end, dataType="science")
def process_data_async(queue, filter_names=FILTER_NAMES, make_coadd=False, rerun="dwfrerun"):
    """Worker loop: take filename batches off *queue* and process each batch.

    Runs forever; intended to be the target of a worker Thread. Each batch is
    ingested into a throwaway butler repository, reduced per-filter, and
    optionally coadded. Errors are printed and swallowed so one bad batch
    does not kill the worker; task_done() is always called so queue.join()
    can make progress.
    """
    while True:
        # Get the next set of filenames (blocks until a batch is queued)
        filenames = queue.get()
        try:
            # Create temp butler repo
            with TemporaryButlerRepository() as butler_repository:
                # Ingest raw data
                butler_repository.ingest_raw_data(filenames)
                # Make calexps
                for filter_name in filter_names:
                    butler_repository.processCcd(dataType="science", rerun=rerun,
                                                 filter_name=filter_name)
                # Assemble coadd
                if make_coadd:
                    butler_repository.make_coadd(rerun=rerun)
        except Exception as err:
            # best-effort: log and keep the worker alive
            print(f"Error processing files: {err}.")
        finally:
            queue.task_done()
if __name__ == "__main__":

    # Factor these out as command line args
    interval_seconds = 60

    datatable = RawDataTable()
    queue = Queue()

    # Start the queue's worker thread.
    # Fixes vs. original: `args` must be an iterable of positional arguments
    # (`args=(queue)` passed the Queue object itself, raising a TypeError when
    # the thread starts), and the thread was never started so nothing was
    # ever processed.
    thread = Thread(target=process_data_async, daemon=False, args=(queue,))
    thread.start()

    while True:
        # Get the latest filenames. Uses the otherwise-unused helper defined
        # above; the original called `datatable.query_latest(...)`, which does
        # not match any definition visible in this file -- the module docstring
        # marks this script as WIP.
        filenames = query_latest_files(datatable, interval_seconds)

        # Queue the filenames for processing
        print(f"Queuing {len(filenames)} files.")
        queue.put(filenames)

        # Wait for next batch
        time.sleep(interval_seconds)
| 30.493827 | 93 | 0.640081 |
acf2cb86c261f678190db3953a32380c8936a93a | 632 | py | Python | exe.curso em video/def 70.py | Lorenzo-Lopes/Python-Estudo | 7ee623ce29b6a0e9fac48189fbd9c641be84d418 | [
"MIT"
] | null | null | null | exe.curso em video/def 70.py | Lorenzo-Lopes/Python-Estudo | 7ee623ce29b6a0e9fac48189fbd9c641be84d418 | [
"MIT"
] | null | null | null | exe.curso em video/def 70.py | Lorenzo-Lopes/Python-Estudo | 7ee623ce29b6a0e9fac48189fbd9c641be84d418 | [
"MIT"
] | null | null | null | soma = soma1000 = menor = cont = 0
nomemenor = ' '
while True:
sair = ' '
nome = str(input('Nome:'))
prod = float(input('Valor R$:'))
while sair not in 'SsNn':
sair = str(input('Deseja comprar outro produto?[S/N]')).upper().strip()[0]
cont += 1
if prod >= 1000:
soma1000 += 1
if cont == 1 or prod < menor:
menor = prod
nomemenor = nome
soma += prod
if sair == 'N':
break
print(f'A soma dos final dos produtos foi de R$.{soma}')
print(f'no final {soma1000} custaram mais que R$.1000,00 ')
print(f'Custando {menor} o produto mais barato foi a/o {nomemenor}') | 31.6 | 82 | 0.579114 |
acf2cb9c0b199dcc7f89eb7d8be375f8824d16d2 | 2,656 | py | Python | FabBOM.py | waynegramlich/Fab | d4a23067a0354ffda106f7032df0501c8db24499 | [
"MIT"
] | 1 | 2022-03-20T12:25:34.000Z | 2022-03-20T12:25:34.000Z | FabBOM.py | waynegramlich/Fab | d4a23067a0354ffda106f7032df0501c8db24499 | [
"MIT"
] | null | null | null | FabBOM.py | waynegramlich/Fab | d4a23067a0354ffda106f7032df0501c8db24499 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
"""FabBOM: Bill of Materials (i.e. BOM) and ordering information.
The general idea is that a project is assembled from both raw materials and manufactured parts
that are created in factories. These factory parts/materials are frequently redistributed to
intermediate vendors that you can actual obtains the desired parts from.
The overall concept is called a Bill Of Materials (i.e. BOM) and it is a surprisingly complex topic.
Eventually, the FreeCAD community will decide what to do with about Bill of Materials management.
Until then, the following place FabBOM place holder classes are used.
Classes:
* FabBOM: An actual specific bill of materials for given project.
* oFabCollection: A collection (e.g. catalog) of parts from vendors/manufacturers.
* FabDetail: Information about a specific part.
* FabFactory: A factory (i.e. manufacturer) that constructs parts.
* FabFactoryDetail: Factory specific information about a specific part.
* FabVendor: A vendor that sells parts to end-users.
* FabVendorOrder: An order for parts from a Vendor.
* FabVendorDetail: Vendor specific information about a specific part.
"""
# <--------------------------------------- 100 characters ---------------------------------------> #
from dataclasses import dataclass
from typing import Tuple
# FabDetail:
class FabDetail(object):
    """FabDetail: Information about a specific part (placeholder, no fields yet)."""

    pass
# FabBom:
@dataclass
class FabBOM(object):
    """FabBOM: An actual specific bill of materials for a given project."""

    parts: Tuple[FabDetail, ...]  # One FabDetail per part in the project
# FabCollection:
@dataclass
class FabCollection(dict):
    """FabCollection: A collection (e.g. catalog) of parts.

    Subclasses dict directly; see the link below for the tradeoffs of
    subclassing dict vs. collections.abc.MutableMapping.
    """

    # https://stackoverflow.com/questions/4014621/a-python-class-that-acts-like-dict
# FabFactory:
@dataclass
class FabFactory:
    """Information about a factory (i.e. manufacturer) that constructs parts."""

    name: str  # Factory name
    address: Tuple[str, ...]  # Address lines
# FabFactoryDetail
# FabVendor:
@dataclass
class FabVendor:
    """Information about a vendor that sells parts to end-users.

    The @dataclass decorator was missing in the original, so the annotated
    fields were never real attributes; added for consistency with FabFactory,
    FabVendorDetail and the module's other declarations.
    """

    name: str  # Vendor name
    address: Tuple[str, ...]  # Address lines
# FabVendorDetail:
@dataclass
class FabVendorDetail:
    """FabVendorDetail: Vendor-specific information about a specific part."""

    vendor_number: int  # The required number to order
    key: str  # The Vendor part number
    description: str  # The vendor part description
    price: float  # The vendor price.
# FabVendorOrder:
@dataclass
class FabVendorOrder:
    """FabVendorOrder: An order for parts from a vendor."""

    details: Tuple[FabVendorDetail, ...]  # One entry per ordered line item
# _unit_tests():
def _unit_tests() -> None:
    """Unit tests (placeholder; nothing is exercised yet)."""
    pass


# main():
def main() -> None:
    """main program: currently just runs the (empty) unit tests."""
    _unit_tests()


if __name__ == "__main__":
    main()
acf2cc0b5de45eeaf5bab596e1106ecd73c05f59 | 2,934 | py | Python | precise/scripts/calc_threshold.py | wannaphongcom/mycroft-precise | a7d0fbdd9c2f7d3467766a3381782f208a6d3149 | [
"Apache-2.0"
] | 3 | 2020-11-14T20:10:36.000Z | 2021-04-02T15:02:39.000Z | precise/scripts/calc_threshold.py | wannaphongcom/mycroft-precise | a7d0fbdd9c2f7d3467766a3381782f208a6d3149 | [
"Apache-2.0"
] | 8 | 2021-04-26T14:43:19.000Z | 2021-08-07T12:08:19.000Z | precise/scripts/calc_threshold.py | wannaphongcom/mycroft-precise | a7d0fbdd9c2f7d3467766a3381782f208a6d3149 | [
"Apache-2.0"
] | 1 | 2021-04-02T15:43:50.000Z | 2021-04-02T15:43:50.000Z | #!/usr/bin/env python3
# Copyright 2019 Mycroft AI Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License
from math import sqrt
from os.path import basename, splitext
from prettyparse import Usage
from precise.params import inject_params, save_params
from precise.scripts.base_script import BaseScript
from precise.stats import Stats
class CalcThresholdScript(BaseScript):
    """Fit a (mu, std) threshold mapping for a model from precise-graph stats.

    Loads per-model output/target arrays from the stats file, inverts the
    sigmoid to work in logit space, and stores the positive-class mean and
    (smoothed) standard deviation into the model's saved params.
    """
    usage = Usage('''
        Update the threshold values of a model for a dataset.
        This makes the sensitivity more accurate and linear

        :model str
            Either Keras (.net) or TensorFlow (.pb) model to adjust

        :input_file str
            Input stats file that was outputted from precise-graph

        :-k --model-key str -
            Custom model name to use from the stats.json

        :-s --smoothing float 1.2
            Amount of extra smoothing to apply

        :-c --center float 0.2
            Decoded threshold that is mapped to 0.5. Proportion of
            false negatives at sensitivity=0.5
    ''')

    def __init__(self, args):
        super().__init__(args)

    def run(self):
        args = self.args
        import numpy as np

        # stats file maps model name -> serialized Stats arrays
        model_data = {
            name: Stats.from_np_dict(data) for name, data in np.load(args.input_file)['data'].item().items()
        }
        model_name = args.model_key or basename(splitext(args.model)[0])
        if model_name not in model_data:
            print("Could not find model '{}' in saved models in stats file: {}".format(model_name, list(model_data)))
            raise SystemExit(1)
        stats = model_data[model_name]

        # discard saturated outputs (exactly 0 or 1 would blow up the logit below)
        save_spots = (stats.outputs != 0) & (stats.outputs != 1)
        if save_spots.sum() == 0:
            print('No data (or all NaN)')
            return
        stats.outputs = stats.outputs[save_spots]
        stats.targets = stats.targets[save_spots]
        # inverse sigmoid: map network outputs back to logit space
        inv = -np.log(1 / stats.outputs - 1)

        # mean/std of the positive-class logits; std is widened by --smoothing
        pos = np.extract(stats.targets > 0.5, inv)
        pos_mu = pos.mean().item()
        pos_std = sqrt(np.mean((pos - pos_mu) ** 2)) * args.smoothing
        print('Peak: {:.2f} mu, {:.2f} std'.format(pos_mu, pos_std))

        # persist the fitted threshold mapping next to the model
        pr = inject_params(args.model)
        pr.__dict__.update(threshold_config=(
            (pos_mu, pos_std),
        ))
        save_params(args.model)
        print('Saved params to {}.params'.format(args.model))
# Module-level entry point delegating to BaseScript.run_main (used by the
# console-script wiring and by direct execution below).
main = CalcThresholdScript.run_main

if __name__ == '__main__':
    main()
| 32.241758 | 117 | 0.644853 |
acf2cc7778ab1533ecc753e65c241257cc58c83d | 3,339 | py | Python | src/CompetAnalytic.py | RadjahDri/paraglide-compet-analytics | 5f47b931bcc05b7e78f93ac31cd3f475fd2cbc56 | [
"MIT"
] | null | null | null | src/CompetAnalytic.py | RadjahDri/paraglide-compet-analytics | 5f47b931bcc05b7e78f93ac31cd3f475fd2cbc56 | [
"MIT"
] | null | null | null | src/CompetAnalytic.py | RadjahDri/paraglide-compet-analytics | 5f47b931bcc05b7e78f93ac31cd3f475fd2cbc56 | [
"MIT"
] | null | null | null | from TrackTurnpointStats import TrackTurnpointStats
import datetime
### CONSTANTS
MAX_GLIDER_SPEED_IN_KMH = 100
MAX_GLIDER_SPEED_IN_MS = MAX_GLIDER_SPEED_IN_KMH / 3.6
### CLASSES
class CompetAnalytic:
    """Analyzes competitor tracks against a paragliding task.

    Collects tracks, locates when each track entered each turnpoint, and
    exports time/altitude statistics as CSV.
    """

    def __init__(self, task):
        if(not task):
            raise ValueError("CompetAnalytic cannot be initialized with empty task")
        self.task = task
        self.competitorTracks = []

    def addCompetitorTrack(self, track):
        """Register one competitor's track for later analysis."""
        self.competitorTracks.append(track)

    def getTurnpointsStats(self):
        """Build TrackTurnpointStats for every registered track.

        For each track, walks the task turnpoints in order (skipping index 0,
        presumably the takeoff -- TODO confirm), narrowing the search window
        using the maximum plausible glider speed. Stops at the first turnpoint
        the track never reaches. Returns the stats sorted best-first.
        """
        if(not self.competitorTracks):
            raise ValueError("CompetAnalytic cannot generate stats without competitor tracks")
        competitionTracksStats = []
        for track in self.competitorTracks:
            trackStats = TrackTurnpointStats(track, len(self.task.turnpoints[1:]))
            trackPoint = track.getPointAtTime(self.task.startTime)
            for turnpointIdx in range(1, len(self.task.turnpoints)):
                # search starts at task start for the first turnpoint, else at
                # the time the previous turnpoint was reached
                if(turnpointIdx == 1):
                    currentBeginSearchTime = self.task.startTime
                else:
                    currentBeginSearchTime = trackPoint.time
                # lower-bound travel time: no glider can cover the remaining
                # distance faster than MAX_GLIDER_SPEED_IN_MS
                distanceToTurnpoint = trackPoint.coordinates.computeDistance(self.task.turnpoints[turnpointIdx].coordinates) - self.task.turnpoints[turnpointIdx].radius
                minTimeToNextTurnpoint = distanceToTurnpoint // MAX_GLIDER_SPEED_IN_MS
                currentBeginSearchTime = addTimes(currentBeginSearchTime, minTimeToNextTurnpoint)
                trackPointIdx = track.searchPointInTurnpoint(self.task.turnpoints[turnpointIdx], currentBeginSearchTime, self.task.endTime)
                if(trackPointIdx == None):
                    break  # turnpoint never reached; remaining ones are skipped
                trackStats.addTurnpointStats(trackPointIdx)
                trackPoint = track.trackPoints[trackPointIdx+1]
            competitionTracksStats.append(trackStats)
        competitionTracksStats.sort(reverse=True)
        return competitionTracksStats

    def exportTimeToCsv(self, competitionTracksStats, outputFilePath=None):
        """Render turnpoint times as CSV; optionally write to *outputFilePath*."""
        # header: last 3 turnpoints are folded into the ESS column by the -3 bound
        csvData = "Pilote,Voile,Classement,Start"
        for turnpointIdx in range(1, len(self.task.turnpoints) - 3):
            csvData += ",B%d" % turnpointIdx
        csvData += ",ESS\n"
        ranking = 1
        for competitionTrackStats in competitionTracksStats:
            csvData += competitionTrackStats.exportTimeToCsv(ranking)
            ranking += 1
        if(outputFilePath):
            with open(outputFilePath, "w") as outputFile:
                outputFile.write(csvData)
        return csvData

    def exportAltitudeToCsv(self, competitionTracksStats, outputFilePath=None):
        """Render turnpoint altitudes as CSV; optionally write to *outputFilePath*."""
        csvData = "Pilote,Voile,Classement,Start"
        for turnpointIdx in range(1, len(self.task.turnpoints) - 3):
            csvData += ",B%d" % turnpointIdx
        csvData += ",ESS,Goal\n"
        ranking = 1
        for competitionTrackStats in competitionTracksStats:
            csvData += competitionTrackStats.exportAltitudeToCsv(ranking)
            ranking += 1
        if(outputFilePath):
            with open(outputFilePath, "w") as outputFile:
                outputFile.write(csvData)
        return csvData
def addTimes(time, seconds):
    """Return *time* advanced by *seconds*, wrapping around midnight.

    Sub-second precision is discarded (the anchor is built from h/m/s only).
    """
    anchor = datetime.datetime(1, 1, 1, time.hour, time.minute, time.second)
    shifted = anchor + datetime.timedelta(seconds=seconds)
    return shifted.time()
| 35.903226 | 168 | 0.661276 |
acf2cd9f87001f04c68fd9fcae7d8f84ef286868 | 13,829 | py | Python | _cxx/envoy/tools/code_format/check_format_test_helper.py | Asher-Wang/ambassador | 393a52832d081e0d8d0e0ecd5a14cfe18c62b837 | [
"Apache-2.0"
] | 3 | 2020-06-04T03:26:32.000Z | 2020-06-04T03:26:45.000Z | _cxx/envoy/tools/code_format/check_format_test_helper.py | Asher-Wang/ambassador | 393a52832d081e0d8d0e0ecd5a14cfe18c62b837 | [
"Apache-2.0"
] | 1 | 2020-10-30T03:55:32.000Z | 2020-10-30T03:55:32.000Z | _cxx/envoy/tools/code_format/check_format_test_helper.py | Asher-Wang/ambassador | 393a52832d081e0d8d0e0ecd5a14cfe18c62b837 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
# Tests check_format.py. This must be run in a context where the clang
# version and settings are compatible with the one in the Envoy
# docker. Normally this is run via check_format_test.sh, which
# executes it in under docker.
from __future__ import print_function
from run_command import runCommand
import argparse
import logging
import os
import shutil
import sys
import tempfile
curr_dir = os.path.dirname(os.path.realpath(__file__))
tools = os.path.dirname(curr_dir)
src = os.path.join(tools, 'testdata', 'check_format')
check_format = sys.executable + " " + os.path.join(curr_dir, 'check_format.py')
errors = 0
# Runs the 'check_format' operation, on the specified file, printing
# the comamnd run and the status code as well as the stdout, and returning
# all of that to the caller.
def runCheckFormat(operation, filename):
    """Invoke check_format.py in *operation* mode on *filename*.

    Returns (command string, exit status, combined stdout+stderr lines).
    """
    command = " ".join([check_format, operation, filename])
    status, stdout, stderr = runCommand(command)
    return (command, status, stdout + stderr)
def getInputFile(filename, extra_input_files=None):
    """Copy *filename* (plus any extras) from the testdata dir into the CWD.

    Creates intermediate directories as needed and returns *filename*.
    """
    to_copy = [filename]
    if extra_input_files is not None:
        to_copy += list(extra_input_files)
    for relpath in to_copy:
        source_path = os.path.join(src, relpath)
        directory = os.path.dirname(relpath)
        if directory and not os.path.isdir(directory):
            os.makedirs(directory)
        shutil.copyfile(source_path, relpath)
    return filename
# Attempts to fix file, returning a 4-tuple: the command, input file name,
# output filename, captured stdout as an array of lines, and the error status
# code.
def fixFileHelper(filename, extra_input_files=None):
    """Copy the test file into the CWD and run check_format in "fix" mode.

    Returns (command, infile, outfile, status, stdout) where infile is the
    pristine copy under testdata and outfile is the fixed copy in the CWD.
    """
    command, status, stdout = runCheckFormat(
        "fix", getInputFile(filename, extra_input_files=extra_input_files))
    infile = os.path.join(src, filename)
    return command, infile, filename, status, stdout
# Attempts to fix a file, returning the status code and the generated output.
# If the fix was successful, the diff is returned as a string-array. If the file
# was not fixable, the error-messages are returned as a string-array.
def fixFileExpectingSuccess(file, extra_input_files=None):
    """Fix a file and diff the result against its checked-in '<name>.gold' twin.

    Returns 0 on success, 1 if the fix failed or the output did not match.
    """
    command, infile, outfile, status, stdout = fixFileHelper(file,
                                                            extra_input_files=extra_input_files)
    if status != 0:
        print("FAILED: " + infile)
        emitStdoutAsError(stdout)
        return 1
    status, stdout, stderr = runCommand('diff ' + outfile + ' ' + infile + '.gold')
    if status != 0:
        print("FAILED: " + infile)
        emitStdoutAsError(stdout + stderr)
        return 1
    return 0
def fixFileExpectingNoChange(file):
    """Run the fixer on a clean file and verify it is left unchanged (0/1)."""
    command, infile, outfile, status, stdout = fixFileHelper(file)
    if status != 0:
        return 1
    # diff exit status is nonzero when the files differ
    status, stdout, stderr = runCommand('diff ' + outfile + ' ' + infile)
    if status != 0:
        logging.error(file + ': expected file to remain unchanged')
        return 1
    return 0
def emitStdoutAsError(stdout):
    """Log the captured output lines as one multi-line error message."""
    joined = "\n".join(stdout)
    logging.error(joined)
def expectError(filename, status, stdout, expected_substring):
    """Return 0 when the command failed AND its output mentions the expected
    substring; otherwise log the discrepancy and return 1."""
    if status == 0:
        logging.error("%s: Expected failure `%s`, but succeeded" % (filename, expected_substring))
        return 1
    if any(expected_substring in line for line in stdout):
        return 0
    logging.error("%s: Could not find '%s' in:\n" % (filename, expected_substring))
    emitStdoutAsError(stdout)
    return 1
def fixFileExpectingFailure(filename, expected_substring):
    """Attempt an in-place fix and require it to fail with the given message."""
    _command, _infile, _outfile, status, stdout = fixFileHelper(filename)
    return expectError(filename, status, stdout, expected_substring)
def checkFileExpectingError(filename, expected_substring, extra_input_files=None):
    """Run "check" mode and require failure containing expected_substring (0/1)."""
    command, status, stdout = runCheckFormat(
        "check", getInputFile(filename, extra_input_files=extra_input_files))
    return expectError(filename, status, stdout, expected_substring)
def checkAndFixError(filename, expected_substring, extra_input_files=None):
    """Check mode must flag expected_substring, then fix mode must repair it.

    Returns the accumulated number of failures (0 when both phases pass).
    """
    errors = checkFileExpectingError(filename,
                                     expected_substring,
                                     extra_input_files=extra_input_files)
    errors += fixFileExpectingSuccess(filename, extra_input_files=extra_input_files)
    return errors
def checkToolNotFoundError():
    """Verify the friendly error emitted when clang-format is absent from PATH.

    Skipped (returns 0) when CLANG_FORMAT points at an explicit executable.
    """
    # Temporarily change PATH to test the error about lack of external tools.
    oldPath = os.environ["PATH"]
    os.environ["PATH"] = "/sbin:/usr/sbin"
    clang_format = os.getenv("CLANG_FORMAT", "clang-format-9")
    # If CLANG_FORMAT points directly to the binary, skip this test.
    if os.path.isfile(clang_format) and os.access(clang_format, os.X_OK):
        os.environ["PATH"] = oldPath
        return 0
    errors = checkFileExpectingError("no_namespace_envoy.cc", "Command %s not found." % clang_format)
    # NOTE(review): if the call above raised, PATH would stay clobbered;
    # a try/finally would be safer -- confirm before changing.
    os.environ["PATH"] = oldPath
    return errors
def checkUnfixableError(filename, expected_substring):
    """Both 'check' and 'fix' must fail with expected_substring; returns the
    combined failure count."""
    total = checkFileExpectingError(filename, expected_substring)
    total += fixFileExpectingFailure(filename, expected_substring)
    return total
def checkFileExpectingOK(filename):
    """Run check mode on a clean file: expect status 0 and a no-op fix."""
    command, status, stdout = runCheckFormat("check", getInputFile(filename))
    if status != 0:
        logging.error("Expected %s to have no errors; status=%d, output:\n" % (filename, status))
        emitStdoutAsError(stdout)
    return status + fixFileExpectingNoChange(filename)
def runChecks():
    """Exercise check_format.py against every fixture in tools/testdata/check_format.

    Each helper returns an error count; the total is returned so the caller
    can report the number of failing expectations.
    """
    errors = 0

    # The following error is the error about unavailability of external tools.
    errors += checkToolNotFoundError()

    # The following errors can be detected but not fixed automatically.
    errors += checkUnfixableError("no_namespace_envoy.cc",
                                  "Unable to find Envoy namespace or NOLINT(namespace-envoy)")
    errors += checkUnfixableError("mutex.cc", "Don't use <mutex> or <condition_variable*>")
    errors += checkUnfixableError("condition_variable.cc",
                                  "Don't use <mutex> or <condition_variable*>")
    errors += checkUnfixableError("condition_variable_any.cc",
                                  "Don't use <mutex> or <condition_variable*>")
    errors += checkUnfixableError("shared_mutex.cc", "shared_mutex")
    # NOTE(review): the same shared_mutex.cc expectation is run twice below.
    errors += checkUnfixableError("shared_mutex.cc", "shared_mutex")
    real_time_inject_error = (
        "Don't reference real-world time sources from production code; use injection")
    errors += checkUnfixableError("real_time_source.cc", real_time_inject_error)
    errors += checkUnfixableError("real_time_system.cc", real_time_inject_error)
    errors += checkUnfixableError("system_clock.cc", real_time_inject_error)
    errors += checkUnfixableError("steady_clock.cc", real_time_inject_error)
    errors += checkUnfixableError(
        "unpack_to.cc", "Don't use UnpackTo() directly, use MessageUtil::unpackTo() instead")
    errors += checkUnfixableError("condvar_wait_for.cc", real_time_inject_error)
    errors += checkUnfixableError("sleep.cc", real_time_inject_error)
    errors += checkUnfixableError("std_atomic_free_functions.cc", "std::atomic_*")
    errors += checkUnfixableError("std_get_time.cc", "std::get_time")
    errors += checkUnfixableError("no_namespace_envoy.cc",
                                  "Unable to find Envoy namespace or NOLINT(namespace-envoy)")
    errors += checkUnfixableError("bazel_tools.BUILD", "unexpected @bazel_tools reference")
    errors += checkUnfixableError("proto.BUILD", "unexpected direct external dependency on protobuf")
    errors += checkUnfixableError("proto_deps.cc", "unexpected direct dependency on google.protobuf")
    errors += checkUnfixableError("attribute_packed.cc", "Don't use __attribute__((packed))")
    errors += checkUnfixableError("designated_initializers.cc", "Don't use designated initializers")
    errors += checkUnfixableError("elvis_operator.cc", "Don't use the '?:' operator")
    errors += checkUnfixableError("testing_test.cc",
                                  "Don't use 'using testing::Test;, elaborate the type instead")
    errors += checkUnfixableError(
        "serialize_as_string.cc",
        "Don't use MessageLite::SerializeAsString for generating deterministic serialization")
    errors += checkUnfixableError(
        "version_history/current.rst",
        "Version history not in alphabetical order (zzzzz vs aaaaa): please check placement of line")
    errors += checkUnfixableError(
        "version_history/current.rst",
        "Version history not in alphabetical order (this vs aaaa): please check placement of line")
    errors += checkUnfixableError(
        "version_history/current.rst",
        "Version history line malformed. Does not match VERSION_HISTORY_NEW_LINE_REGEX in "
        "check_format.py")
    errors += checkUnfixableError(
        "counter_from_string.cc",
        "Don't lookup stats by name at runtime; use StatName saved during construction")
    errors += checkUnfixableError(
        "gauge_from_string.cc",
        "Don't lookup stats by name at runtime; use StatName saved during construction")
    errors += checkUnfixableError(
        "histogram_from_string.cc",
        "Don't lookup stats by name at runtime; use StatName saved during construction")
    errors += checkUnfixableError(
        "regex.cc", "Don't use std::regex in code that handles untrusted input. Use RegexMatcher")
    errors += checkUnfixableError(
        "grpc_init.cc",
        "Don't call grpc_init() or grpc_shutdown() directly, instantiate Grpc::GoogleGrpcContext. " +
        "See #8282")
    errors += checkUnfixableError(
        "grpc_shutdown.cc",
        "Don't call grpc_init() or grpc_shutdown() directly, instantiate Grpc::GoogleGrpcContext. " +
        "See #8282")
    errors += checkUnfixableError("clang_format_double_off.cc", "clang-format nested off")
    errors += checkUnfixableError("clang_format_trailing_off.cc", "clang-format remains off")
    errors += checkUnfixableError("clang_format_double_on.cc", "clang-format nested on")
    errors += fixFileExpectingFailure(
        "api/missing_package.proto",
        "Unable to find package name for proto file: ./api/missing_package.proto")
    errors += checkUnfixableError("proto_enum_mangling.cc",
                                  "Don't use mangled Protobuf names for enum constants")
    errors += checkUnfixableError("test_naming.cc",
                                  "Test names should be CamelCase, starting with a capital letter")
    errors += checkUnfixableError(
        "test/register_factory.cc",
        "Don't use Registry::RegisterFactory or REGISTER_FACTORY in tests, use "
        "Registry::InjectFactory instead.")
    errors += checkUnfixableError("strerror.cc",
                                  "Don't use strerror; use Envoy::errorDetails instead")

    # The following files have errors that can be automatically fixed.
    errors += checkAndFixError("over_enthusiastic_spaces.cc",
                               "./over_enthusiastic_spaces.cc:3: over-enthusiastic spaces")
    errors += checkAndFixError("extra_enthusiastic_spaces.cc",
                               "./extra_enthusiastic_spaces.cc:3: over-enthusiastic spaces")
    errors += checkAndFixError("angle_bracket_include.cc",
                               "envoy includes should not have angle brackets")
    errors += checkAndFixError("proto_style.cc", "incorrect protobuf type reference")
    errors += checkAndFixError("long_line.cc", "clang-format check failed")
    errors += checkAndFixError("header_order.cc", "header_order.py check failed")
    errors += checkAndFixError("clang_format_on.cc",
                               "./clang_format_on.cc:7: over-enthusiastic spaces")
    # Validate that a missing license is added.
    errors += checkAndFixError("license.BUILD", "envoy_build_fixer check failed")
    # Validate that an incorrect license is replaced and reordered.
    errors += checkAndFixError("update_license.BUILD", "envoy_build_fixer check failed")
    # Validate that envoy_package() is added where there is an envoy_* rule occurring.
    errors += checkAndFixError("add_envoy_package.BUILD", "envoy_build_fixer check failed")
    # Validate that we don't add envoy_package() when no envoy_* rule.
    errors += checkFileExpectingOK("skip_envoy_package.BUILD")
    # Validate that we clean up gratuitous blank lines.
    errors += checkAndFixError("canonical_spacing.BUILD", "envoy_build_fixer check failed")
    # Validate that unused loads are removed.
    errors += checkAndFixError("remove_unused_loads.BUILD", "envoy_build_fixer check failed")
    # Validate that API proto package deps are computed automagically.
    errors += checkAndFixError("canonical_api_deps.BUILD",
                               "envoy_build_fixer check failed",
                               extra_input_files=[
                                   "canonical_api_deps.cc", "canonical_api_deps.h",
                                   "canonical_api_deps.other.cc"
                               ])
    errors += checkAndFixError("bad_envoy_build_sys_ref.BUILD", "Superfluous '@envoy//' prefix")
    errors += checkAndFixError("proto_format.proto", "clang-format check failed")
    errors += checkAndFixError(
        "cpp_std.cc",
        "term absl::make_unique< should be replaced with standard library term std::make_unique<")

    # Files that must pass both check mode and a no-op fix.
    errors += checkFileExpectingOK("real_time_source_override.cc")
    errors += checkFileExpectingOK("time_system_wait_for.cc")
    errors += checkFileExpectingOK("clang_format_off.cc")
    return errors
if __name__ == "__main__":
    # CLI: only a --log verbosity flag; everything else is hard-wired.
    parser = argparse.ArgumentParser(description='tester for check_format.py.')
    parser.add_argument('--log', choices=['INFO', 'WARN', 'ERROR'], default='INFO')
    args = parser.parse_args()
    logging.basicConfig(format='%(message)s', level=args.log)

    # Now create a temp directory to copy the input files, so we can fix them
    # without actually fixing our testdata. This requires chdiring to the temp
    # directory, so it's annoying to comingle check-tests and fix-tests.
    with tempfile.TemporaryDirectory() as tmp:
        os.chdir(tmp)
        errors = runChecks()
        if errors != 0:
            logging.error("%d FAILURES" % errors)
            exit(1)
    logging.warning("PASS")
| 46.719595 | 99 | 0.715019 |
acf2cdba0a02ad280bd3731f3ba4f9f1e5da8994 | 618 | py | Python | python/stalin_sort_listcomp.py | twofist/stalin-sort | 46d9434bde96daf6c5c957b60b928fc9eb6e4006 | [
"MIT"
] | 1,140 | 2018-10-30T13:03:09.000Z | 2022-03-29T22:41:24.000Z | python/stalin_sort_listcomp.py | twofist/stalin-sort | 46d9434bde96daf6c5c957b60b928fc9eb6e4006 | [
"MIT"
] | 77 | 2018-10-30T13:20:15.000Z | 2021-11-06T03:44:55.000Z | python/stalin_sort_listcomp.py | twofist/stalin-sort | 46d9434bde96daf6c5c957b60b928fc9eb6e4006 | [
"MIT"
] | 245 | 2018-10-30T13:10:53.000Z | 2022-03-14T08:13:56.000Z | # To use, define an array `mixed` and run `next(ss_listcomp)`
# just as in the tests
# `ss_listcomp` is an infinite generator: every next() re-evaluates the
# "core part", yielding the global `mixed` stalin-sorted (any element smaller
# than the last kept one is dropped). The `if` clauses exploit list.append
# returning None: `not l.append(i)` is always truthy and mutates `l` only
# after the ordering test passes. The [-inf] sentinel seeds the comparison
# and is stripped by the trailing [1:] slice. The outer `for il ...` loop
# appends to `il` while iterating it, so it never terminates.
# NOTE(review): the [0] index assumes at least one element survives the
# filter; with an empty `mixed` it would raise IndexError -- confirm intended.
ss_listcomp = \
    (
        # core part
        [l for l in [[float('-inf')]]
         for i in mixed
         if l[-1]<=i and not l.append(i)
        ][0][1:]
        # infinite generator that always returns the core part
        for il in [[None]] for _ in il if not il.append(None))
# tests
if __name__ == "__main__":
    # Repeatedly shuffle and compare against the reference implementation
    # in stalin_sort.py; `mixed` is the global the generator above closes over.
    import random
    from stalin_sort import sort  # from same directory
    mixed = list(range(2**10))
    for _ in range(100):
        random.shuffle(mixed)
        assert next(ss_listcomp) == sort(mixed)
    print("all tests passed")
| 24.72 | 61 | 0.618123 |
acf2cdcb4e37c06321b2acb2a9cf5ba646ff0ebf | 340 | py | Python | src/NodeList/SPHSmoothingScaleInst.cc.py | markguozhiming/spheral | bbb982102e61edb8a1d00cf780bfa571835e1b61 | [
"BSD-Source-Code",
"BSD-3-Clause-LBNL",
"FSFAP"
] | 1 | 2020-10-21T01:56:55.000Z | 2020-10-21T01:56:55.000Z | src/NodeList/SPHSmoothingScaleInst.cc.py | markguozhiming/spheral | bbb982102e61edb8a1d00cf780bfa571835e1b61 | [
"BSD-Source-Code",
"BSD-3-Clause-LBNL",
"FSFAP"
] | null | null | null | src/NodeList/SPHSmoothingScaleInst.cc.py | markguozhiming/spheral | bbb982102e61edb8a1d00cf780bfa571835e1b61 | [
"BSD-Source-Code",
"BSD-3-Clause-LBNL",
"FSFAP"
] | null | null | null | text = """
//------------------------------------------------------------------------------
// Explicit instantiation.
//------------------------------------------------------------------------------
#include "SPHSmoothingScale.cc"
#include "Geometry/Dimension.hh"
template class Spheral::SPHSmoothingScale<Spheral::Dim< %(ndim)s > >;
"""
| 34 | 80 | 0.379412 |
acf2cebe6b49e648eeb228ac31615e9a69f12f25 | 391 | py | Python | common_utils/json_utils.py | lesspointless/Shakal-NG | eee491af94527228735c2bca7644605effd74b37 | [
"MIT"
] | null | null | null | common_utils/json_utils.py | lesspointless/Shakal-NG | eee491af94527228735c2bca7644605effd74b37 | [
"MIT"
] | null | null | null | common_utils/json_utils.py | lesspointless/Shakal-NG | eee491af94527228735c2bca7644605effd74b37 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from django.core.serializers.json import DjangoJSONEncoder
from django.http import HttpResponse
from django.utils.encoding import force_str
def create_json_response(data, **kwargs):
    """Serialize *data* with DjangoJSONEncoder and wrap it in an HttpResponse.

    Extra keyword arguments (e.g. status) are forwarded to HttpResponse.
    """
    payload = json.dumps(data, cls=DjangoJSONEncoder)
    return HttpResponse(force_str(payload), content_type="application/json", **kwargs)
| 27.928571 | 69 | 0.797954 |
acf2cec8d760f890dad0cc05212d09175d845257 | 12,509 | py | Python | tests/unit/response/test_rest.py | brenordv/tavern | f88f954c0bfc0883c3027ab009a74e65fd892f67 | [
"MIT"
] | 3 | 2020-04-17T15:30:51.000Z | 2020-08-15T05:51:38.000Z | tests/unit/response/test_rest.py | brenordv/tavern | f88f954c0bfc0883c3027ab009a74e65fd892f67 | [
"MIT"
] | null | null | null | tests/unit/response/test_rest.py | brenordv/tavern | f88f954c0bfc0883c3027ab009a74e65fd892f67 | [
"MIT"
] | 1 | 2020-04-19T10:08:50.000Z | 2020-04-19T10:08:50.000Z | import pytest
from mock import Mock, patch
from tavern._plugins.rest.response import RestResponse
from tavern.util.dict_util import format_keys
from tavern.util.loader import ANYTHING
from tavern.util import exceptions
@pytest.fixture(name="example_response")
def fix_example_response():
spec = {
"status_code": 302,
"headers": {
"Content-Type": "application/json",
"location": "www.google.com?search=breadsticks",
},
"body": {"a_thing": "authorization_code", "code": "abc123"},
}
return spec.copy()
@pytest.fixture(name="nested_response")
def fix_nested_response():
# https://github.com/taverntesting/tavern/issues/45
spec = {
"status_code": 200,
"headers": {"Content-Type": "application/json"},
"body": {"users": [{"u": {"user_id": "def456"}}]},
}
return spec.copy()
@pytest.fixture(name="nested_schema")
def fix_nested_schema():
# https://github.com/taverntesting/tavern/issues/45
spec = {
"status_code": 200,
"headers": {"Content-Type": "application/json"},
"body": {"users": [{"u": {"user_id": "{code}"}}]},
}
return spec.copy()
class TestSave:
    """Tests for extracting 'save' values from the body, headers, and
    redirect query parameters of a response."""
    def test_save_body(self, example_response, includes):
        """Save a top-level key from the body under a new name
        """
        example_response["save"] = {"body": {"test_code": "code"}}
        r = RestResponse(Mock(), "Test 1", example_response, includes)
        saved = r.maybe_get_save_values_from_save_block(
            "body", example_response["body"]
        )
        assert saved == {"test_code": example_response["body"]["code"]}
    def test_save_body_nested(self, example_response, includes):
        """Save a nested key from the body using dotted-path access
        """
        example_response["body"]["nested"] = {"subthing": "blah"}
        example_response["save"] = {"body": {"test_nested_thing": "nested.subthing"}}
        r = RestResponse(Mock(), "Test 1", example_response, includes)
        saved = r.maybe_get_save_values_from_save_block(
            "body", example_response["body"]
        )
        assert saved == {
            "test_nested_thing": example_response["body"]["nested"]["subthing"]
        }
    def test_save_body_nested_list(self, example_response, includes):
        """Save a list element from the body using a numeric dotted-path index
        """
        example_response["body"]["nested"] = {"subthing": ["abc", "def"]}
        example_response["save"] = {"body": {"test_nested_thing": "nested.subthing.0"}}
        r = RestResponse(Mock(), "Test 1", example_response, includes)
        saved = r.maybe_get_save_values_from_save_block(
            "body", example_response["body"]
        )
        assert saved == {
            "test_nested_thing": example_response["body"]["nested"]["subthing"][0]
        }
    def test_save_header(self, example_response, includes):
        """Save a key from the headers into the right name
        """
        example_response["save"] = {"headers": {"next_location": "location"}}
        r = RestResponse(Mock(), "Test 1", example_response, includes)
        saved = r.maybe_get_save_values_from_save_block(
            "headers", example_response["headers"]
        )
        assert saved == {"next_location": example_response["headers"]["location"]}
    def test_save_redirect_query_param(self, example_response, includes):
        """Save a key from the query parameters of the redirect location
        """
        example_response["save"] = {"redirect_query_params": {"test_search": "search"}}
        r = RestResponse(Mock(), "Test 1", example_response, includes)
        saved = r.maybe_get_save_values_from_save_block(
            "redirect_query_params", {"search": "breadsticks"}
        )
        assert saved == {"test_search": "breadsticks"}
    @pytest.mark.parametrize("save_from", ("body", "headers", "redirect_query_params"))
    def test_bad_save(self, save_from, example_response, includes):
        """Saving a key that is absent from the response saves nothing and
        records an error, whichever block it is requested from."""
        example_response["save"] = {save_from: {"abc": "123"}}
        r = RestResponse(Mock(), "Test 1", example_response, includes)
        saved = r.maybe_get_save_values_from_save_block(save_from, {})
        assert not saved
        assert r.errors
class TestValidate:
    """Tests for RestResponse._validate_block over dict, list, and nested
    response bodies, headers, and redirect query parameters."""
    def test_simple_validate_body(self, example_response, includes):
        """Make sure a simple value comparison works
        """
        r = RestResponse(Mock(), "Test 1", example_response, includes)
        r._validate_block("body", example_response["body"])
        assert not r.errors
    def test_validate_list_body(self, example_response, includes):
        """Make sure a list response can be validated
        """
        example_response["body"] = ["a", 1, "b"]
        r = RestResponse(Mock(), "Test 1", example_response, includes)
        r._validate_block("body", example_response["body"])
        assert not r.errors
    def test_validate_list_body_wrong_order(self, example_response, includes):
        """Order of list items matters
        """
        example_response["body"] = ["a", 1, "b"]
        r = RestResponse(Mock(), "Test 1", example_response, includes)
        # Same elements, reversed — must be reported as a mismatch.
        r._validate_block("body", example_response["body"][::-1])
        assert r.errors
    def test_validate_nested_body(self, example_response, includes):
        """Make sure a nested value comparison works
        """
        example_response["body"]["nested"] = {"subthing": "blah"}
        r = RestResponse(Mock(), "Test 1", example_response, includes)
        r._validate_block("body", example_response["body"])
        assert not r.errors
    def test_simple_validate_headers(self, example_response, includes):
        """Make sure a simple header comparison works
        """
        r = RestResponse(Mock(), "Test 1", example_response, includes)
        r._validate_block("headers", example_response["headers"])
        assert not r.errors
    def test_simple_validate_redirect_query_params(self, example_response, includes):
        """Make sure a simple redirect query parameter comparison works
        """
        r = RestResponse(Mock(), "Test 1", example_response, includes)
        r._validate_block("redirect_query_params", {"search": "breadsticks"})
        assert not r.errors
    def test_validate_missing_list_key(self, example_response, includes):
        """The response has 4 items but only 3 are expected - catch error"""
        example_response["body"] = ["a", 1, "b", "c"]
        bad_expected = example_response["body"][:-1]
        r = RestResponse(Mock(), "Test 1", example_response, includes)
        r._validate_block("body", bad_expected)
        assert r.errors
    def test_validate_wrong_list_dict(self, example_response, includes):
        """We expected a dict, but the response body is a list"""
        example_response["body"] = ["a", 1, "b", "c"]
        bad_expected = {"a": "b"}
        r = RestResponse(Mock(), "Test 1", example_response, includes)
        r._validate_block("body", bad_expected)
        assert r.errors
    def test_validate_wrong_dict_list(self, example_response, includes):
        """We expected a list, but the response body is a dict"""
        example_response["body"] = {"a": "b"}
        bad_expected = ["a", "b", "c"]
        r = RestResponse(Mock(), "Test 1", example_response, includes)
        r._validate_block("body", bad_expected)
        assert r.errors
class TestMatchStatusCodes:
    """Tests for RestResponse._check_status_code with a single expected
    code and with a list of acceptable codes."""

    @staticmethod
    def _response_expecting(example_response, includes, expected):
        """Build a RestResponse whose spec expects the given status code(s)."""
        example_response["status_code"] = expected
        return RestResponse(Mock(), "Test 1", example_response, includes)

    def test_validate_single_status_code_passes(self, example_response, includes):
        """A single expected code matches an identical actual code."""
        resp = self._response_expecting(example_response, includes, 100)
        resp._check_status_code(100, {})
        assert not resp.errors

    def test_validate_single_status_code_incorrect(self, example_response, includes):
        """A single expected code mismatching the actual code is an error."""
        resp = self._response_expecting(example_response, includes, 100)
        resp._check_status_code(102, {})
        assert resp.errors

    def test_validate_multiple_status_codes_passes(self, example_response, includes):
        """Any code contained in a list of expected codes matches."""
        resp = self._response_expecting(example_response, includes, [100, 200, 300])
        resp._check_status_code(100, {})
        assert not resp.errors

    def test_validate_multiple_status_codes_missing(self, example_response, includes):
        """A code absent from the list of expected codes is an error."""
        resp = self._response_expecting(example_response, includes, [100, 200, 300])
        resp._check_status_code(103, {})
        assert resp.errors
class TestNestedValidate:
    """Tests for the special 'null' and 'anything' sentinels in nested
    expected-body comparisons."""

    def test_validate_nested_null(self, example_response, includes):
        """A nested None in the spec matches any value, but emits a
        FutureWarning because this behaviour will be removed."""
        example_response["body"] = {"nested": {"subthing": None}}
        resp = RestResponse(Mock(), "Test 1", example_response, includes)
        with pytest.warns(FutureWarning):
            resp._validate_block("body", {"nested": {"subthing": "blah"}})
        assert not resp.errors

    def test_validate_nested_anything(self, example_response, includes):
        """A nested ANYTHING sentinel in the spec matches any value.

        This pokes at the ANYTHING comparison directly, so an integration
        test is still needed for end-to-end coverage.
        """
        example_response["body"] = {"nested": {"subthing": ANYTHING}}
        resp = RestResponse(Mock(), "Test 1", example_response, includes)
        resp._validate_block("body", {"nested": {"subthing": "blah"}})
        assert not resp.errors
class TestFull:
    """End-to-end tests for RestResponse.verify() using a minimal response
    stub that exposes only the attributes verify() reads
    (headers, content, json(), status_code)."""
    def test_validate_and_save(self, example_response, includes):
        """Test full verification + return saved values
        """
        example_response["save"] = {"body": {"test_code": "code"}}
        r = RestResponse(Mock(), "Test 1", example_response, includes)
        class FakeResponse:
            headers = example_response["headers"]
            content = "test".encode("utf8")
            def json(self):
                return example_response["body"]
            status_code = example_response["status_code"]
        saved = r.verify(FakeResponse())
        assert saved == {"test_code": example_response["body"]["code"]}
    def test_incorrect_status_code(self, example_response, includes):
        """An unexpected status code makes verify() raise and record errors
        """
        r = RestResponse(Mock(), "Test 1", example_response, includes)
        class FakeResponse:
            headers = example_response["headers"]
            content = "test".encode("utf8")
            def json(self):
                return example_response["body"]
            status_code = 400
        with pytest.raises(exceptions.TestFailError):
            r.verify(FakeResponse())
        assert r.errors
    def test_saved_value_in_validate(self, nested_response, nested_schema, includes):
        """Format variables in the expected schema are resolved via
        format_keys before verification (regression for nested bodies)."""
        r = RestResponse(
            Mock(),
            "Test 1",
            format_keys(nested_schema, includes["variables"]),
            includes,
        )
        class FakeResponse:
            headers = nested_response["headers"]
            content = "test".encode("utf8")
            def json(self):
                return nested_response["body"]
            status_code = nested_response["status_code"]
        r.verify(FakeResponse())
    @pytest.mark.parametrize("value", [1, "some", False, None])
    def test_validate_single_value_response(self, example_response, includes, value):
        """Check validating single value response (string, int, etc)."""
        # No expected body in the spec: any scalar JSON payload must verify.
        del example_response["body"]
        r = RestResponse(Mock(), "Test 1", example_response, includes)
        class FakeResponse:
            headers = example_response["headers"]
            content = "test".encode("utf8")
            def json(self):
                return value
            status_code = example_response["status_code"]
        r.verify(FakeResponse())
def test_status_code_warns(example_response, includes):
    """A status code that is not a real HTTP status only logs a warning
    when the response spec is constructed, rather than raising
    """
    example_response["status_code"] = 231234
    with patch("tavern._plugins.rest.response.logger.warning") as wmock:
        RestResponse(Mock(), "Test 1", example_response, includes)
    assert wmock.called
| 30.88642 | 87 | 0.628428 |
acf2cf9e6fdd0ab903615e8edd7e7d949ffee4ac | 20,003 | py | Python | sdk/python/pulumi_azure_native/notificationhubs/v20170401/_inputs.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/notificationhubs/v20170401/_inputs.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/notificationhubs/v20170401/_inputs.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from ._enums import *
__all__ = [
'AdmCredentialArgs',
'ApnsCredentialArgs',
'BaiduCredentialArgs',
'GcmCredentialArgs',
'MpnsCredentialArgs',
'SharedAccessAuthorizationRulePropertiesArgs',
'SkuArgs',
'WnsCredentialArgs',
]
# Auto-generated Pulumi input type (see the generator warning at the top of
# this file); regenerate rather than editing by hand.
@pulumi.input_type
class AdmCredentialArgs:
    def __init__(__self__, *,
                 auth_token_url: Optional[pulumi.Input[str]] = None,
                 client_id: Optional[pulumi.Input[str]] = None,
                 client_secret: Optional[pulumi.Input[str]] = None):
        """
        Description of a NotificationHub AdmCredential.
        :param pulumi.Input[str] auth_token_url: The URL of the authorization token.
        :param pulumi.Input[str] client_id: The client identifier.
        :param pulumi.Input[str] client_secret: The credential secret access key.
        """
        # Only record arguments that were actually supplied; pulumi.set stores
        # them on the input-type wrapper created by @pulumi.input_type.
        if auth_token_url is not None:
            pulumi.set(__self__, "auth_token_url", auth_token_url)
        if client_id is not None:
            pulumi.set(__self__, "client_id", client_id)
        if client_secret is not None:
            pulumi.set(__self__, "client_secret", client_secret)
    @property
    @pulumi.getter(name="authTokenUrl")
    def auth_token_url(self) -> Optional[pulumi.Input[str]]:
        """
        The URL of the authorization token.
        """
        return pulumi.get(self, "auth_token_url")
    @auth_token_url.setter
    def auth_token_url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "auth_token_url", value)
    @property
    @pulumi.getter(name="clientId")
    def client_id(self) -> Optional[pulumi.Input[str]]:
        """
        The client identifier.
        """
        return pulumi.get(self, "client_id")
    @client_id.setter
    def client_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "client_id", value)
    @property
    @pulumi.getter(name="clientSecret")
    def client_secret(self) -> Optional[pulumi.Input[str]]:
        """
        The credential secret access key.
        """
        return pulumi.get(self, "client_secret")
    @client_secret.setter
    def client_secret(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "client_secret", value)
# Auto-generated Pulumi input type (see the generator warning at the top of
# this file); regenerate rather than editing by hand.
@pulumi.input_type
class ApnsCredentialArgs:
    def __init__(__self__, *,
                 apns_certificate: Optional[pulumi.Input[str]] = None,
                 app_id: Optional[pulumi.Input[str]] = None,
                 app_name: Optional[pulumi.Input[str]] = None,
                 certificate_key: Optional[pulumi.Input[str]] = None,
                 endpoint: Optional[pulumi.Input[str]] = None,
                 key_id: Optional[pulumi.Input[str]] = None,
                 thumbprint: Optional[pulumi.Input[str]] = None,
                 token: Optional[pulumi.Input[str]] = None):
        """
        Description of a NotificationHub ApnsCredential.
        :param pulumi.Input[str] apns_certificate: The APNS certificate. Specify if using Certificate Authentication Mode.
        :param pulumi.Input[str] app_id: The issuer (iss) registered claim key. The value is a 10-character TeamId, obtained from your developer account. Specify if using Token Authentication Mode.
        :param pulumi.Input[str] app_name: The name of the application or BundleId. Specify if using Token Authentication Mode.
        :param pulumi.Input[str] certificate_key: The APNS certificate password if it exists.
        :param pulumi.Input[str] endpoint: The APNS endpoint of this credential. If using Certificate Authentication Mode and Sandbox specify 'gateway.sandbox.push.apple.com'. If using Certificate Authentication Mode and Production specify 'gateway.push.apple.com'. If using Token Authentication Mode and Sandbox specify 'https://api.development.push.apple.com:443/3/device'. If using Token Authentication Mode and Production specify 'https://api.push.apple.com:443/3/device'.
        :param pulumi.Input[str] key_id: A 10-character key identifier (kid) key, obtained from your developer account. Specify if using Token Authentication Mode.
        :param pulumi.Input[str] thumbprint: The APNS certificate thumbprint. Specify if using Certificate Authentication Mode.
        :param pulumi.Input[str] token: Provider Authentication Token, obtained through your developer account. Specify if using Token Authentication Mode.
        """
        # Only record arguments that were actually supplied.
        if apns_certificate is not None:
            pulumi.set(__self__, "apns_certificate", apns_certificate)
        if app_id is not None:
            pulumi.set(__self__, "app_id", app_id)
        if app_name is not None:
            pulumi.set(__self__, "app_name", app_name)
        if certificate_key is not None:
            pulumi.set(__self__, "certificate_key", certificate_key)
        if endpoint is not None:
            pulumi.set(__self__, "endpoint", endpoint)
        if key_id is not None:
            pulumi.set(__self__, "key_id", key_id)
        if thumbprint is not None:
            pulumi.set(__self__, "thumbprint", thumbprint)
        if token is not None:
            pulumi.set(__self__, "token", token)
    @property
    @pulumi.getter(name="apnsCertificate")
    def apns_certificate(self) -> Optional[pulumi.Input[str]]:
        """
        The APNS certificate. Specify if using Certificate Authentication Mode.
        """
        return pulumi.get(self, "apns_certificate")
    @apns_certificate.setter
    def apns_certificate(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "apns_certificate", value)
    @property
    @pulumi.getter(name="appId")
    def app_id(self) -> Optional[pulumi.Input[str]]:
        """
        The issuer (iss) registered claim key. The value is a 10-character TeamId, obtained from your developer account. Specify if using Token Authentication Mode.
        """
        return pulumi.get(self, "app_id")
    @app_id.setter
    def app_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "app_id", value)
    @property
    @pulumi.getter(name="appName")
    def app_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the application or BundleId. Specify if using Token Authentication Mode.
        """
        return pulumi.get(self, "app_name")
    @app_name.setter
    def app_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "app_name", value)
    @property
    @pulumi.getter(name="certificateKey")
    def certificate_key(self) -> Optional[pulumi.Input[str]]:
        """
        The APNS certificate password if it exists.
        """
        return pulumi.get(self, "certificate_key")
    @certificate_key.setter
    def certificate_key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "certificate_key", value)
    @property
    @pulumi.getter
    def endpoint(self) -> Optional[pulumi.Input[str]]:
        """
        The APNS endpoint of this credential. If using Certificate Authentication Mode and Sandbox specify 'gateway.sandbox.push.apple.com'. If using Certificate Authentication Mode and Production specify 'gateway.push.apple.com'. If using Token Authentication Mode and Sandbox specify 'https://api.development.push.apple.com:443/3/device'. If using Token Authentication Mode and Production specify 'https://api.push.apple.com:443/3/device'.
        """
        return pulumi.get(self, "endpoint")
    @endpoint.setter
    def endpoint(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "endpoint", value)
    @property
    @pulumi.getter(name="keyId")
    def key_id(self) -> Optional[pulumi.Input[str]]:
        """
        A 10-character key identifier (kid) key, obtained from your developer account. Specify if using Token Authentication Mode.
        """
        return pulumi.get(self, "key_id")
    @key_id.setter
    def key_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "key_id", value)
    @property
    @pulumi.getter
    def thumbprint(self) -> Optional[pulumi.Input[str]]:
        """
        The APNS certificate thumbprint. Specify if using Certificate Authentication Mode.
        """
        return pulumi.get(self, "thumbprint")
    @thumbprint.setter
    def thumbprint(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "thumbprint", value)
    @property
    @pulumi.getter
    def token(self) -> Optional[pulumi.Input[str]]:
        """
        Provider Authentication Token, obtained through your developer account. Specify if using Token Authentication Mode.
        """
        return pulumi.get(self, "token")
    @token.setter
    def token(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "token", value)
# Auto-generated Pulumi input type (see the generator warning at the top of
# this file); regenerate rather than editing by hand.
@pulumi.input_type
class BaiduCredentialArgs:
    def __init__(__self__, *,
                 baidu_api_key: Optional[pulumi.Input[str]] = None,
                 baidu_end_point: Optional[pulumi.Input[str]] = None,
                 baidu_secret_key: Optional[pulumi.Input[str]] = None):
        """
        Description of a NotificationHub BaiduCredential.
        :param pulumi.Input[str] baidu_api_key: Baidu Api Key.
        :param pulumi.Input[str] baidu_end_point: Baidu Endpoint.
        :param pulumi.Input[str] baidu_secret_key: Baidu Secret Key
        """
        # Only record arguments that were actually supplied.
        if baidu_api_key is not None:
            pulumi.set(__self__, "baidu_api_key", baidu_api_key)
        if baidu_end_point is not None:
            pulumi.set(__self__, "baidu_end_point", baidu_end_point)
        if baidu_secret_key is not None:
            pulumi.set(__self__, "baidu_secret_key", baidu_secret_key)
    @property
    @pulumi.getter(name="baiduApiKey")
    def baidu_api_key(self) -> Optional[pulumi.Input[str]]:
        """
        Baidu Api Key.
        """
        return pulumi.get(self, "baidu_api_key")
    @baidu_api_key.setter
    def baidu_api_key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "baidu_api_key", value)
    @property
    @pulumi.getter(name="baiduEndPoint")
    def baidu_end_point(self) -> Optional[pulumi.Input[str]]:
        """
        Baidu Endpoint.
        """
        return pulumi.get(self, "baidu_end_point")
    @baidu_end_point.setter
    def baidu_end_point(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "baidu_end_point", value)
    @property
    @pulumi.getter(name="baiduSecretKey")
    def baidu_secret_key(self) -> Optional[pulumi.Input[str]]:
        """
        Baidu Secret Key
        """
        return pulumi.get(self, "baidu_secret_key")
    @baidu_secret_key.setter
    def baidu_secret_key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "baidu_secret_key", value)
# Auto-generated Pulumi input type (see the generator warning at the top of
# this file); regenerate rather than editing by hand.
@pulumi.input_type
class GcmCredentialArgs:
    def __init__(__self__, *,
                 gcm_endpoint: Optional[pulumi.Input[str]] = None,
                 google_api_key: Optional[pulumi.Input[str]] = None):
        """
        Description of a NotificationHub GcmCredential.
        :param pulumi.Input[str] gcm_endpoint: The FCM legacy endpoint. Default value is 'https://fcm.googleapis.com/fcm/send'
        :param pulumi.Input[str] google_api_key: The Google API key.
        """
        # Only record arguments that were actually supplied.
        if gcm_endpoint is not None:
            pulumi.set(__self__, "gcm_endpoint", gcm_endpoint)
        if google_api_key is not None:
            pulumi.set(__self__, "google_api_key", google_api_key)
    @property
    @pulumi.getter(name="gcmEndpoint")
    def gcm_endpoint(self) -> Optional[pulumi.Input[str]]:
        """
        The FCM legacy endpoint. Default value is 'https://fcm.googleapis.com/fcm/send'
        """
        return pulumi.get(self, "gcm_endpoint")
    @gcm_endpoint.setter
    def gcm_endpoint(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "gcm_endpoint", value)
    @property
    @pulumi.getter(name="googleApiKey")
    def google_api_key(self) -> Optional[pulumi.Input[str]]:
        """
        The Google API key.
        """
        return pulumi.get(self, "google_api_key")
    @google_api_key.setter
    def google_api_key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "google_api_key", value)
# Auto-generated Pulumi input type (see the generator warning at the top of
# this file); regenerate rather than editing by hand.
@pulumi.input_type
class MpnsCredentialArgs:
    def __init__(__self__, *,
                 certificate_key: Optional[pulumi.Input[str]] = None,
                 mpns_certificate: Optional[pulumi.Input[str]] = None,
                 thumbprint: Optional[pulumi.Input[str]] = None):
        """
        Description of a NotificationHub MpnsCredential.
        :param pulumi.Input[str] certificate_key: The certificate key for this credential.
        :param pulumi.Input[str] mpns_certificate: The MPNS certificate.
        :param pulumi.Input[str] thumbprint: The MPNS certificate Thumbprint
        """
        # Only record arguments that were actually supplied.
        if certificate_key is not None:
            pulumi.set(__self__, "certificate_key", certificate_key)
        if mpns_certificate is not None:
            pulumi.set(__self__, "mpns_certificate", mpns_certificate)
        if thumbprint is not None:
            pulumi.set(__self__, "thumbprint", thumbprint)
    @property
    @pulumi.getter(name="certificateKey")
    def certificate_key(self) -> Optional[pulumi.Input[str]]:
        """
        The certificate key for this credential.
        """
        return pulumi.get(self, "certificate_key")
    @certificate_key.setter
    def certificate_key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "certificate_key", value)
    @property
    @pulumi.getter(name="mpnsCertificate")
    def mpns_certificate(self) -> Optional[pulumi.Input[str]]:
        """
        The MPNS certificate.
        """
        return pulumi.get(self, "mpns_certificate")
    @mpns_certificate.setter
    def mpns_certificate(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "mpns_certificate", value)
    @property
    @pulumi.getter
    def thumbprint(self) -> Optional[pulumi.Input[str]]:
        """
        The MPNS certificate Thumbprint
        """
        return pulumi.get(self, "thumbprint")
    @thumbprint.setter
    def thumbprint(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "thumbprint", value)
# Auto-generated Pulumi input type (see the generator warning at the top of
# this file); regenerate rather than editing by hand.
@pulumi.input_type
class SharedAccessAuthorizationRulePropertiesArgs:
    def __init__(__self__, *,
                 rights: Optional[pulumi.Input[Sequence[pulumi.Input['AccessRights']]]] = None):
        """
        SharedAccessAuthorizationRule properties.
        :param pulumi.Input[Sequence[pulumi.Input['AccessRights']]] rights: The rights associated with the rule.
        """
        # Only record the argument if it was actually supplied.
        if rights is not None:
            pulumi.set(__self__, "rights", rights)
    @property
    @pulumi.getter
    def rights(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AccessRights']]]]:
        """
        The rights associated with the rule.
        """
        return pulumi.get(self, "rights")
    @rights.setter
    def rights(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['AccessRights']]]]):
        pulumi.set(self, "rights", value)
# Auto-generated Pulumi input type (see the generator warning at the top of
# this file); regenerate rather than editing by hand.
@pulumi.input_type
class SkuArgs:
    def __init__(__self__, *,
                 name: pulumi.Input[Union[str, 'SkuName']],
                 capacity: Optional[pulumi.Input[int]] = None,
                 family: Optional[pulumi.Input[str]] = None,
                 size: Optional[pulumi.Input[str]] = None,
                 tier: Optional[pulumi.Input[str]] = None):
        """
        The Sku description for a namespace
        :param pulumi.Input[Union[str, 'SkuName']] name: Name of the notification hub sku
        :param pulumi.Input[int] capacity: The capacity of the resource
        :param pulumi.Input[str] family: The Sku Family
        :param pulumi.Input[str] size: The Sku size
        :param pulumi.Input[str] tier: The tier of particular sku
        """
        # 'name' is required; all other arguments are recorded only if supplied.
        pulumi.set(__self__, "name", name)
        if capacity is not None:
            pulumi.set(__self__, "capacity", capacity)
        if family is not None:
            pulumi.set(__self__, "family", family)
        if size is not None:
            pulumi.set(__self__, "size", size)
        if tier is not None:
            pulumi.set(__self__, "tier", tier)
    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[Union[str, 'SkuName']]:
        """
        Name of the notification hub sku
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: pulumi.Input[Union[str, 'SkuName']]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter
    def capacity(self) -> Optional[pulumi.Input[int]]:
        """
        The capacity of the resource
        """
        return pulumi.get(self, "capacity")
    @capacity.setter
    def capacity(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "capacity", value)
    @property
    @pulumi.getter
    def family(self) -> Optional[pulumi.Input[str]]:
        """
        The Sku Family
        """
        return pulumi.get(self, "family")
    @family.setter
    def family(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "family", value)
    @property
    @pulumi.getter
    def size(self) -> Optional[pulumi.Input[str]]:
        """
        The Sku size
        """
        return pulumi.get(self, "size")
    @size.setter
    def size(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "size", value)
    @property
    @pulumi.getter
    def tier(self) -> Optional[pulumi.Input[str]]:
        """
        The tier of particular sku
        """
        return pulumi.get(self, "tier")
    @tier.setter
    def tier(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "tier", value)
# Auto-generated Pulumi input type (see the generator warning at the top of
# this file); regenerate rather than editing by hand.
@pulumi.input_type
class WnsCredentialArgs:
    def __init__(__self__, *,
                 package_sid: Optional[pulumi.Input[str]] = None,
                 secret_key: Optional[pulumi.Input[str]] = None,
                 windows_live_endpoint: Optional[pulumi.Input[str]] = None):
        """
        Description of a NotificationHub WnsCredential.
        :param pulumi.Input[str] package_sid: The package ID for this credential.
        :param pulumi.Input[str] secret_key: The secret key.
        :param pulumi.Input[str] windows_live_endpoint: The Windows Live endpoint.
        """
        # Only record arguments that were actually supplied.
        if package_sid is not None:
            pulumi.set(__self__, "package_sid", package_sid)
        if secret_key is not None:
            pulumi.set(__self__, "secret_key", secret_key)
        if windows_live_endpoint is not None:
            pulumi.set(__self__, "windows_live_endpoint", windows_live_endpoint)
    @property
    @pulumi.getter(name="packageSid")
    def package_sid(self) -> Optional[pulumi.Input[str]]:
        """
        The package ID for this credential.
        """
        return pulumi.get(self, "package_sid")
    @package_sid.setter
    def package_sid(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "package_sid", value)
    @property
    @pulumi.getter(name="secretKey")
    def secret_key(self) -> Optional[pulumi.Input[str]]:
        """
        The secret key.
        """
        return pulumi.get(self, "secret_key")
    @secret_key.setter
    def secret_key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "secret_key", value)
    @property
    @pulumi.getter(name="windowsLiveEndpoint")
    def windows_live_endpoint(self) -> Optional[pulumi.Input[str]]:
        """
        The Windows Live endpoint.
        """
        return pulumi.get(self, "windows_live_endpoint")
    @windows_live_endpoint.setter
    def windows_live_endpoint(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "windows_live_endpoint", value)
| 37.458801 | 476 | 0.644103 |
acf2cfdfec629380aca0686400cb8f7d919da3de | 17,888 | py | Python | mitmproxy/tools/web/app.py | tomlabaude/mitmproxy | 3af4647804700bb6e86a9e1b73d7bf8612d872fa | [
"MIT"
] | 1 | 2019-10-20T18:59:18.000Z | 2019-10-20T18:59:18.000Z | mitmproxy/tools/web/app.py | tomlabaude/mitmproxy | 3af4647804700bb6e86a9e1b73d7bf8612d872fa | [
"MIT"
] | null | null | null | mitmproxy/tools/web/app.py | tomlabaude/mitmproxy | 3af4647804700bb6e86a9e1b73d7bf8612d872fa | [
"MIT"
] | null | null | null | import asyncio
import hashlib
import json
import logging
import os.path
import re
from io import BytesIO
import tornado.escape
import tornado.web
import tornado.websocket
import mitmproxy.flow
import mitmproxy.tools.web.master # noqa
from mitmproxy import contentviews
from mitmproxy import exceptions
from mitmproxy import flowfilter
from mitmproxy import http
from mitmproxy import io
from mitmproxy import log
from mitmproxy import optmanager
from mitmproxy import version
def flow_to_json(flow: mitmproxy.flow.Flow) -> dict:
    """
    Remove flow message content and cert to save transmission space.
    Args:
        flow: The original flow.
    Returns:
        A JSON-serializable dict describing the flow. Request/response body
        content is omitted; only its length and SHA-256 digest are included.
    """
    f = {
        "id": flow.id,
        "intercepted": flow.intercepted,
        "client_conn": flow.client_conn.get_state(),
        "server_conn": flow.server_conn.get_state(),
        "type": flow.type,
        "modified": flow.modified(),
        "marked": flow.marked,
    }
    # .alpn_proto_negotiated is bytes, we need to decode that.
    for conn in "client_conn", "server_conn":
        if f[conn]["alpn_proto_negotiated"] is None:
            continue
        f[conn]["alpn_proto_negotiated"] = \
            f[conn]["alpn_proto_negotiated"].decode(errors="backslashreplace")
    # There are some bytes in here as well, let's skip it until we have them in the UI.
    f["client_conn"].pop("tls_extensions", None)
    if flow.error:
        f["error"] = flow.error.get_state()
    if isinstance(flow, http.HTTPFlow):
        if flow.request:
            # Body content itself is not sent; expose its length and digest only.
            if flow.request.raw_content:
                content_length = len(flow.request.raw_content)
                content_hash = hashlib.sha256(flow.request.raw_content).hexdigest()
            else:
                content_length = None
                content_hash = None
            f["request"] = {
                "method": flow.request.method,
                "scheme": flow.request.scheme,
                "host": flow.request.host,
                "port": flow.request.port,
                "path": flow.request.path,
                "http_version": flow.request.http_version,
                "headers": tuple(flow.request.headers.items(True)),
                "contentLength": content_length,
                "contentHash": content_hash,
                "timestamp_start": flow.request.timestamp_start,
                "timestamp_end": flow.request.timestamp_end,
                "is_replay": flow.request.is_replay,
                "pretty_host": flow.request.pretty_host,
            }
        if flow.response:
            if flow.response.raw_content:
                content_length = len(flow.response.raw_content)
                content_hash = hashlib.sha256(flow.response.raw_content).hexdigest()
            else:
                content_length = None
                content_hash = None
            f["response"] = {
                "http_version": flow.response.http_version,
                "status_code": flow.response.status_code,
                "reason": flow.response.reason,
                "headers": tuple(flow.response.headers.items(True)),
                "contentLength": content_length,
                "contentHash": content_hash,
                "timestamp_start": flow.response.timestamp_start,
                "timestamp_end": flow.response.timestamp_end,
                "is_replay": flow.response.is_replay,
            }
    # Drop certificate material - it is not needed client-side (see docstring).
    f.get("server_conn", {}).pop("cert", None)
    f.get("client_conn", {}).pop("mitmcert", None)
    return f
def logentry_to_json(e: log.LogEntry) -> dict:
    """Convert a log entry into its JSON-serializable dict form."""
    # The CPython object id is sufficient as a client-side identifier.
    entry = {"id": id(e)}
    entry["message"] = e.msg
    entry["level"] = e.level
    return entry
class APIError(tornado.web.HTTPError):
    """HTTP error whose log_message is written verbatim as the response body
    by RequestHandler.write_error."""
    pass
class RequestHandler(tornado.web.RequestHandler):
    """Base handler: JSON conveniences, security headers, and shortcuts to
    the mitmproxy master/view."""
    application: "Application"
    def write(self, chunk):
        # Writing arrays on the top level is ok nowadays.
        # http://flask.pocoo.org/docs/0.11/security/#json-security
        if isinstance(chunk, list):
            chunk = tornado.escape.json_encode(chunk)
            self.set_header("Content-Type", "application/json; charset=UTF-8")
        super(RequestHandler, self).write(chunk)
    def set_default_headers(self):
        # Security-related response headers applied to every response.
        super().set_default_headers()
        self.set_header("Server", version.MITMPROXY)
        self.set_header("X-Frame-Options", "DENY")
        self.add_header("X-XSS-Protection", "1; mode=block")
        self.add_header("X-Content-Type-Options", "nosniff")
        self.add_header(
            "Content-Security-Policy",
            "default-src 'self'; "
            "connect-src 'self' ws:; "
            "style-src   'self' 'unsafe-inline'"
        )
    @property
    def json(self):
        """Parse the request body as JSON; raise APIError(400) on a wrong
        Content-Type or malformed payload."""
        if not self.request.headers.get("Content-Type", "").startswith("application/json"):
            raise APIError(400, "Invalid Content-Type, expected application/json.")
        try:
            return json.loads(self.request.body.decode())
        except Exception as e:
            raise APIError(400, "Malformed JSON: {}".format(str(e)))
    @property
    def filecontents(self):
        """
        Accept either a multipart/form file upload or just take the plain request body.
        """
        if self.request.files:
            return next(iter(self.request.files.values()))[0].body
        else:
            return self.request.body
    @property
    def view(self) -> "mitmproxy.addons.view.View":
        # Shortcut to the flow view held by the running master.
        return self.application.master.view
    @property
    def master(self) -> "mitmproxy.tools.web.master.WebMaster":
        return self.application.master
    @property
    def flow(self) -> mitmproxy.flow.Flow:
        """Look up the flow addressed by the 'flow_id' path argument; raise
        APIError(404) if it is unknown."""
        flow_id = str(self.path_kwargs["flow_id"])
        # FIXME: Add a facility to addon.view to safely access the store
        flow = self.view.get_by_id(flow_id)
        if flow:
            return flow
        else:
            raise APIError(404, "Flow not found.")
    def write_error(self, status_code: int, **kwargs):
        # APIError messages are sent as the plain response body; anything
        # else falls back to tornado's default error page.
        if "exc_info" in kwargs and isinstance(kwargs["exc_info"][1], APIError):
            self.finish(kwargs["exc_info"][1].log_message)
        else:
            super().write_error(status_code, **kwargs)
class IndexHandler(RequestHandler):
    """Serves the single-page web UI."""
    def get(self):
        token = self.xsrf_token  # https://github.com/tornadoweb/tornado/issues/645
        assert token
        self.render("index.html")
class FilterHelp(RequestHandler):
    """GET /filter-help: documentation for the flow filter language."""
    def get(self):
        self.write(dict(
            commands=flowfilter.help
        ))
class WebSocketEventBroadcaster(tornado.websocket.WebSocketHandler):
    """Base class that fans out JSON messages to all connected websockets."""

    # raise an error if inherited class doesn't specify its own instance.
    connections: set = None

    def open(self):
        self.connections.add(self)

    def on_close(self):
        self.connections.remove(self)

    @classmethod
    def broadcast(cls, **kwargs):
        # surrogateescape keeps arbitrary (possibly binary-ish) header data
        # round-trippable through the JSON encoding.
        message = json.dumps(kwargs, ensure_ascii=False).encode("utf8", "surrogateescape")
        for conn in cls.connections:
            try:
                conn.write_message(message)
            except Exception:  # pragma: no cover
                logging.error("Error sending message", exc_info=True)
class ClientConnection(WebSocketEventBroadcaster):
    # Each broadcaster subclass must own its own set of open connections.
    connections: set = set()
class Flows(RequestHandler):
    """GET /flows: all flows in the view, serialized as JSON."""
    def get(self):
        self.write([flow_to_json(f) for f in self.view])
class DumpFlows(RequestHandler):
    """GET downloads all flows as a mitmproxy dump file; POST replaces the
    current view with flows parsed from an uploaded dump."""

    def get(self):
        self.set_header("Content-Disposition", "attachment; filename=flows")
        self.set_header("Content-Type", "application/octet-stream")
        bio = BytesIO()
        fw = io.FlowWriter(bio)
        for f in self.view:
            fw.add(f)
        self.write(bio.getvalue())
        bio.close()

    def post(self):
        self.view.clear()
        bio = BytesIO(self.filecontents)
        for i in io.FlowReader(bio).stream():
            # Loading happens asynchronously on the event loop.
            asyncio.ensure_future(self.master.load_flow(i))
        bio.close()
class ClearAll(RequestHandler):
    """POST /clear: drop all flows and the event log."""
    def post(self):
        self.view.clear()
        self.master.events.clear()
class ResumeFlows(RequestHandler):
    """POST /flows/resume: resume all intercepted flows."""
    def post(self):
        for f in self.view:
            f.resume()
            self.view.update([f])
class KillFlows(RequestHandler):
    """POST /flows/kill: kill every flow that can still be killed."""
    def post(self):
        for f in self.view:
            if f.killable:
                f.kill()
                self.view.update([f])
class ResumeFlow(RequestHandler):
    """POST: resume a single intercepted flow."""
    def post(self, flow_id):
        self.flow.resume()
        self.view.update([self.flow])
class KillFlow(RequestHandler):
    """POST: kill a single flow if it is still killable."""
    def post(self, flow_id):
        if self.flow.killable:
            self.flow.kill()
            self.view.update([self.flow])
class FlowHandler(RequestHandler):
    """DELETE kills and removes a flow; PUT applies a partial update to its
    request/response from a JSON body of the shape
    {"request": {...}, "response": {...}}."""

    def delete(self, flow_id):
        if self.flow.killable:
            self.flow.kill()
        self.view.remove([self.flow])

    def put(self, flow_id):
        flow = self.flow
        flow.backup()  # snapshot so a failed update can be rolled back
        try:
            for a, b in self.json.items():
                if a == "request" and hasattr(flow, "request"):
                    request = flow.request
                    for k, v in b.items():
                        if k in ["method", "scheme", "host", "path", "http_version"]:
                            setattr(request, k, str(v))
                        elif k == "port":
                            request.port = int(v)
                        elif k == "headers":
                            # Replace the full header list with the uploaded one.
                            request.headers.clear()
                            for header in v:
                                request.headers.add(*header)
                        elif k == "content":
                            request.text = v
                        else:
                            raise APIError(400, "Unknown update request.{}: {}".format(k, v))
                elif a == "response" and hasattr(flow, "response"):
                    response = flow.response
                    for k, v in b.items():
                        if k in ["msg", "http_version"]:
                            setattr(response, k, str(v))
                        elif k == "code":
                            response.status_code = int(v)
                        elif k == "headers":
                            response.headers.clear()
                            for header in v:
                                response.headers.add(*header)
                        elif k == "content":
                            response.text = v
                        else:
                            raise APIError(400, "Unknown update response.{}: {}".format(k, v))
                else:
                    raise APIError(400, "Unknown update {}: {}".format(a, b))
        except APIError:
            flow.revert()  # roll back any partially applied changes
            raise
        self.view.update([flow])
class DuplicateFlow(RequestHandler):
    """POST: copy a flow into the view; responds with the new flow's id."""
    def post(self, flow_id):
        f = self.flow.copy()
        self.view.add([f])
        self.write(f.id)
class RevertFlow(RequestHandler):
    """POST: revert a modified flow to its backed-up state."""
    def post(self, flow_id):
        if self.flow.modified():
            self.flow.revert()
            self.view.update([self.flow])
class ReplayFlow(RequestHandler):
    """POST: re-send the flow's request; the old response is discarded."""
    def post(self, flow_id):
        self.flow.backup()
        self.flow.response = None
        self.view.update([self.flow])
        try:
            self.master.commands.call("replay.client", [self.flow])
        except exceptions.ReplayException as e:
            raise APIError(400, str(e))
class FlowContent(RequestHandler):
    """Upload (POST) or download (GET) the raw body of a flow's request or
    response; `message` is either "request" or "response" (enforced by the
    route regex)."""

    def post(self, flow_id, message):
        self.flow.backup()
        message = getattr(self.flow, message)
        message.content = self.filecontents
        self.view.update([self.flow])

    def get(self, flow_id, message):
        message = getattr(self.flow, message)
        if not message.raw_content:
            raise APIError(400, "No content.")
        content_encoding = message.headers.get("Content-Encoding", None)
        if content_encoding:
            # Sanitize before echoing the header value back to the client.
            content_encoding = re.sub(r"[^\w]", "", content_encoding)
            self.set_header("Content-Encoding", content_encoding)
        original_cd = message.headers.get("Content-Disposition", None)
        filename = None
        if original_cd:
            filename = re.search(r'filename=([-\w" .()]+)', original_cd)
            if filename:
                filename = filename.group(1)
        if not filename:
            # Fall back to the last path segment of the request URL.
            filename = self.flow.request.path.split("?")[0].split("/")[-1]
        # Strip anything that could break out of the header value.
        filename = re.sub(r'[^-\w" .()]', "", filename)
        cd = "attachment; filename={}".format(filename)
        self.set_header("Content-Disposition", cd)
        self.set_header("Content-Type", "application/text")
        self.set_header("X-Content-Type-Options", "nosniff")
        self.set_header("X-Frame-Options", "DENY")
        self.write(message.raw_content)
class FlowContentView(RequestHandler):
    """GET: a pretty-printed rendering of a message body using a named
    content view (URL uses '_' where view names use spaces)."""
    def get(self, flow_id, message, content_view):
        message = getattr(self.flow, message)
        description, lines, error = contentviews.get_message_content_view(
            content_view.replace('_', ' '), message
        )
        # if error:
        #     add event log
        self.write(dict(
            lines=list(lines),
            description=description
        ))
class Events(RequestHandler):
    """GET /events: the buffered proxy event log as JSON."""
    def get(self):
        self.write([logentry_to_json(e) for e in self.master.events.data])
class Settings(RequestHandler):
    """GET/PUT /settings: expose and update a whitelisted subset of the
    master's options for the web UI."""

    def get(self):
        self.write(dict(
            version=version.VERSION,
            mode=str(self.master.options.mode),
            intercept_active=self.master.options.intercept_active,
            intercept=self.master.options.intercept,
            showhost=self.master.options.showhost,
            upstream_cert=self.master.options.upstream_cert,
            rawtcp=self.master.options.rawtcp,
            http2=self.master.options.http2,
            websocket=self.master.options.websocket,
            anticache=self.master.options.anticache,
            anticomp=self.master.options.anticomp,
            stickyauth=self.master.options.stickyauth,
            stickycookie=self.master.options.stickycookie,
            stream=self.master.options.stream_large_bodies,
            contentViews=[v.name.replace(' ', '_') for v in contentviews.views],
            listen_host=self.master.options.listen_host,
            listen_port=self.master.options.listen_port,
            server=self.master.options.server,
        ))

    def put(self):
        update = self.json
        # Only these options may be changed through this endpoint.
        option_whitelist = {
            "intercept", "showhost", "upstream_cert", "ssl_insecure",
            "rawtcp", "http2", "websocket", "anticache", "anticomp",
            "stickycookie", "stickyauth", "stream_large_bodies"
        }
        for k in update:
            if k not in option_whitelist:
                raise APIError(400, "Unknown setting {}".format(k))
        self.master.options.update(**update)
class Options(RequestHandler):
    """GET/PUT /options: full (unfiltered) option dump and update."""
    def get(self):
        self.write(optmanager.dump_dicts(self.master.options))

    def put(self):
        update = self.json
        try:
            self.master.options.update(**update)
        except Exception as err:
            raise APIError(400, "{}".format(err))
class SaveOptions(RequestHandler):
    """POST /options/save: persisting options is currently disabled."""
    def post(self):
        # try:
        #     optmanager.save(self.master.options, CONFIG_PATH, True)
        # except Exception as err:
        #     raise APIError(400, "{}".format(err))
        pass
class DnsRebind(RequestHandler):
    """Fallback handler for non-IP Host headers (DNS-rebinding guard)."""
    def get(self):
        raise tornado.web.HTTPError(
            403,
            reason="To protect against DNS rebinding, mitmweb can only be accessed by IP at the moment. "
                   "(https://github.com/mitmproxy/mitmproxy/issues/3234)"
        )
class Application(tornado.web.Application):
    """The mitmweb tornado application: routing table plus security
    hardening (XSRF cookies, DNS-rebinding protection)."""

    master: "mitmproxy.tools.web.master.WebMaster"

    def __init__(self, master: "mitmproxy.tools.web.master.WebMaster", debug: bool) -> None:
        self.master = master
        super().__init__(
            default_host="dns-rebind-protection",
            template_path=os.path.join(os.path.dirname(__file__), "templates"),
            static_path=os.path.join(os.path.dirname(__file__), "static"),
            xsrf_cookies=True,
            cookie_secret=os.urandom(256),
            debug=debug,
            autoreload=False,
        )
        # Requests whose Host header is neither localhost nor an IP fall
        # through to the default host and receive the DnsRebind error.
        self.add_handlers("dns-rebind-protection", [(r"/.*", DnsRebind)])
        self.add_handlers(
            # make mitmweb accessible by IP only to prevent DNS rebinding.
            r'^(localhost|[0-9.:\[\]]+)$',
            [
                (r"/", IndexHandler),
                (r"/filter-help(?:\.json)?", FilterHelp),
                (r"/updates", ClientConnection),
                (r"/events(?:\.json)?", Events),
                (r"/flows(?:\.json)?", Flows),
                (r"/flows/dump", DumpFlows),
                (r"/flows/resume", ResumeFlows),
                (r"/flows/kill", KillFlows),
                (r"/flows/(?P<flow_id>[0-9a-f\-]+)", FlowHandler),
                (r"/flows/(?P<flow_id>[0-9a-f\-]+)/resume", ResumeFlow),
                (r"/flows/(?P<flow_id>[0-9a-f\-]+)/kill", KillFlow),
                (r"/flows/(?P<flow_id>[0-9a-f\-]+)/duplicate", DuplicateFlow),
                (r"/flows/(?P<flow_id>[0-9a-f\-]+)/replay", ReplayFlow),
                (r"/flows/(?P<flow_id>[0-9a-f\-]+)/revert", RevertFlow),
                (r"/flows/(?P<flow_id>[0-9a-f\-]+)/(?P<message>request|response)/content.data", FlowContent),
                (
                    r"/flows/(?P<flow_id>[0-9a-f\-]+)/(?P<message>request|response)/content/(?P<content_view>[0-9a-zA-Z\-\_]+)(?:\.json)?",
                    FlowContentView),
                (r"/settings(?:\.json)?", Settings),
                (r"/clear", ClearAll),
                (r"/options(?:\.json)?", Options),
                (r"/options/save", SaveOptions)
            ]
        )
| 34.268199 | 139 | 0.568538 |
acf2d07e92f1c6ebf6f4614026b2c1575bed88e0 | 1,939 | py | Python | Chapter13/listing13_4.py | hohsieh/osgeopy-code | 932157c748c8fedb67d862b266a983fdd29ead56 | [
"MIT"
] | 160 | 2015-01-11T06:45:11.000Z | 2022-03-07T15:09:57.000Z | Chapter13/listing13_4.py | sthagen/osgeopy-code | bc85f4ec7a630b53502ee491e400057b67cdab22 | [
"MIT"
] | 3 | 2018-09-29T11:34:13.000Z | 2020-07-20T16:45:23.000Z | Chapter13/listing13_4.py | sthagen/osgeopy-code | bc85f4ec7a630b53502ee491e400057b67cdab22 | [
"MIT"
] | 108 | 2015-05-28T11:29:01.000Z | 2022-02-12T12:01:46.000Z | # Script to draw world countries as patches.
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.path import Path
import matplotlib.patches as patches
from osgeo import ogr
def order_coords(coords, clockwise):
    """Ensure *coords* (a list of (x, y) vertices) winds in the requested
    direction, reversing it in place when necessary; returns the list."""
    # Close the ring so the last->first edge is included in the sum.
    closed = list(coords) + [coords[0]]
    area2 = 0
    for (ax, ay), (bx, by) in zip(closed, closed[1:]):
        # Shoelace-style signed-area term: positive total => clockwise.
        area2 += (bx - ax) * (by + ay)
    if (area2 > 0) != clockwise:
        coords.reverse()
    return coords
def make_codes(n):
    """Makes a list of n path codes: one MOVETO followed by n-1 LINETOs."""
    codes = [Path.LINETO] * n
    codes[0] = Path.MOVETO
    return codes
def plot_polygon_patch(poly, color):
    """Plots a polygon as a patch.

    poly: an OGR polygon geometry (outer ring first, optional hole rings after)
    color: matplotlib fill color
    """
    # Outer clockwise path.
    coords = poly.GetGeometryRef(0).GetPoints()
    coords = order_coords(coords, True)
    codes = make_codes(len(coords))
    for i in range(1, poly.GetGeometryCount()):
        # Inner counter-clockwise paths (holes must wind opposite the shell).
        coords2 = poly.GetGeometryRef(i).GetPoints()
        coords2 = order_coords(coords2, False)
        codes2 = make_codes(len(coords2))
        # Concatenate the paths.
        coords = np.concatenate((coords, coords2))
        codes = np.concatenate((codes, codes2))
    # Add the patch to the plot
    path = Path(coords, codes)
    patch = patches.PathPatch(path, facecolor=color)
    # NOTE(review): plt.axes() creates/fetches the current axes on each call;
    # newer matplotlib prefers plt.gca() — confirm before upgrading.
    plt.axes().add_patch(patch)
# Loop through all of the features in the countries layer and create
# patches for the polygons.
ds = ogr.Open(r'D:\osgeopy-data\global\ne_110m_admin_0_countries.shp')
lyr = ds.GetLayer(0)
for row in lyr:
    geom = row.geometry()
    if geom.GetGeometryType() == ogr.wkbPolygon:
        plot_polygon_patch(geom, 'yellow')
    elif geom.GetGeometryType() == ogr.wkbMultiPolygon:
        # A multipolygon is drawn as one patch per member polygon.
        for i in range(geom.GetGeometryCount()):
            plot_polygon_patch(geom.GetGeometryRef(i), 'yellow')
# Equal aspect ratio so countries are not distorted.
plt.axis('equal')
plt.show()
| 30.296875 | 70 | 0.652914 |
acf2d202b8e2b836eff1274061caaa5494ea99b8 | 3,316 | py | Python | my/make_comparison_table.py | cibu/language-resources | a6158942286112b2808460b2cdbcb03260e8879b | [
"Apache-2.0"
] | 177 | 2019-05-31T17:58:29.000Z | 2022-03-27T11:24:00.000Z | my/make_comparison_table.py | cibu/language-resources | a6158942286112b2808460b2cdbcb03260e8879b | [
"Apache-2.0"
] | 21 | 2019-05-29T09:51:11.000Z | 2021-12-08T01:54:13.000Z | my/make_comparison_table.py | cibu/language-resources | a6158942286112b2808460b2cdbcb03260e8879b | [
"Apache-2.0"
] | 57 | 2015-09-28T12:05:27.000Z | 2019-05-15T05:44:50.000Z | #! /usr/bin/env python
#
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Script used for creating a comparsion table between Zawgyi and Unicode.
Reads two-column tab-separated values from stdin and writes a LaTeX document to
stdout. Column 1 of the input is expected to be in Zawgyi encoding; column 2 is
expected to be in Unicode 5.1 encoding. The resulting document can be compiled
with XeLaTeX or LuaLaTeX from a recent version of TeX Live (tested with TL17).
The output document requires the following TrueType fonts:
* Noto Sans, which can be downloaded from
https://www.google.com/get/noto/
* Padauk, which can be downloaded from
http://scripts.sil.org/cms/scripts/page.php?item_id=Padauk
(tested with version 3.003)
* Zawgyi-One, which was originally distributed by
http://web.archive.org/web/20120628203851/http://www.zawgyi.net/
and which can be downloaded from various websites, including e.g.
http://www.rfa.org/burmese/help/ZawgyiOne.ttf
(md5sum: e5e2acb2d3bdf1d128355125e41f1964)
"""
import io
import sys
STDIN = io.open(0, mode='rt', encoding='utf-8', closefd=False)
STDOUT = io.open(1, mode='wt', encoding='utf-8', closefd=False)
STDERR = io.open(2, mode='wt', encoding='utf-8', closefd=False)
DOCUMENT_HEADER = r'''\documentclass{article}
\usepackage{geometry}
\geometry{paperwidth=595bp,paperheight=792bp,margin=36bp,noheadfoot}
\usepackage{fontspec}
\setmainfont{NotoSans}
\newfontface\codepoints{NotoSans-Condensed}
\newfontface\unicode{PadaukBook-Regular}[Path=fonts/,Scale=1.1]
%%\newfontface\unicode{mm3}[Path=fonts/,Scale=1.05,Script=Myanmar]
\newfontface\zawgyi{ZawgyiOne}[Path=fonts/]
\usepackage{longtable}
\usepackage[table]{xcolor}
\definecolor{light-gray}{gray}{0.95}
\renewcommand{\arraystretch}{1.5}
\begin{document}
\begin{center}
\rowcolors{1}{light-gray}{white}
\begin{longtable}{r l l}
\textbf{Line} & \textbf{Zawgyi} & \textbf{Codepoints (offset 0x1000)} \\*
& \textbf{Unicode} & \\[16bp]
\endhead
'''
TABLE_ITEM = r'''
%s & {\zawgyi %s} & {\codepoints\footnotesize %s} \\*
%s & {\unicode %s} & {\codepoints\footnotesize %s} \\[10bp]
'''
DOCUMENT_FOOTER = r'''
\end{longtable}
\end{center}
\end{document}
'''
def Codepoint(char):
    """Hex label for a character's code point; Myanmar-block characters
    (U+1000..U+109F) are shown as a 2-digit offset from 0x1000."""
    cp = ord(char)
    in_myanmar_block = 0x1000 <= cp <= 0x109F
    return format(cp - 0x1000, '02X') if in_myanmar_block else format(cp, '04X')
def main(unused_argv):
    """Read tab-separated Zawgyi/Unicode pairs from stdin and write the
    LaTeX comparison table to stdout."""
    STDOUT.write(DOCUMENT_HEADER)
    n = 0  # running line number shown in the table
    for line in STDIN:
        line = line.rstrip('\n')
        fields = line.split('\t')
        assert len(fields) == 2
        z, u = fields  # Zawgyi text, Unicode text
        n += 1
        STDOUT.write(TABLE_ITEM %
                     ('%4d' % n, z, ' '.join(Codepoint(c) for c in z),
                      ' ' * 4, u, ' '.join(Codepoint(c) for c in u)))
    STDOUT.write(DOCUMENT_FOOTER)
    return


if __name__ == '__main__':
    main(sys.argv)
| 30.990654 | 79 | 0.708384 |
acf2d202bb8bd76e1ce31022b0be329dfa883822 | 128 | py | Python | tests/testthat/script.py | Suitgeeks/reticulate | f537fdb6975122c1e71cb955d44ae674546e069e | [
"Apache-2.0"
] | null | null | null | tests/testthat/script.py | Suitgeeks/reticulate | f537fdb6975122c1e71cb955d44ae674546e069e | [
"Apache-2.0"
] | null | null | null | tests/testthat/script.py | Suitgeeks/reticulate | f537fdb6975122c1e71cb955d44ae674546e069e | [
"Apache-2.0"
] | 1 | 2019-12-16T13:09:28.000Z | 2019-12-16T13:09:28.000Z | value = 42
def add(x, y):
return x + y
def secret():
return value
def _helper(): return 42
def api(): return _helper()
| 11.636364 | 27 | 0.625 |
acf2d2cb79151abde98d56c97c496d409ecb1ea2 | 241 | py | Python | ch99/photo/forms.py | dukuaris/django_bookmark | d6e8486999a8db8fc99c4b7dae0ddac402828c9d | [
"MIT"
] | null | null | null | ch99/photo/forms.py | dukuaris/django_bookmark | d6e8486999a8db8fc99c4b7dae0ddac402828c9d | [
"MIT"
] | 13 | 2020-02-12T03:19:26.000Z | 2022-02-10T12:50:46.000Z | ch99/photo/forms.py | dukuaris/django_web | d6e8486999a8db8fc99c4b7dae0ddac402828c9d | [
"MIT"
] | null | null | null | from django.forms import inlineformset_factory
from photo.models import Album, Photo
PhotoInlineFormSet = inlineformset_factory(Album, Photo,
fields = ['image', 'title', 'description'],
extra = 2)
| 30.125 | 64 | 0.643154 |
acf2d2e76bd9fe51aaea6ea222da5d3e102497d3 | 1,136 | py | Python | mason/engines/execution/models/jobs/preview_job.py | kyprifog/mason | bf45672124ef841bc16216c293034f4ccc506621 | [
"Apache-2.0"
] | 4 | 2021-04-12T17:49:34.000Z | 2022-01-23T19:54:29.000Z | mason/engines/execution/models/jobs/preview_job.py | kyprifog/mason | bf45672124ef841bc16216c293034f4ccc506621 | [
"Apache-2.0"
] | 24 | 2021-04-30T18:40:25.000Z | 2021-05-12T20:52:06.000Z | mason/engines/execution/models/jobs/preview_job.py | kyprifog/mason | bf45672124ef841bc16216c293034f4ccc506621 | [
"Apache-2.0"
] | 3 | 2021-04-12T19:40:43.000Z | 2021-09-07T21:56:36.000Z | from typing import Optional, Any, Dict
from mason.engines.execution.models.jobs import Job
from mason.engines.metastore.models.credentials import MetastoreCredentials
from mason.engines.storage.models.path import Path
class PreviewJob(Job):
    """Job spec for previewing the first rows of a dataset at `input_path`
    and writing the sample to `output_path`."""

    def __init__(self, input_path: Path, input_format: str, output_path: Path, credentials: Optional[MetastoreCredentials], read_headers: bool = False, limit: int = 10):
        self.input_path = input_path
        self.input_format = input_format
        self.output_path = output_path
        self.credentials = credentials
        self.read_headers = read_headers  # treat first row as a header row
        self.limit = limit  # maximum number of rows in the preview
        super().__init__("preview")

    def spec(self) -> dict:
        """Serialize the job parameters for the execution engine."""
        spec: Dict[str, Any] = {
            'input_path': self.input_path.full_path(),
            'input_format': self.input_format,
            'output_path': self.output_path.full_path(),
            'limit': self.limit
        }
        credentials = self.credentials
        if credentials:
            # Merge credential fields into the flat spec dict.
            spec = {**spec, **credentials.to_dict()}
        # NOTE(review): read_headers is emitted unconditionally here; the
        # source formatting was ambiguous about whether it belonged inside
        # the credentials branch — confirm against upstream.
        spec['read_headers'] = self.read_headers
        return spec
| 31.555556 | 169 | 0.65581 |
acf2d3b3f79957b16d3f345ed0ea37b5bc30bdd9 | 7,797 | py | Python | RestPy/ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/ospfsimulatedtopologyconfig.py | ralfjon/IxNetwork | c0c834fbc465af69c12fd6b7cee4628baba7fff1 | [
"MIT"
] | null | null | null | RestPy/ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/ospfsimulatedtopologyconfig.py | ralfjon/IxNetwork | c0c834fbc465af69c12fd6b7cee4628baba7fff1 | [
"MIT"
] | null | null | null | RestPy/ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/ospfsimulatedtopologyconfig.py | ralfjon/IxNetwork | c0c834fbc465af69c12fd6b7cee4628baba7fff1 | [
"MIT"
] | null | null | null |
# Copyright 1997 - 2018 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
class OspfSimulatedTopologyConfig(Base):
    """The OspfSimulatedTopologyConfig class encapsulates a system managed ospfSimulatedTopologyConfig node in the ixnetwork hierarchy.

    An instance of the class can be obtained by accessing the OspfSimulatedTopologyConfig property from a parent instance.
    The internal properties list will be empty when the property is accessed and is populated from the server by using the find method.
    """

    # Server-side data-model name of this node.
    _SDM_NAME = 'ospfSimulatedTopologyConfig'

    def __init__(self, parent):
        super(OspfSimulatedTopologyConfig, self).__init__(parent)

    @property
    def Active(self):
        """Activate/Deactivate Configuration

        Returns:
            obj(ixnetwork_restpy.multivalue.Multivalue)
        """
        return self._get_attribute('active')

    @property
    def Count(self):
        """Number of elements inside associated multiplier-scaled container object, e.g. number of devices inside a Device Group

        Returns:
            number
        """
        return self._get_attribute('count')

    @property
    def DescriptiveName(self):
        """Longer, more descriptive name for element. It's not guaranteed to be unique like -name-, but maybe offers more context

        Returns:
            str
        """
        return self._get_attribute('descriptiveName')

    @property
    def Name(self):
        """Name of NGPF element, guaranteed to be unique in Scenario

        Returns:
            str
        """
        return self._get_attribute('name')

    @Name.setter
    def Name(self, value):
        self._set_attribute('name', value)

    def find(self, Count=None, DescriptiveName=None, Name=None):
        """Finds and retrieves ospfSimulatedTopologyConfig data from the server.

        All named parameters support regex and can be used to selectively retrieve
        ospfSimulatedTopologyConfig data from the server. By default the find method
        takes no parameters and will retrieve all ospfSimulatedTopologyConfig data.

        Args:
            Count (number): Number of elements inside associated multiplier-scaled container object
            DescriptiveName (str): Longer, more descriptive name for element
            Name (str): Name of NGPF element, guaranteed to be unique in Scenario

        Returns:
            self: This instance with matching ospfSimulatedTopologyConfig data retrieved from the server available through an iterator or index

        Raises:
            ServerError: The server has encountered an uncategorized error condition
        """
        return self._select(locals())

    def read(self, href):
        """Retrieves a single instance of ospfSimulatedTopologyConfig data from the server.

        Args:
            href (str): An href to the instance to be retrieved

        Returns:
            self: This instance with the ospfSimulatedTopologyConfig data from the server available through an iterator or index

        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        return self._read(href)

    def _execute_operation(self, operation, SessionIndices):
        # Build the payload the server expects: Arg1 is always this
        # instance's href list; SessionIndices is optional.
        payload = {'Arg1': self}
        if SessionIndices is not None:
            payload['SessionIndices'] = SessionIndices
        return self._execute(operation, payload=payload, response_object=None)

    def Start(self, SessionIndices=None):
        """Executes the start operation on the server.

        Start OSPF Simulated Topology.

        The generated code previously declared three same-named overloads of
        this method; in Python only the last definition survives, which made
        the documented no-argument form ``Start()`` raise TypeError. The
        overloads are consolidated into one method with an optional argument.

        Args:
            SessionIndices (list(number) or str or None): either an array of
                session numbers (e.g. [0, 1, 2, 3]), a string of session
                ranges (e.g. '1-4;6;7-12'), or None to target all sessions.

        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        return self._execute_operation('Start', SessionIndices)

    def Stop(self, SessionIndices=None):
        """Executes the stop operation on the server.

        Stop OSPF Simulated Topology. See ``Start`` for the rationale behind
        the consolidated signature.

        Args:
            SessionIndices (list(number) or str or None): either an array of
                session numbers (e.g. [0, 1, 2, 3]), a string of session
                ranges (e.g. '1-4;6;7-12'), or None to target all sessions.

        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        return self._execute_operation('Stop', SessionIndices)
acf2d5e16301358de4d27c25f95181c2ea71e406 | 3,127 | py | Python | deepsky/evaluation.py | djgagne/deepsky | a01886b8b8bb58dbe1dc68670432c05ed72a2786 | [
"MIT"
] | 17 | 2019-05-24T10:11:34.000Z | 2021-10-06T01:16:31.000Z | deepsky/evaluation.py | djgagne/deepsky | a01886b8b8bb58dbe1dc68670432c05ed72a2786 | [
"MIT"
] | null | null | null | deepsky/evaluation.py | djgagne/deepsky | a01886b8b8bb58dbe1dc68670432c05ed72a2786 | [
"MIT"
] | 9 | 2019-08-20T17:47:02.000Z | 2021-09-30T11:26:19.000Z | import numpy as np
from numba import jit
from scipy.stats import ttest_ind
@jit(nopython=True)
def spatial_covariance(distances, z, eval_distances, tolerance=0.2):
    """
    Calculate the empirical covariances among all points that are a certain distance apart.

    Note: because each lag's covariance is divided by the product of the
    standard deviations, the returned values are normalized (correlation-like).

    Args:
        distances: Square distance matrix between all points in terms of number of grid points
        z: Intensity values at each point
        eval_distances: Distance values at which covariance is calculated
        tolerance: half-width of the distance band captured around each eval distance

    Returns:
        Spatial covariance values for each eval_distance value.
    """
    # If the upper triangle is already masked with large sentinel values,
    # reuse the matrix; otherwise copy it and mask the upper triangle so
    # each unordered point pair is counted only once.
    if distances[np.triu_indices(distances.shape[0])].max() > 1000:
        sub_distances = distances
    else:
        sub_distances = np.array(distances, copy=True)
        sub_distances[np.triu_indices(sub_distances.shape[0])] = 999999
    covariances = np.zeros(eval_distances.size)
    z_flat = z.ravel()
    for d, eval_distance in enumerate(eval_distances):
        # All pairs whose separation is within +/- tolerance of this lag.
        points_a, points_b = np.where(np.abs(sub_distances - eval_distance) <= tolerance)
        covariances[d] = np.sum((z_flat[points_a] - z_flat[points_a].mean()) *
                                (z_flat[points_b] - z_flat[points_b].mean())) / (float(points_a.size) - 1.0)
        covariances[d] /= z_flat[points_a].std() * z_flat[points_b].std()
    return covariances
@jit(nopython=True)
def local_spatial_covariance(window_width, stride, distances, z, eval_distances, tolerance=0.2):
    """
    Calculate spatial covariance values within a moving window over a spatial domain.

    Fixes over the previous version:
      * the output row index c_i was never incremented (c_j was bumped twice),
        so every window row overwrote cov_grid[:, 0, :];
      * w_j and c_j were never reset per row, so after the first row the inner
        loop never executed;
      * distances[d_points, d_points] extracted the (all-zero) diagonal rather
        than the square sub-matrix that spatial_covariance requires.

    Args:
        window_width: width of the spatial window in number of grid points
        stride: how far to advance the window between covariance calculations
        distances: Pointwise distance matrix (square, z.size x z.size)
        z: Intensity values being evaluated. Should be in 2D grid shape
        eval_distances: Set of distances being evaluated
        tolerance: Bounds for capturing points within distance +/- tolerance value.

    Returns:
        Grid of covariance values with dimensions (eval_distances,
            (z.shape[0] - window_width + 1) // stride,
            (z.shape[1] - window_width + 1) // stride)
    """
    num_windows_col = (z.shape[1] - window_width + 1) // stride
    num_windows_row = (z.shape[0] - window_width + 1) // stride
    cov_grid = np.zeros((len(eval_distances), num_windows_row, num_windows_col))
    # Flat index of each grid cell, used to pick rows/cols of `distances`.
    index_grid = np.arange(z.size).reshape(z.shape)
    c_i = 0
    w_i = 0
    while w_i < z.shape[0] - window_width:
        c_j = 0
        w_j = 0
        while w_j < z.shape[1] - window_width:
            d_points = index_grid[w_i: w_i + window_width, w_j: w_j + window_width].ravel()
            # Square sub-matrix of pairwise distances for this window.
            sub_dist = distances[d_points][:, d_points]
            cov_grid[:, c_i, c_j] = spatial_covariance(sub_dist,
                                                       z[w_i: w_i + window_width, w_j: w_j + window_width],
                                                       eval_distances, tolerance=tolerance)
            w_j += stride
            c_j += 1
        w_i += stride
        c_i += 1
    return cov_grid
| 42.835616 | 108 | 0.627119 |
acf2d61e495495fc6c1b591e4bf0766c6aa695ad | 13,527 | py | Python | networkx/algorithms/centrality/current_flow_betweenness.py | jmmcd/networkx | 207ff7d1e9bfaff013ac77c8d6bb79619892c994 | [
"BSD-3-Clause"
] | 1 | 2020-08-08T21:52:34.000Z | 2020-08-08T21:52:34.000Z | networkx/algorithms/centrality/current_flow_betweenness.py | jmmcd/networkx | 207ff7d1e9bfaff013ac77c8d6bb79619892c994 | [
"BSD-3-Clause"
] | 2 | 2019-11-13T03:48:53.000Z | 2021-02-15T16:52:09.000Z | networkx/algorithms/centrality/current_flow_betweenness.py | jmmcd/networkx | 207ff7d1e9bfaff013ac77c8d6bb79619892c994 | [
"BSD-3-Clause"
] | null | null | null | # Copyright (C) 2010-2019 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
#
# Author: Aric Hagberg (hagberg@lanl.gov)
"""Current-flow betweenness centrality measures."""
import networkx as nx
from networkx.algorithms.centrality.flow_matrix import (
CGInverseLaplacian,
flow_matrix_row,
FullInverseLaplacian,
laplacian_sparse_matrix,
SuperLUInverseLaplacian,
)
from networkx.utils import (not_implemented_for,
reverse_cuthill_mckee_ordering,
py_random_state)
__all__ = ['current_flow_betweenness_centrality',
'approximate_current_flow_betweenness_centrality',
'edge_current_flow_betweenness_centrality']
@py_random_state(7)
@not_implemented_for('directed')
def approximate_current_flow_betweenness_centrality(G, normalized=True,
                                                    weight=None,
                                                    dtype=float, solver='full',
                                                    epsilon=0.5, kmax=10000,
                                                    seed=None):
    r"""Compute the approximate current-flow betweenness centrality for nodes.
    Approximates the current-flow betweenness centrality within absolute
    error of epsilon with high probability [1]_.
    Parameters
    ----------
    G : graph
      A NetworkX graph
    normalized : bool, optional (default=True)
      If True the betweenness values are normalized by 2/[(n-1)(n-2)] where
      n is the number of nodes in G.
    weight : string or None, optional (default=None)
      Key for edge data used as the edge weight.
      If None, then use 1 as each edge weight.
    dtype : data type (float)
      Default data type for internal matrices.
      Set to np.float32 for lower memory consumption.
    solver : string (default='lu')
      Type of linear solver to use for computing the flow matrix.
      Options are "full" (uses most memory), "lu" (recommended), and
      "cg" (uses least memory).
    epsilon: float
      Absolute error tolerance.
    kmax: int
      Maximum number of sample node pairs to use for approximation.
    seed : integer, random_state, or None (default)
      Indicator of random number generation state.
      See :ref:`Randomness<randomness>`.
    Returns
    -------
    nodes : dictionary
      Dictionary of nodes with betweenness centrality as the value.
    Raises
    ------
    ImportError
      If NumPy or SciPy is not available.
    NetworkXError
      If the graph is not connected, or if the number of sampled pairs
      required for the requested epsilon exceeds kmax.
    See Also
    --------
    current_flow_betweenness_centrality
    Notes
    -----
    The running time is $O((1/\epsilon^2)m{\sqrt k} \log n)$
    and the space required is $O(m)$ for $n$ nodes and $m$ edges.
    If the edges have a 'weight' attribute they will be used as
    weights in this algorithm. Unspecified weights are set to 1.
    References
    ----------
    .. [1] Ulrik Brandes and Daniel Fleischer:
       Centrality Measures Based on Current Flow.
       Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
       LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
       http://algo.uni-konstanz.de/publications/bf-cmbcf-05.pdf
    """
    try:
        import numpy as np
    except ImportError:
        raise ImportError('current_flow_betweenness_centrality requires NumPy ',
                          'http://scipy.org/')
    try:
        from scipy import sparse
        from scipy.sparse import linalg
    except ImportError:
        raise ImportError('current_flow_betweenness_centrality requires SciPy ',
                          'http://scipy.org/')
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph not connected.")
    solvername = {"full": FullInverseLaplacian,
                  "lu": SuperLUInverseLaplacian,
                  "cg": CGInverseLaplacian}
    n = G.number_of_nodes()
    ordering = list(reverse_cuthill_mckee_ordering(G))
    # make a copy with integer labels according to rcm ordering
    # this could be done without a copy if we really wanted to
    H = nx.relabel_nodes(G, dict(zip(ordering, range(n))))
    L = laplacian_sparse_matrix(H, nodelist=range(n), weight=weight,
                                dtype=dtype, format='csc')
    C = solvername[solver](L, dtype=dtype)  # initialize solver
    betweenness = dict.fromkeys(H, 0.0)
    nb = (n - 1.0) * (n - 2.0)  # normalization factor
    cstar = n * (n - 1) / nb
    l = 1  # parameter in approximation, adjustable
    # k = number of randomly sampled (source, target) pairs needed for the
    # requested absolute-error bound epsilon (see [1]).
    k = l * int(np.ceil((cstar / epsilon)**2 * np.log(n)))
    if k > kmax:
        msg = 'Number random pairs k>kmax (%d>%d) ' % (k, kmax)
        raise nx.NetworkXError(msg, 'Increase kmax or epsilon')
    cstar2k = cstar / (2 * k)
    for i in range(k):
        s, t = seed.sample(range(n), 2)
        # Inject a unit of current at s and extract it at t, then solve the
        # Laplacian system for the node potentials p.
        b = np.zeros(n, dtype=dtype)
        b[s] = 1
        b[t] = -1
        p = C.solve(b)
        for v in H:
            if v == s or v == t:
                continue
            # Accumulate the current flowing through v (half the sum of
            # absolute currents on its incident edges), scaled by cstar2k.
            for nbr in H[v]:
                w = H[v][nbr].get(weight, 1.0)
                betweenness[v] += w * np.abs(p[v] - p[nbr]) * cstar2k
    if normalized:
        factor = 1.0
    else:
        factor = nb / 2.0
    # remap to original node names and "unnormalize" if required
    return dict((ordering[k], float(v * factor)) for k, v in betweenness.items())
@not_implemented_for('directed')
def current_flow_betweenness_centrality(G, normalized=True, weight=None,
                                        dtype=float, solver='full'):
    r"""Compute current-flow betweenness centrality for nodes.
    Current-flow betweenness centrality uses an electrical current
    model for information spreading in contrast to betweenness
    centrality which uses shortest paths.
    Current-flow betweenness centrality is also known as
    random-walk betweenness centrality [2]_.
    Parameters
    ----------
    G : graph
      A NetworkX graph
    normalized : bool, optional (default=True)
      If True the betweenness values are normalized by 2/[(n-1)(n-2)] where
      n is the number of nodes in G.
    weight : string or None, optional (default=None)
      Key for edge data used as the edge weight.
      If None, then use 1 as each edge weight.
    dtype : data type (float)
      Default data type for internal matrices.
      Set to np.float32 for lower memory consumption.
    solver : string (default='lu')
      Type of linear solver to use for computing the flow matrix.
      Options are "full" (uses most memory), "lu" (recommended), and
      "cg" (uses least memory).
    Returns
    -------
    nodes : dictionary
      Dictionary of nodes with betweenness centrality as the value.
    See Also
    --------
    approximate_current_flow_betweenness_centrality
    betweenness_centrality
    edge_betweenness_centrality
    edge_current_flow_betweenness_centrality
    Notes
    -----
    Current-flow betweenness can be computed in $O(I(n-1)+mn \log n)$
    time [1]_, where $I(n-1)$ is the time needed to compute the
    inverse Laplacian. For a full matrix this is $O(n^3)$ but using
    sparse methods you can achieve $O(nm{\sqrt k})$ where $k$ is the
    Laplacian matrix condition number.
    The space required is $O(nw)$ where $w$ is the width of the sparse
    Laplacian matrix. Worse case is $w=n$ for $O(n^2)$.
    If the edges have a 'weight' attribute they will be used as
    weights in this algorithm. Unspecified weights are set to 1.
    References
    ----------
    .. [1] Centrality Measures Based on Current Flow.
       Ulrik Brandes and Daniel Fleischer,
       Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
       LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
       http://algo.uni-konstanz.de/publications/bf-cmbcf-05.pdf
    .. [2] A measure of betweenness centrality based on random walks,
       M. E. J. Newman, Social Networks 27, 39-54 (2005).
    """
    try:
        import numpy as np
    except ImportError:
        raise ImportError('current_flow_betweenness_centrality requires NumPy ',
                          'http://scipy.org/')
    try:
        import scipy
    except ImportError:
        raise ImportError('current_flow_betweenness_centrality requires SciPy ',
                          'http://scipy.org/')
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph not connected.")
    n = G.number_of_nodes()
    ordering = list(reverse_cuthill_mckee_ordering(G))
    # make a copy with integer labels according to rcm ordering
    # this could be done without a copy if we really wanted to
    H = nx.relabel_nodes(G, dict(zip(ordering, range(n))))
    betweenness = dict.fromkeys(H, 0.0)  # b[v]=0 for v in H
    for row, (s, t) in flow_matrix_row(H, weight=weight, dtype=dtype,
                                       solver=solver):
        # pos[i] = rank of node i when nodes are ordered by decreasing
        # current throughput for this (s, t) row (rank-summation trick of [1]).
        pos = dict(zip(row.argsort()[::-1], range(n)))
        for i in range(n):
            betweenness[s] += (i - pos[i]) * row[i]
            betweenness[t] += (n - i - 1 - pos[i]) * row[i]
    if normalized:
        nb = (n - 1.0) * (n - 2.0)  # normalization factor
    else:
        nb = 2.0
    for v in H:
        # v (the integer relabel of the node) is subtracted per the rank
        # accounting above before rescaling.
        betweenness[v] = float((betweenness[v] - v) * 2.0 / nb)
    # remap integer labels back to the original node names
    return dict((ordering[k], v) for k, v in betweenness.items())
@not_implemented_for('directed')
def edge_current_flow_betweenness_centrality(G, normalized=True,
                                             weight=None,
                                             dtype=float, solver='full'):
    r"""Compute current-flow betweenness centrality for edges.
    Current-flow betweenness centrality uses an electrical current
    model for information spreading in contrast to betweenness
    centrality which uses shortest paths.
    Current-flow betweenness centrality is also known as
    random-walk betweenness centrality [2]_.
    Parameters
    ----------
    G : graph
      A NetworkX graph
    normalized : bool, optional (default=True)
      If True the betweenness values are normalized by 2/[(n-1)(n-2)] where
      n is the number of nodes in G.
    weight : string or None, optional (default=None)
      Key for edge data used as the edge weight.
      If None, then use 1 as each edge weight.
    dtype : data type (default=float)
      Default data type for internal matrices.
      Set to np.float32 for lower memory consumption.
    solver : string (default='lu')
      Type of linear solver to use for computing the flow matrix.
      Options are "full" (uses most memory), "lu" (recommended), and
      "cg" (uses least memory).
    Returns
    -------
    nodes : dictionary
      Dictionary of edge tuples with betweenness centrality as the value.
    Raises
    ------
    NetworkXError
      The algorithm does not support DiGraphs.
      If the input graph is an instance of DiGraph class, NetworkXError
      is raised.
    See Also
    --------
    betweenness_centrality
    edge_betweenness_centrality
    current_flow_betweenness_centrality
    Notes
    -----
    Current-flow betweenness can be computed in $O(I(n-1)+mn \log n)$
    time [1]_, where $I(n-1)$ is the time needed to compute the
    inverse Laplacian. For a full matrix this is $O(n^3)$ but using
    sparse methods you can achieve $O(nm{\sqrt k})$ where $k$ is the
    Laplacian matrix condition number.
    The space required is $O(nw)$ where $w$ is the width of the sparse
    Laplacian matrix. Worse case is $w=n$ for $O(n^2)$.
    If the edges have a 'weight' attribute they will be used as
    weights in this algorithm. Unspecified weights are set to 1.
    References
    ----------
    .. [1] Centrality Measures Based on Current Flow.
       Ulrik Brandes and Daniel Fleischer,
       Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
       LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
       http://algo.uni-konstanz.de/publications/bf-cmbcf-05.pdf
    .. [2] A measure of betweenness centrality based on random walks,
       M. E. J. Newman, Social Networks 27, 39-54 (2005).
    """
    from networkx.utils import reverse_cuthill_mckee_ordering
    try:
        import numpy as np
    except ImportError:
        raise ImportError('current_flow_betweenness_centrality requires NumPy ',
                          'http://scipy.org/')
    try:
        import scipy
    except ImportError:
        raise ImportError('current_flow_betweenness_centrality requires SciPy ',
                          'http://scipy.org/')
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph not connected.")
    n = G.number_of_nodes()
    ordering = list(reverse_cuthill_mckee_ordering(G))
    # make a copy with integer labels according to rcm ordering
    # this could be done without a copy if we really wanted to
    H = nx.relabel_nodes(G, dict(zip(ordering, range(n))))
    # edge keys are canonicalized as sorted tuples of integer labels
    edges = (tuple(sorted((u, v))) for u, v in H.edges())
    betweenness = dict.fromkeys(edges, 0.0)
    if normalized:
        nb = (n - 1.0) * (n - 2.0)  # normalization factor
    else:
        nb = 2.0
    for row, (e) in flow_matrix_row(H, weight=weight, dtype=dtype,
                                    solver=solver):
        # pos[i] = 1-based rank of node i by decreasing throughput for edge e
        pos = dict(zip(row.argsort()[::-1], range(1, n + 1)))
        for i in range(n):
            betweenness[e] += (i + 1 - pos[i]) * row[i]
            betweenness[e] += (n - i - pos[i]) * row[i]
        betweenness[e] /= nb
    # remap the integer endpoint labels back to the original node names
    return dict(((ordering[s], ordering[t]), float(v))
                for (s, t), v in betweenness.items())
| 37.060274 | 81 | 0.622163 |
acf2d634262c5b9ddd6e45741ff36e09ad63e7a4 | 2,849 | py | Python | emingora/pom/analyser/tools/GAVDeclaredAndDefine.py | Ginxo/pom-analyser | fbdecf7b1bf6710df5b7b77e9074fb265eec1d86 | [
"Apache-2.0"
] | null | null | null | emingora/pom/analyser/tools/GAVDeclaredAndDefine.py | Ginxo/pom-analyser | fbdecf7b1bf6710df5b7b77e9074fb265eec1d86 | [
"Apache-2.0"
] | 8 | 2019-11-13T11:46:39.000Z | 2022-01-27T16:20:47.000Z | emingora/pom/analyser/tools/GAVDeclaredAndDefine.py | Ginxo/pom-analyser | fbdecf7b1bf6710df5b7b77e9074fb265eec1d86 | [
"Apache-2.0"
] | null | null | null | from emingora.pom.analyser.entity.GAV import GAV
from emingora.pom.analyser.entity.Pom import Pom
from emingora.pom.analyser.utils.GAVUtils import GAVUtils
class GAVDeclaredAndDefine:
    """Finds GAVs (group/artifact/version coordinates) that are both declared
    in a dependencyManagement section and re-defined elsewhere in the same
    POM tree (own dependencies, children, or parents)."""
    @staticmethod
    def get_repeated_gavs(pom: Pom, found_dict: dict = None, inherited_dep_management: list = None) -> dict:
        """Walk *pom* (and recursively its children) collecting repeated GAVs.

        @param pom: root Pom to analyse
        @param found_dict: accumulator dict; a fresh one is created when omitted
            (previously this was a mutable default argument, so results leaked
            between successive calls — fixed)
        @param inherited_dep_management: dependencyManagement entries inherited
            from the caller's level of the tree
        @return: {pom_id: {gav_id: [duplicate GAVs]}}
        """
        if found_dict is None:
            found_dict = {}
        dependencies_management = (pom.dependencies_management if pom.dependencies_management is not None else []) + \
                                  (inherited_dep_management if inherited_dep_management is not None else [])
        for dependency in dependencies_management:
            GAVDeclaredAndDefine.__check(dependency, pom.dependencies, found_dict)
            GAVDeclaredAndDefine.__check_children(pom, dependency, dependencies_management, found_dict)
        GAVDeclaredAndDefine.__check_parents(pom.dependencies_management, pom.parent, found_dict)
        return found_dict
    @staticmethod
    def __check(dependency: GAV, dependencies: list, found_dict: dict):
        """Record every entry of *dependencies* matching *dependency*."""
        for dep in dependencies if dependencies is not None else []:
            if GAVUtils.is_gav_equal_not_none_version(dependency, dep):
                GAVDeclaredAndDefine.__fill_found_dict(dependency, dep, found_dict)
    @staticmethod
    def __check_children(pom: Pom, dependency: GAV, dependencies_management: list, found_dict: dict):
        """Check *dependency* against each child's sections, then recurse."""
        if pom.children is not None:
            for children in pom.children:
                GAVDeclaredAndDefine.__check(dependency, children.dependencies, found_dict)
                GAVDeclaredAndDefine.__check(dependency, children.dependencies_management, found_dict)
            for child in pom.children:
                GAVDeclaredAndDefine.get_repeated_gavs(child, found_dict, dependencies_management)
    @staticmethod
    def __check_parents(dependencies_management: list, parent_pom: Pom, found_dict: dict):
        """Check the managed dependencies against every ancestor's dependencies."""
        if parent_pom is not None:
            for dependency_management in dependencies_management if dependencies_management is not None and parent_pom.dependencies is not None else []:
                GAVDeclaredAndDefine.__check(dependency_management, parent_pom.dependencies, found_dict)
            if parent_pom.parent is not None:
                GAVDeclaredAndDefine.__check_parents(dependencies_management, parent_pom.parent, found_dict)
    @staticmethod
    def __fill_found_dict(dependency1: GAV, dependency2: GAV, found_dict: dict):
        """Append dependency2 under found_dict[pom_id][gav_id], deduplicated."""
        # setdefault replaces the previous get/None/assign dance
        pom_entry = found_dict.setdefault(dependency1.belonging_pom.get_id(), {})
        gav_list = pom_entry.setdefault(dependency1.get_id(), [])
        if dependency2 not in gav_list:
            gav_list.append(dependency2)
| 52.759259 | 152 | 0.715339 |
acf2d6ea638ea2c1e6b3f4eac9b98a8a028dc4d4 | 1,145 | py | Python | C3CTF/2019 36C3/dumb_theory/check.py | PurpEth/solved-hacking-problem | 6f289d1647eb9c091caa580c7aae673e3ba02952 | [
"Unlicense"
] | 1 | 2021-08-24T22:16:41.000Z | 2021-08-24T22:16:41.000Z | C3CTF/2019 36C3/dumb_theory/check.py | PurpEth/solved-hacking-problem | 6f289d1647eb9c091caa580c7aae673e3ba02952 | [
"Unlicense"
] | null | null | null | C3CTF/2019 36C3/dumb_theory/check.py | PurpEth/solved-hacking-problem | 6f289d1647eb9c091caa580c7aae673e3ba02952 | [
"Unlicense"
] | null | null | null | #!/usr/bin/env python3
import hashlib
import struct
r = 3  # degree of the ring: elements are length-3 coefficient tuples
# Exponent constant; not referenced in this excerpt — presumably used by the
# wider challenge/solution script (its hex bytes spell ASCII text).
e = 0x6878703c33796f75002d
# Sentence the forged message must resemble; sha256_target() below substitutes
# characters for the '.' after "please" and the trailing '.'.
target = 'Hello hxp! I would like the flag, please. Thank you.'
def mul(a, b, r=3):
    """Multiply two elements of the ring Z[x]/(x^r - 7).

    Elements are coefficient sequences of length ``r`` (index i holds the
    coefficient of x^i).  The product is an ordinary polynomial convolution
    followed by reduction using x^r == 7.

    @param a: indexable coefficient sequence of length r
    @param b: indexable coefficient sequence of length r
    @param r: ring degree (default 3, matching the module-level constant)
    @return: tuple of r reduced coefficients
    """
    prod = [0] * (2 * r)  # convolution result, degrees 0 .. 2r-2 (top slot stays 0)
    for i in range(r):
        for j in range(r):
            prod[i + j] += a[i] * b[j]
    # Fold high-degree terms down: x^r == 7, so a coefficient y of x^d
    # contributes 7*y to x^(d-r).  (The original wrote 7*y as the obfuscated
    # and unsafe sum(map(eval, 'yyyyyyy')).)
    while len(prod) > r:
        y = prod.pop()
        prod[-r] += 7 * y
    return tuple(prod)
def exp(x, k):
    """Raise ring element *x* to the *k*-th power by binary square-and-multiply.

    @param x: coefficient sequence of length r (module-level ring degree)
    @param k: non-negative integer exponent
    @return: coefficients of x**k (the multiplicative identity when k == 0)
    """
    # Multiplicative identity, i.e. the constant polynomial 1.  (The original
    # wrote this as the obfuscated boolean list [not i for i in range(r)].)
    y = [1] + [0] * (r - 1)
    for i in range(k.bit_length()):
        if (k >> i) & 1:
            y = mul(y, x)
        x = mul(x, x)  # repeated squaring for the next bit
    return y
def H(msg, r=3):
    """Hash a message string to a ring element.

    SHA-256 the UTF-8 encoding of *msg*, read the first 2*r bytes as r
    big-endian uint16 values, and add 1 to each (so every coefficient is
    non-zero).

    @param msg: message string
    @param r: ring degree (default 3; previously read from the module global)
    @return: tuple of r coefficients, each in [1, 65536]
    """
    digest = hashlib.sha256(msg.encode()).digest()
    return tuple(c + 1 for c in struct.unpack('>%sH' % r, digest[:r + r]))
def sha256_target(c1, c2, r=3):
    """Hash a near-target message to a ring element.

    Builds the target sentence with *c1* in place of the '.' after "please"
    and *c2* in place of the final '.', then maps the SHA-256 digest to r
    coefficients exactly like H() (first 2*r bytes as big-endian uint16s,
    each incremented by 1).

    @param c1: substitute for the first punctuation character
    @param c2: substitute for the trailing punctuation character
    @param r: ring degree (default 3; previously read from the module global)
    @return: tuple of r coefficients, each in [1, 65536]
    """
    h = hashlib.sha256("Hello hxp! I would like the flag, please{} Thank you{}".format(
        c1, c2).encode()).digest()
    return tuple(c + 1 for c in struct.unpack('>%sH' % r, h[:r + r]))
# NOTE(review): scratch/diagnostic driver — hashes three candidate character
# substitutions into ring elements and prints their pairwise products.
# Presumably used to inspect how blocks combine for the forgery; confirm intent.
block1 = sha256_target('바', '보')
block2 = sha256_target('a', 'b')
block3 = sha256_target('ú', 'C')
print(block1)
print(block2)
print(block3)
print(mul(block1, block2))
print(mul(block1, block3))
print(mul(block2, block3))
| 21.203704 | 87 | 0.561572 |
acf2d72a50502f1241108c8c0efbe2fc09b51910 | 1,171 | py | Python | tools/scraper/wiper.py | ucam-cl-dtg/android-vulnerabilities | 5a5158e5faea7de3fdbb822e6f64d1cd713aebcf | [
"BSD-2-Clause"
] | 16 | 2015-04-27T22:36:58.000Z | 2020-07-10T01:32:29.000Z | tools/scraper/wiper.py | ucam-cl-dtg/android-vulnerabilities | 5a5158e5faea7de3fdbb822e6f64d1cd713aebcf | [
"BSD-2-Clause"
] | 6 | 2015-11-05T13:12:47.000Z | 2019-08-14T14:27:05.000Z | tools/scraper/wiper.py | ucam-cl-dtg/android-vulnerabilities | 5a5158e5faea7de3fdbb822e6f64d1cd713aebcf | [
"BSD-2-Clause"
] | 9 | 2015-07-15T02:40:47.000Z | 2021-11-29T06:32:29.000Z | # Copyright (C) Daniel Carter 2019
# Licensed under the 2-clause BSD licence
import os
import json
import sys
def load_manual_data(cve):
    """Return manually entered data on the vulnerability, to be combined with
    automatically scraped data.

    @param cve: CVE identifier, used as the file name under manual-data/
    @return: dict of manual fields; empty dict when no manual file exists
    """
    path = 'manual-data/{cve}'.format(cve=cve)
    data = {}
    if os.path.isfile(path):
        with open(path, 'r') as f:
            # dict.update replaces the original key-by-key copy loop
            data.update(json.load(f))
    return data
def write_manual_data(cve, data):
    """Serialize *data* as pretty-printed JSON into that CVE's manual-data file."""
    destination = 'manual-data/{cve}'.format(cve=cve)
    with open(destination, 'w') as handle:
        handle.write(json.dumps(data, indent=2))
# Interactive entry point: ask which field to erase, confirm, then strip that
# field from every file under manual-data/ except the 'attributes' file.
print('This program will erase data fields from the manual data files. Please use with care')
key = input('Please enter the field to erase: ')
print('Set to erase ' + key)
response = input('Do you want to continue? (y/n) ')
if response != 'y':
    sys.exit(0)
for filename in os.listdir('manual-data/'):
    if filename != 'attributes':
        print(filename)
        data = load_manual_data(filename)
        # NOTE(review): raises KeyError (aborting the run) if a file lacks the
        # field — confirm that is the desired behaviour.
        del data[key]
        write_manual_data(filename, data)
acf2d82e1e2341eede6bf581a777c26f20987e00 | 1,032 | py | Python | pyclesperanto_prototype/_tier1/_greater.py | haesleinhuepf/pyclesperanto_prototype | 65bc3035d3b2b61a2722c93b95bae310bfbd190e | [
"BSD-3-Clause"
] | 1 | 2021-01-15T15:32:19.000Z | 2021-01-15T15:32:19.000Z | pyclesperanto_prototype/_tier1/_greater.py | haesleinhuepf/pyclesperanto_prototype | 65bc3035d3b2b61a2722c93b95bae310bfbd190e | [
"BSD-3-Clause"
] | null | null | null | pyclesperanto_prototype/_tier1/_greater.py | haesleinhuepf/pyclesperanto_prototype | 65bc3035d3b2b61a2722c93b95bae310bfbd190e | [
"BSD-3-Clause"
] | null | null | null | from .._tier0 import execute
from .._tier0 import plugin_function
from .._tier0 import Image
@plugin_function(categories=['combine', 'binarize', 'in assistant'])
def greater(source1 : Image, source2 : Image, destination : Image = None):
    """Pixel-wise comparison of two images A and B.

    Writes f(a, b) = 1 where a > b and 0 otherwise into the destination.
    Parameters
    ----------
    source1 : Image
    source2 : Image
    destination : Image
    Returns
    -------
    destination
    Examples
    --------
    >>> import pyclesperanto_prototype as cle
    >>> cle.greater(source1, source2, destination)
    References
    ----------
    .. [1] https://clij.github.io/clij2-docs/reference_greater
    """
    # Kernel file and entry point are selected by the destination's dimensionality.
    ndim = str(len(destination.shape))
    kernel_args = {
        "src1": source1,
        "src2": source2,
        "dst": destination
    }
    execute(__file__,
            '../clij-opencl-kernels/kernels/greater_' + ndim + 'd_x.cl',
            'greater_' + ndim + 'd',
            destination.shape,
            kernel_args)
    return destination
| 25.170732 | 184 | 0.613372 |
acf2db126b23b58bfd008890ed6ce4e7858e7c97 | 864 | py | Python | MEETinTurtle.py | roni20-meet/meet2018y1lab1 | c0ac28bc7dcee089f16a69a9738fe3a9045e7f08 | [
"MIT"
] | null | null | null | MEETinTurtle.py | roni20-meet/meet2018y1lab1 | c0ac28bc7dcee089f16a69a9738fe3a9045e7f08 | [
"MIT"
] | null | null | null | MEETinTurtle.py | roni20-meet/meet2018y1lab1 | c0ac28bc7dcee089f16a69a9738fe3a9045e7f08 | [
"MIT"
] | null | null | null | import turtle
# Everything that comes after the # is a comment.
# It is a note to the person reading the code; the computer ignores it.
# Write your code below here...
turtle.penup() # Lift the pen so moving does not draw
#draw
turtle.goto(-200,-100) # Move the turtle to position (-200, -100)
#position (-200, -100)
#on the screen
turtle.pendown() # Lower the pen to start drawing
#drawing
#Draw the M (two peaks between x=-200 and x=-100):
turtle.goto(-200,-100+200)
turtle.goto(-200+50,-100)
turtle.goto(-200+100,-100+200)
turtle.goto(-200+100,-100)
turtle.penup()
turtle.goto(-50,-100)
turtle.pendown()
# Draw the E: vertical spine with top bar...
turtle.goto(-50,100)
turtle.goto(50,100)
turtle.penup()
turtle.goto(50,0)
turtle.pendown()
# ...middle bar...
turtle.goto(-50,0)
turtle.penup()
turtle.goto(-50,-100)
turtle.pendown()
# ...and bottom bar.
turtle.goto(50,-100)
turtle.penup()
# Park the turtle to the right; remaining letters are not drawn yet.
turtle.goto(100,-100)
# ...and end it before the next line.
turtle.mainloop()
acf2db3505739383657ed1f11ed2bdaeae7030de | 467 | py | Python | resume2/web/admin/__init__.py | tonywu7/resume2 | 5a67e77427404bbc6f3cd5829a317c1e3954c277 | [
"MIT"
] | null | null | null | resume2/web/admin/__init__.py | tonywu7/resume2 | 5a67e77427404bbc6f3cd5829a317c1e3954c277 | [
"MIT"
] | null | null | null | resume2/web/admin/__init__.py | tonywu7/resume2 | 5a67e77427404bbc6f3cd5829a317c1e3954c277 | [
"MIT"
] | null | null | null | from pathlib import Path
from django.contrib.admin import AdminSite
from ts2admin.models import (BaseModelAdmin, register_all_default,
register_all_defined)
from ts2admin.utils.registrar import AdminRegistrar
# Module-level registrar instance handed to register_all_defined below.
admin_ = AdminRegistrar()
def register_all(admin_site: AdminSite):
    """Register the admin views from the sibling 'views' directory on
    *admin_site*, then register default admins for the 'web' app's models."""
    register_all_defined(admin_site, str(Path(__file__).with_name('views')), __package__, admin_)
    register_all_default(admin_site, 'web', BaseModelAdmin)
| 33.357143 | 97 | 0.768737 |
acf2dc7d0caaeb4c2a1565cd8f0533dc998e6094 | 198,526 | py | Python | pytests/fts/fts_base.py | couchbaselabs/testrunner-dbaas | 8f90f324c5e804fb1c8f46f5618b672611d856b6 | [
"Apache-2.0"
] | 1 | 2020-08-31T18:51:45.000Z | 2020-08-31T18:51:45.000Z | pytests/fts/fts_base.py | couchbaselabs/testrunner-dbaas | 8f90f324c5e804fb1c8f46f5618b672611d856b6 | [
"Apache-2.0"
] | null | null | null | pytests/fts/fts_base.py | couchbaselabs/testrunner-dbaas | 8f90f324c5e804fb1c8f46f5618b672611d856b6 | [
"Apache-2.0"
] | 2 | 2020-07-24T07:12:01.000Z | 2022-03-17T23:43:28.000Z | """
Base class for FTS/CBFT/Couchbase Full Text Search
"""
import unittest
import time
import copy
import logger
import logging
import re
import json
import math
import random
from couchbase_helper.cluster import Cluster
from membase.api.rest_client import RestConnection, Bucket
from membase.api.exception import ServerUnavailableException
from remote.remote_util import RemoteMachineShellConnection
from remote.remote_util import RemoteUtilHelper
from testconstants import STANDARD_BUCKET_PORT, LINUX_COUCHBASE_BIN_PATH, WIN_COUCHBASE_BIN_PATH, \
MAC_COUCHBASE_BIN_PATH
from membase.helper.cluster_helper import ClusterOperationHelper
from couchbase_helper.stats_tools import StatsCommon
from membase.helper.bucket_helper import BucketOperationHelper
from memcached.helper.data_helper import MemcachedClientHelper
from TestInput import TestInputSingleton
from lib.couchbase_helper.documentgenerator import GeoSpatialDataLoader, WikiJSONGenerator
from lib.memcached.helper.data_helper import KVStoreAwareSmartClient
from scripts.collect_server_info import cbcollectRunner
from couchbase_helper.documentgenerator import *
from couchbase_helper.documentgenerator import JsonDocGenerator
from lib.membase.api.exception import FTSException
from .es_base import ElasticSearchBase
from security.rbac_base import RbacBase
from lib.couchbase_helper.tuq_helper import N1QLHelper
from .random_query_generator.rand_query_gen import FTSESQueryGenerator
from security.ntonencryptionBase import ntonencryptionBase
from lib.ep_mc_bin_client import MemcachedClient
from lib.mc_bin_client import MemcachedClient as MC_MemcachedClient
from security.SecretsMasterBase import SecretsMasterBase
import server_ports
class RenameNodeException(FTSException):
    """Exception thrown when converting an ip to a hostname failed.
    """
    def __init__(self, msg=''):
        FTSException.__init__(self, msg)
class RebalanceNotStopException(FTSException):
    """Exception thrown when stopping a rebalance failed.
    """
    def __init__(self, msg=''):
        FTSException.__init__(self, msg)
def raise_if(cond, ex):
    """Raise *ex* when *cond* is truthy; otherwise do nothing.

    @param cond: condition to evaluate
    @param ex: exception instance to raise
    """
    if not cond:
        return
    raise ex
class OPS:
    """Mutation operation names used when loading/updating documents."""
    CREATE = "create"
    UPDATE = "update"
    DELETE = "delete"
    APPEND = "append"
class EVICTION_POLICY:
    """Bucket eviction policy identifiers."""
    VALUE_ONLY = "valueOnly"
    FULL_EVICTION = "fullEviction"
class BUCKET_PRIORITY:
    """Bucket priority levels."""
    HIGH = "high"
class BUCKET_NAME:
    """Default bucket name."""
    DEFAULT = "default"
class OS:
    """Operating-system identifiers as reported by the remote shell."""
    WINDOWS = "windows"
    LINUX = "linux"
    OSX = "osx"
class COMMAND:
    """Shell commands used when restarting nodes."""
    SHUTDOWN = "shutdown"
    REBOOT = "reboot"
class STATE:
    """Service state strings."""
    RUNNING = "running"
class CHECK_AUDIT_EVENT:
    """Toggle for audit-event verification in tests."""
    CHECK = False
class INDEX_DEFAULTS:
    """Default payloads used when building FTS index definitions."""
    # Default bleve type mapping (dynamic, standard analyzer)
    BLEVE_MAPPING = {
        "mapping": {
            "default_mapping": {
                "enabled": True,
                "dynamic": True,
                "default_analyzer": ""
            },
            "type_field": "type",
            "default_type": "_default",
            "default_analyzer": "standard",
            "default_datetime_parser": "dateTimeOptional",
            "default_field": "_all",
            "analysis": {}
        }
    }
    # Empty alias definition; targets are filled in per test
    ALIAS_DEFINITION = {"targets": {}}
    PLAN_PARAMS = {}
    # Couchbase-bucket source feed parameters
    SOURCE_CB_PARAMS = {
        "authUser": "default",
        "authPassword": "",
        "authSaslUser": "",
        "authSaslPassword": "",
        "clusterManagerBackoffFactor": 0,
        "clusterManagerSleepInitMS": 0,
        "clusterManagerSleepMaxMS": 20000,
        "dataManagerBackoffFactor": 0,
        "dataManagerSleepInitMS": 0,
        "dataManagerSleepMaxMS": 20000,
        "feedBufferSizeBytes": 0,
        "feedBufferAckThreshold": 0
    }
    # File-based source parameters (used for file feeds)
    SOURCE_FILE_PARAMS = {
        "regExps": [
            ".txt$",
            ".md$"
        ],
        "maxFileSize": 0,
        "numPartitions": 0,
        "sleepStartMS": 5000,
        "backoffFactor": 1.5,
        "maxSleepMS": 300000
    }
    # Skeleton full-text index definition
    INDEX_DEFINITION = {
        "type": "fulltext-index",
        "name": "",
        "uuid": "",
        "params": {},
        "sourceType": "couchbase",
        "sourceName": "default",
        "sourceUUID": "",
        "planParams": {}
    }
class QUERY:
    """Skeleton FTS query request body; tests fill in indexName/query/ctl."""
    JSON = {
        "indexName": "",
        "size": 10,
        "from": 0,
        "explain": False,
        "query": {},
        "fields": [],
        "ctl": {
            "consistency": {
                "level": "",
                "vectors": {}
            },
            # "timeout": 60000 Optional timeout( 10000 by default).
            # it's better to get rid of hardcoding
        }
    }
# Event Definition:
# https://github.com/couchbase/goxdcr/blob/master/etc/audit_descriptor.json
class NodeHelper:
_log = logger.Logger.get_logger()
@staticmethod
def disable_firewall(server):
"""Disable firewall to put restriction to replicate items in XDCR.
@param server: server object to disable firewall
@param rep_direction: replication direction unidirection/bidirection
"""
shell = RemoteMachineShellConnection(server)
shell.info = shell.extract_remote_info()
if shell.info.type.lower() == "windows":
output, error = shell.execute_command('netsh advfirewall set publicprofile state off')
shell.log_command_output(output, error)
output, error = shell.execute_command('netsh advfirewall set privateprofile state off')
shell.log_command_output(output, error)
# for details see RemoteUtilHelper.enable_firewall for windows
output, error = shell.execute_command('netsh advfirewall firewall delete rule name="block erl.exe in"')
shell.log_command_output(output, error)
output, error = shell.execute_command('netsh advfirewall firewall delete rule name="block erl.exe out"')
shell.log_command_output(output, error)
else:
o, r = shell.execute_command("iptables -F")
shell.log_command_output(o, r)
o, r = shell.execute_command(
"/sbin/iptables -A INPUT -p tcp -i eth0 --dport 1000:65535 -j ACCEPT")
shell.log_command_output(o, r)
o, r = shell.execute_command(
"/sbin/iptables -A INPUT -m state --state ESTABLISHED,RELATED -j ACCEPT")
shell.log_command_output(o, r)
# self.log.info("enabled firewall on {0}".format(server))
o, r = shell.execute_command("/sbin/iptables --list")
shell.log_command_output(o, r)
shell.disconnect()
@staticmethod
def reboot_server(server, test_case, wait_timeout=120):
"""Reboot a server and wait for couchbase server to run.
@param server: server object, which needs to be rebooted.
@param test_case: test case object, since it has assert() function
which is used by wait_for_ns_servers_or_assert
to throw assertion.
@param wait_timeout: timeout to whole reboot operation.
"""
# self.log.info("Rebooting server '{0}'....".format(server.ip))
shell = RemoteMachineShellConnection(server)
if shell.extract_remote_info().type.lower() == OS.WINDOWS:
o, r = shell.execute_command(
"{0} -r -f -t 0".format(COMMAND.SHUTDOWN))
elif shell.extract_remote_info().type.lower() == OS.LINUX:
o, r = shell.execute_command(COMMAND.REBOOT)
shell.log_command_output(o, r)
# wait for restart and warmup on all server
if shell.extract_remote_info().type.lower() == OS.WINDOWS:
time.sleep(wait_timeout * 5)
else:
time.sleep(wait_timeout // 6)
while True:
try:
# disable firewall on these nodes
NodeHelper.disable_firewall(server)
break
except BaseException:
print("Node not reachable yet, will try after 10 secs")
time.sleep(10)
# wait till server is ready after warmup
ClusterOperationHelper.wait_for_ns_servers_or_assert(
[server],
test_case,
wait_if_warmup=True)
@staticmethod
def enable_firewall(server):
"""Enable firewall
@param server: server object to enable firewall
@param rep_direction: replication direction unidirection/bidirection
"""
RemoteUtilHelper.enable_firewall(
server)
@staticmethod
def do_a_warm_up(server):
"""Warmp up server
"""
shell = RemoteMachineShellConnection(server)
shell.stop_couchbase()
time.sleep(5)
shell.start_couchbase()
shell.disconnect()
@staticmethod
def start_couchbase(server):
"""Warmp up server
"""
shell = RemoteMachineShellConnection(server)
shell.start_couchbase()
shell.disconnect()
@staticmethod
def stop_couchbase(server):
"""Warmp up server
"""
shell = RemoteMachineShellConnection(server)
shell.stop_couchbase()
shell.disconnect()
@staticmethod
def set_cbft_env_fdb_options(server):
shell = RemoteMachineShellConnection(server)
shell.stop_couchbase()
cmd = "sed -i 's/^export CBFT_ENV_OPTIONS.*$/" \
"export CBFT_ENV_OPTIONS=bleveMaxResultWindow=10000000," \
"forestdbCompactorSleepDuration={0},forestdbCompactionThreshold={1}/g'\
/opt/couchbase/bin/couchbase-server".format(
int(TestInputSingleton.input.param("fdb_compact_interval", None)),
int(TestInputSingleton.input.param("fdb_compact_threshold", None)))
shell.execute_command(cmd)
shell.start_couchbase()
shell.disconnect()
@staticmethod
def wait_service_started(server, wait_time=120):
"""Function will wait for Couchbase service to be in
running phase.
"""
shell = RemoteMachineShellConnection(server)
os_type = shell.extract_remote_info().distribution_type
if os_type.lower() == 'windows':
cmd = "sc query CouchbaseServer | grep STATE"
else:
cmd = "service couchbase-server status"
now = time.time()
while time.time() - now < wait_time:
output, _ = shell.execute_command(cmd)
if str(output).lower().find("running") != -1:
# self.log.info("Couchbase service is running")
return
time.sleep(10)
raise Exception(
"Couchbase service is not running after {0} seconds".format(
wait_time))
@staticmethod
def wait_warmup_completed(warmupnodes, bucket_names=["default"]):
if isinstance(bucket_names, str):
bucket_names = [bucket_names]
start = time.time()
for server in warmupnodes:
for bucket in bucket_names:
while time.time() - start < 150:
try:
mc = MemcachedClientHelper.direct_client(server, bucket)
if mc.stats()["ep_warmup_thread"] == "complete":
NodeHelper._log.info(
"Warmed up: %s items on %s on %s" %
(mc.stats("warmup")["ep_warmup_key_count"], bucket, server))
time.sleep(10)
break
elif mc.stats()["ep_warmup_thread"] == "running":
NodeHelper._log.info(
"Still warming up .. ep_warmup_key_count : %s" % (
mc.stats("warmup")["ep_warmup_key_count"]))
continue
else:
NodeHelper._log.info(
"Value of ep_warmup_thread does not exist, exiting from this server")
break
except Exception as e:
NodeHelper._log.info(e)
time.sleep(10)
if mc.stats()["ep_warmup_thread"] == "running":
NodeHelper._log.info(
"ERROR: ep_warmup_thread's status not complete")
mc.close()
@staticmethod
def wait_node_restarted(
server, test_case, wait_time=120, wait_if_warmup=False,
check_service=False):
"""Wait server to be re-started
"""
now = time.time()
if check_service:
NodeHelper.wait_service_started(server, wait_time)
wait_time = now + wait_time - time.time()
num = 0
while num < wait_time // 10:
try:
ClusterOperationHelper.wait_for_ns_servers_or_assert(
[server], test_case, wait_time=wait_time - num * 10,
wait_if_warmup=wait_if_warmup)
break
except ServerUnavailableException:
num += 1
time.sleep(10)
@staticmethod
def kill_erlang(server):
"""Kill erlang process running on server.
"""
NodeHelper._log.info("Killing erlang on server: {0}".format(server))
shell = RemoteMachineShellConnection(server)
os_info = shell.extract_remote_info()
shell.kill_erlang(os_info)
shell.start_couchbase()
shell.disconnect()
NodeHelper.wait_warmup_completed([server])
@staticmethod
def kill_memcached(server):
"""Kill memcached process running on server.
"""
shell = RemoteMachineShellConnection(server)
shell.kill_memcached()
shell.disconnect()
@staticmethod
def kill_cbft_process(server):
NodeHelper._log.info("Killing cbft on server: {0}".format(server))
shell = RemoteMachineShellConnection(server)
shell.kill_cbft_process()
shell.disconnect()
@staticmethod
def get_log_dir(node):
"""Gets couchbase log directory, even for cluster_run
"""
_, dir = RestConnection(node).diag_eval(
'filename:absname(element(2, application:get_env(ns_server,error_logger_mf_dir))).')
return str(dir)
@staticmethod
def get_data_dir(node):
"""Gets couchbase data directory, even for cluster_run
"""
_, dir = RestConnection(node).diag_eval(
'filename:absname(element(2, application:get_env(ns_server,path_config_datadir))).')
return str(dir).replace('\"', '')
    @staticmethod
    def rename_nodes(servers):
        """Rename server name from ip to their hostname
        @param servers: list of server objects.
        @return: dictionary whose key is server and value is hostname
        @raise RenameNodeException: when the REST rename call reports failure
                                    for any server.
        """
        hostnames = {}
        for server in servers:
            shell = RemoteMachineShellConnection(server)
            try:
                hostname = shell.get_full_hostname()
                rest = RestConnection(server)
                renamed, content = rest.rename_node(
                    hostname, username=server.rest_username,
                    password=server.rest_password)
                # raise_if fires only when the rename did not succeed
                raise_if(
                    not renamed,
                    RenameNodeException(
                        "Server %s is not renamed! Hostname %s. Error %s" % (
                            server, hostname, content)
                    )
                )
                hostnames[server] = hostname
                # keep the in-memory server object in sync with the cluster
                server.hostname = hostname
            finally:
                # always close the shell session, even when rename fails
                shell.disconnect()
        return hostnames
# Returns version like "x.x.x" after removing build number
@staticmethod
def get_cb_version(node):
rest = RestConnection(node)
version = rest.get_nodes_self().version
return version[:version.rfind('-')]
@staticmethod
def get_cbcollect_info(server):
"""Collect cbcollectinfo logs for all the servers in the cluster.
"""
path = TestInputSingleton.input.param("logs_folder", "/tmp")
print(("grabbing cbcollect from {0}".format(server.ip)))
path = path or "."
try:
cbcollectRunner(server, path).run()
TestInputSingleton.input.test_params[
"get-cbcollect-info"] = False
except Exception as e:
NodeHelper._log.error(
"IMPOSSIBLE TO GRAB CBCOLLECT FROM {0}: {1}".format(
server.ip,
e))
@staticmethod
def collect_logs(server):
"""Grab cbcollect before we cleanup
"""
NodeHelper.get_cbcollect_info(server)
class FloatingServers:
    """Registry of currently-free servers, used by rebalance-in and
    swap-rebalance operations."""

    # shared, class-level pool of unassigned server objects
    _serverlist = []
class FTSIndex:
"""
    To create a Full Text Search index :
    e.g., FTSIndex(self._cluster, "beer_index", source_type = 'couchbase',
                source_name = 'beer-sample', index_type = 'fulltext-index',
                index_params = {'store' : 'forestdb'},
                plan_params = {'maxPartitionsPerIndex' : 40}
               )
    To create an FTS Alias:
    FTSIndex(self._cluster, "beer_index", source_type = 'couchbase',
                source_name = 'beer-sample', index_type = 'fulltext-alias',
                index_params = {'store' : 'forestdb'},
                plan_params = {'maxPartitionsPerIndex' : 40}
               )
"""
    def __init__(self, cluster, name, source_type='couchbase',
                 source_name=None, index_type='fulltext-index', index_params=None,
                 plan_params=None, source_params=None, source_uuid=None, dataset=None, index_storage_type=None):
        """
        @param cluster : 'this' cluster object
        @param name : name of index/alias
        @param source_type : 'couchbase' or 'files'
        @param source_name : name of couchbase bucket
        @param index_type : 'fulltext-index' or 'fulltext-alias'
        @param index_params : to specify advanced index mapping;
                            dictionary overiding params in
                            INDEX_DEFAULTS.BLEVE_MAPPING or
                            INDEX_DEFAULTS.ALIAS_DEFINITION depending on
                            index_type
        @param plan_params : dictionary overriding params defined in
                            INDEX_DEFAULTS.PLAN_PARAMS
        @param source_params: dictionary overriding params defined in
                            INDEX_DEFAULTS.SOURCE_CB_PARAMS or
                            INDEX_DEFAULTS.SOURCE_FILE_PARAMS
        @param source_uuid: UUID of the source, may not be used
        @param dataset: dataset name; falls back to the "dataset" test
                        input param ("emp" by default)
        @param index_storage_type: storage engine (e.g. scorch /
                        upside_down); falls back to the "index_type"
                        test input param
        """
        self.__cluster = cluster
        self.__log = cluster.get_logger()
        self._source_type = source_type
        self._source_name = source_name
        self._one_time = False
        self.index_type = index_type
        # explicit constructor arg wins over the "index_type" test param
        if not index_storage_type:
            self.index_storage_type = TestInputSingleton.input.param("index_type", None)
        else:
            self.index_storage_type = index_storage_type
        self.num_pindexes = 0
        # skeleton definition sent to the FTS REST API; filled in below
        self.index_definition = {
            "type": "fulltext-index",
            "name": "",
            "uuid": "",
            "params": {},
            "sourceType": "couchbase",
            "sourceName": "default",
            "sourceUUID": "",
            "planParams": {},
            "sourceParams": {}
        }
        self.name = self.index_definition['name'] = name
        self.es_custom_map = None
        self.smart_query_fields = None
        self.index_definition['type'] = self.index_type
        if self.index_type == "fulltext-alias":
            # aliases have no source bucket of their own
            self.index_definition['sourceType'] = "nil"
            self.index_definition['sourceName'] = ""
        else:
            self.source_bucket = self.__cluster.get_bucket_by_name(source_name)
            self.index_definition['sourceType'] = self._source_type
            self.index_definition['sourceName'] = self._source_name
        self.dataset = dataset
        if not self.dataset:
            self.dataset = TestInputSingleton.input.param("dataset", "emp")
        # Support for custom map
        self.custom_map = TestInputSingleton.input.param("custom_map", False)
        self.custom_map_add_non_indexed_fields = TestInputSingleton.input.param("custom_map_add_non_indexed_fields",
                                                                                True)
        self.num_custom_analyzers = TestInputSingleton.input.param("num_custom_analyzers", 0)
        self.text_analyzer = TestInputSingleton.input.param("text_analyzer", None)
        self.multiple_filters = TestInputSingleton.input.param("multiple_filters", False)
        self.cm_id = TestInputSingleton.input.param("cm_id", 0)
        if self.custom_map:
            # populates index_definition['params'] from a seeded generator
            self.generate_new_custom_map(seed=self.cm_id)
        self.fts_queries = []
        if index_params:
            self.index_definition['params'] = \
                self.build_custom_index_params(index_params)
        if plan_params:
            self.index_definition['planParams'] = \
                self.build_custom_plan_params(plan_params)
        if source_params:
            self.index_definition['sourceParams'] = {}
            self.index_definition['sourceParams'] = source_params
        if source_uuid:
            self.index_definition['sourceUUID'] = source_uuid
        # default store config; may be overridden by the params below
        self.index_definition['params']['store'] = {
            "kvStoreName": "mossStore",
            "mossStoreOptions": {}
        }
        if self.index_storage_type:
            self.index_definition['params']['store']['indexType'] = self.index_storage_type
        if TestInputSingleton.input.param("num_snapshots_to_keep", None):
            self.index_definition['params']['store']['numSnapshotsToKeep'] = int(
                TestInputSingleton.input.param(
                    "num_snapshots_to_keep",
                    None)
            )
        if TestInputSingleton.input.param("level_compaction", None):
            self.index_definition['params']['store']['mossStoreOptions'] = {
                "CompactionLevelMaxSegments": 9,
                "CompactionPercentage": 0.6,
                "CompactionLevelMultiplier": 3
            }
        if TestInputSingleton.input.param("moss_compact_threshold", None):
            self.index_definition['params']['store'] \
                ['mossStoreOptions']['CompactionPercentage'] = int(
                TestInputSingleton.input.param(
                    "moss_compact_threshold",
                    None)
            )
        if TestInputSingleton.input.param("memory_only", None):
            # memory-only store replaces the whole store block
            self.index_definition['params']['store'] = \
                {"kvStoreName": "moss",
                 "mossLowerLevelStoreName": ""}
        self.moss_enabled = TestInputSingleton.input.param("moss", True)
        if not self.moss_enabled:
            if 'store' not in list(self.index_definition['params'].keys()):
                self.index_definition['params']['store'] = {}
            self.index_definition['params']['store']['kvStoreMossAllow'] = False
    def is_scorch(self):
        """Return True when the live index definition reports the
        "scorch" storage engine."""
        return self.get_index_type() == "scorch"
    def is_upside_down(self):
        """Return True when the live index definition reports the
        "upside_down" storage engine."""
        return self.get_index_type() == "upside_down"
def is_type_unspecified(self):
return self.get_index_type() == None
def get_index_type(self):
try:
_, defn = self.get_index_defn()
index_type = defn['indexDef']['params']['store']['indexType']
self.__log.info("Index type of {0} is {1}".
format(self.name,
defn['indexDef']['params']['store']['indexType']))
return index_type
except Exception:
self.__log.error("No 'indexType' present in index definition")
return None
    def generate_new_custom_map(self, seed):
        """Generate a randomized custom index mapping (plus the matching
        Elasticsearch mapping) for the configured dataset.

        Populates self.es_custom_map, self.smart_query_fields and
        self.index_definition['params']; when custom analyzers are
        requested, an "analysis" section is added to the mapping.

        @param seed: RNG seed (cm_id test param) so a run is reproducible
        """
        from .custom_map_generator.map_generator import CustomMapGenerator
        cm_gen = CustomMapGenerator(seed=seed, dataset=self.dataset,
                                    num_custom_analyzers=self.num_custom_analyzers,
                                    multiple_filters=self.multiple_filters,
                                    custom_map_add_non_indexed_fields=self.custom_map_add_non_indexed_fields,
                                    text_analyzer=self.text_analyzer)
        fts_map, self.es_custom_map = cm_gen.get_map()
        self.smart_query_fields = cm_gen.get_smart_query_fields()
        print((self.smart_query_fields))
        self.index_definition['params'] = self.build_custom_index_params(
            fts_map)
        if self.num_custom_analyzers > 0:
            custom_analyzer_def = cm_gen.build_custom_analyzer()
            self.index_definition["params"]["mapping"]["analysis"] = \
                custom_analyzer_def
        self.__log.info(json.dumps(self.index_definition["params"],
                                   indent=3))
    def update_custom_analyzer(self, seed):
        """
        This method will update the custom analyzer in an index definition in 3 ways -
        1) delete custom analyzer
        2) remove a custom filter
        3) change the custom analyzer used

        Which of the three happens is driven by the test input params
        "delete_custom_analyzer" and "delete_custom_filter"; otherwise a
        freshly generated analyzer replaces the current one.
        """
        delete_custom_analyzer = TestInputSingleton.input.param \
            ("delete_custom_analyzer", False)
        delete_custom_filter = TestInputSingleton.input.param \
            ("delete_custom_filter", False)
        # Deleting custom analyzer in use
        if delete_custom_analyzer:
            self.index_definition["params"]["mapping"]["analysis"] = {}
        else:
            if delete_custom_filter:
                # filters referenced by "customAnalyzer1"
                custom_filters = self.index_definition["params"]["mapping"] \
                    ["analysis"]["analyzers"]["customAnalyzer1"]["token_filters"]
                for custom_filter in custom_filters:
                    self.__log.info("custom filter = " + custom_filter)
                    # NOTE(review): this deletes the filter definition but
                    # leaves the analyzer's token_filters reference list
                    # intact - presumably a deliberate negative test; confirm
                    del self.index_definition['params']['mapping']['analysis'] \
                        ['token_filters'][custom_filter]
            else:
                from .custom_map_generator.map_generator import CustomMapGenerator
                cm_gen = CustomMapGenerator(seed=seed, dataset=self.dataset,
                                            num_custom_analyzers=self.num_custom_analyzers,
                                            multiple_filters=self.multiple_filters)
                if self.num_custom_analyzers > 0:
                    custom_analyzer_def = cm_gen.build_custom_analyzer()
                    self.index_definition["params"]["mapping"]["analysis"] = \
                        custom_analyzer_def
def build_custom_index_params(self, index_params):
if self.index_type == "fulltext-index":
mapping = INDEX_DEFAULTS.BLEVE_MAPPING
if self.custom_map:
if not TestInputSingleton.input.param("default_map", False):
mapping['mapping']['default_mapping']['enabled'] = False
mapping['mapping'].update(index_params)
else:
mapping = {"targets": {}}
mapping.update(index_params)
return mapping
def build_custom_plan_params(self, plan_params):
plan = INDEX_DEFAULTS.PLAN_PARAMS
plan.update(plan_params)
return plan
    def add_child_field_to_default_mapping(self, field_name, field_type,
                                           field_alias=None, analyzer=None):
        """
        This method will add a field mapping to a default mapping

        @param field_name: dotted path of the field, e.g. "a.b.c"
        @param field_type: bleve field type for the leaf (e.g. "text")
        @param field_alias: display name of the leaf field; defaults to
                            the last path component
        @param analyzer: analyzer for the leaf field (None = inherit)
        """
        fields = str.split(field_name, '.')
        nesting_level = len(fields)
        # template for every intermediate (non-leaf) property level
        child_map = {}
        child_map['dynamic'] = False
        child_map['enabled'] = True
        child_map['properties'] = {}
        child_field = {}
        child_field['dynamic'] = False
        child_field['enabled'] = True
        if not field_alias:
            field_alias = fields[len(fields) - 1]
        child_field['fields'] = [
            {
                "analyzer": analyzer,
                "display_order": "0",
                "include_in_all": True,
                "include_term_vectors": True,
                "index": True,
                "name": field_alias,
                "store": True,
                "type": field_type
            }
        ]
        field_maps = []
        field_maps.append(child_field)
        if nesting_level > 1:
            # wrap the leaf in one property level per remaining path part,
            # working from the innermost component outwards
            for x in range(0, nesting_level - 1):
                field = fields.pop()
                # Do a deepcopy of child_map into field_map since we dont
                # want to have child_map altered because of changes on field_map
                field_map = copy.deepcopy(child_map)
                field_map['properties'][field] = field_maps.pop()
                field_maps.append(field_map)
        map = {}
        if 'mapping' not in self.index_definition['params']:
            # first field added: build a fresh non-dynamic default mapping
            map['default_mapping'] = {}
            map['default_mapping']['properties'] = {}
            map['default_mapping']['dynamic'] = False
            map['default_mapping']['enabled'] = True
            map['default_mapping']['properties'][fields.pop()] = field_maps.pop()
            self.index_definition['params']['mapping'] = map
        else:
            # attach the (possibly nested) map under the existing mapping
            self.index_definition['params']['mapping']['default_mapping'] \
                ['properties'][fields.pop()] = field_maps.pop()
    def add_analyzer_to_existing_field_map(self, field_name, field_type,
                                           field_alias=None, analyzer=None):
        """
        Add another field mapping with a different analyzer to an existing field map.
        Can be enhanced to update other fields as well if required.

        @param field_name: dotted path of the already-mapped field
        @param field_type: bleve field type of the new sibling entry
        @param field_alias: name for the new entry; defaults to the last
                            path component
        @param analyzer: analyzer for the new entry
        """
        fields = str.split(field_name, '.')
        if not field_alias:
            field_alias = fields[len(fields) - 1]
        child_field = {
            "analyzer": analyzer,
            "display_order": "0",
            "include_in_all": True,
            "include_term_vectors": True,
            "index": True,
            "name": field_alias,
            "store": True,
            "type": field_type
        }
        # work on a copy, then write the updated tree back in one shot
        map = copy.deepcopy(self.index_definition['params']['mapping']
                            ['default_mapping']['properties'])
        map = self.update_nested_field_mapping(fields[len(fields) - 1],
                                               child_field, map)
        self.index_definition['params']['mapping']['default_mapping'] \
            ['properties'] = map
def update_nested_field_mapping(self, key, value, map):
"""
Recurse through a given nested field mapping, and append the leaf node with the specified value.
Can be enhanced to update the current value as well if required.
"""
for k, v in list(map.items()):
if k == key:
map[k]['fields'].append(value)
return map
else:
if 'properties' in map[k]:
map[k]['properties'] = \
self.update_nested_field_mapping(key, value,
map[k]['properties'])
return map
def add_type_mapping_to_index_definition(self, type, analyzer):
"""
Add Type Mapping to Index Definition (and disable default mapping)
"""
type_map = {}
type_map[type] = {}
type_map[type]['default_analyzer'] = analyzer
type_map[type]['display_order'] = 0
type_map[type]['dynamic'] = True
type_map[type]['enabled'] = True
if 'mapping' not in self.index_definition['params']:
self.index_definition['params']['mapping'] = {}
self.index_definition['params']['mapping']['default_mapping'] = {}
self.index_definition['params']['mapping']['default_mapping'] \
['properties'] = {}
self.index_definition['params']['mapping']['default_mapping'] \
['dynamic'] = False
self.index_definition['params']['mapping']['default_mapping'] \
['enabled'] = False
if 'types' not in self.index_definition['params']['mapping']:
self.index_definition['params']['mapping']['types'] = {}
self.index_definition['params']['mapping']['types'] = type_map
else:
self.index_definition['params']['mapping']['types'][type] = type_map[type]
def add_doc_config_to_index_definition(self, mode):
"""
Add Document Type Configuration to Index Definition
Note: These regexps have been constructed keeping
travel-sample dataset in mind (keys like 'airline_1023')
"""
doc_config = {}
if mode == 'docid_regexp1':
doc_config['mode'] = 'docid_regexp'
doc_config['docid_regexp'] = "([^_]*)"
if mode == 'docid_regexp2':
doc_config['mode'] = 'docid_regexp'
# a seq of 6 or more letters
doc_config['docid_regexp'] = "\\b[a-z]{6,}"
if mode == 'docid_regexp_neg1':
doc_config['mode'] = 'docid_regexp'
# a seq of 8 or more letters
doc_config['docid_regexp'] = "\\b[a-z]{8,}"
if mode == 'docid_prefix':
doc_config['mode'] = 'docid_prefix'
doc_config['docid_prefix_delim'] = "_"
if mode == 'docid_prefix_neg1':
doc_config['mode'] = 'docid_prefix'
doc_config['docid_prefix_delim'] = "-"
if mode == 'type_field':
doc_config['mode'] = 'type_field'
doc_config['type_field'] = "type"
if mode == 'type_field_neg1':
doc_config['mode'] = 'type_field'
doc_config['type_field'] = "newtype"
self.index_definition['params']['doc_config'] = {}
self.index_definition['params']['doc_config'] = doc_config
def get_rank_of_doc_in_search_results(self, content, doc_id):
"""
Fetch rank of a given document in Search Results
"""
try:
return content.index(doc_id) + 1
except Exception as err:
self.__log.info("Doc ID %s not found in search results." % doc_id)
return -1
    def create(self, rest=None):
        """Create this index/alias on the FTS service and register it
        with the owning cluster object.

        Any pre-existing index with the same name is deleted first: the
        definition GET returning anything but 400 is treated as "index
        already exists".

        @param rest: RestConnection to use; a random FTS node when None
        """
        self.__log.info("Checking if index already exists ...")
        if not rest:
            rest = RestConnection(self.__cluster.get_random_fts_node())
        status, _ = rest.get_fts_index_definition(self.name)
        # presumably 400 == "not found"; anything else means it exists
        if status != 400:
            rest.delete_fts_index(self.name)
        self.__log.info("Creating {0} {1} on {2}".format(
            self.index_type,
            self.name,
            rest.ip))
        rest.create_fts_index(self.name, self.index_definition)
        self.__cluster.get_indexes().append(self)
def update(self, rest=None):
if not rest:
rest = RestConnection(self.__cluster.get_random_fts_node())
self.__log.info("Updating {0} {1} on {2}".format(
self.index_type,
self.name,
rest.ip))
rest.update_fts_index(self.name, self.index_definition)
#self.__log.info("sleeping for 200")
#time.sleep(200)
def update_index_to_upside_down(self):
if self.is_upside_down():
self.__log.info("The index {0} is already upside_down index, conversion not needed!")
else:
self.index_definition['params']['store']['indexType'] = "upside_down"
self.index_definition['uuid'] = self.get_uuid()
self.update()
time.sleep(5)
_, defn = self.get_index_defn()
if defn['indexDef']['params']['store']['indexType'] == "upside_down":
self.__log.info("SUCCESS: The index type is now upside_down!")
else:
self.__log.error("defn['indexDef']['params']['store']['indexType']")
raise Exception("Unable to convert index to upside_down")
def update_index_to_scorch(self):
if self.is_scorch():
self.__log.info("The index {0} is already scorch index, conversion not needed!")
else:
self.index_definition['params']['store']['indexType'] = "scorch"
self.index_definition['uuid'] = self.get_uuid()
self.update()
time.sleep(5)
_, defn = self.get_index_defn()
if defn['indexDef']['params']['store']['indexType'] == "scorch":
self.__log.info("SUCCESS: The index type is now scorch!")
else:
self.__log.error("defn['indexDef']['params']['store']['indexType']")
raise Exception("Unable to convert index to scorch")
def update_num_pindexes(self, new):
self.index_definition['planParams']['maxPartitionsPerPIndex'] = new
self.index_definition['uuid'] = self.get_uuid()
self.update()
def update_index_partitions(self, new):
status, index_def = self.get_index_defn()
self.index_definition = index_def["indexDef"]
self.index_definition['planParams']['indexPartitions'] = new
self.index_definition['uuid'] = self.get_uuid()
self.update()
def update_docvalues_email_custom_index(self, new):
status, index_def = self.get_index_defn()
self.index_definition = index_def["indexDef"]
self.index_definition['params']['mapping']['types']['emp']['properties']['join_date']['fields'][0][
'docvalues'] = new
self.index_definition['uuid'] = self.get_uuid()
self.update()
def update_num_replicas(self, new):
self.index_definition['planParams']['numReplicas'] = new
self.index_definition['uuid'] = self.get_uuid()
self.update()
def delete(self, rest=None):
if not rest:
rest = RestConnection(self.__cluster.get_random_fts_node())
self.__log.info("Deleting {0} {1} on {2}".format(
self.index_type,
self.name,
rest.ip))
status = rest.delete_fts_index(self.name)
if status:
self.__cluster.get_indexes().remove(self)
if not self.__cluster.are_index_files_deleted_from_disk(self.name):
self.__log.error("Status: {0} but index file for {1} not yet "
"deleted!".format(status, self.name))
else:
self.__log.info("Validated: all index files for {0} deleted from "
"disk".format(self.name))
else:
raise FTSException("Index/alias {0} not deleted".format(self.name))
def get_index_defn(self, rest=None):
if not rest:
rest = RestConnection(self.__cluster.get_random_fts_node())
return rest.get_fts_index_definition(self.name)
def get_max_partitions_pindex(self):
_, defn = self.get_index_defn()
return int(defn['indexDef']['planParams']['maxPartitionsPerPIndex'])
    def clone(self, clone_name):
        """Clone this index under ``clone_name``.

        NOTE(review): unimplemented stub - currently a no-op.
        """
        pass
def get_indexed_doc_count(self, rest=None):
if not rest:
rest = RestConnection(self.__cluster.get_random_fts_node())
return rest.get_fts_index_doc_count(self.name)
def get_num_mutations_to_index(self, rest=None):
if not rest:
rest = RestConnection(self.__cluster.get_random_fts_node())
status, stat_value = rest.get_fts_stats(index_name=self.name,
bucket_name=self._source_name,
stat_name='num_mutations_to_index')
return stat_value
    def get_src_bucket_doc_count(self):
        """Return the document count of this index's source bucket."""
        return self.__cluster.get_doc_count_in_bucket(self.source_bucket)
def get_uuid(self):
rest = RestConnection(self.__cluster.get_random_fts_node())
return rest.get_fts_index_uuid(self.name)
    def construct_cbft_query_json(self, query, fields=None, timeout=60000,
                                  facets=False,
                                  sort_fields=None,
                                  explain=False,
                                  show_results_from_item=0,
                                  highlight=False,
                                  highlight_style=None,
                                  highlight_fields=None,
                                  consistency_level='',
                                  consistency_vectors={},
                                  score=''):
        """Build the JSON request body for an FTS query.

        Starts from the QUERY.JSON template and overlays size/limit/from,
        fields, facets, sort, highlight, scoring and consistency controls.
        Passing None for consistency_level / consistency_vectors removes
        the corresponding key from the template entirely.

        NOTE(review): consistency_vectors={} is a mutable default, but it
        is only read/compared here, never mutated, so it is safe as-is.
        """
        max_matches = TestInputSingleton.input.param("query_max_matches", 10000000)
        max_limit_matches = TestInputSingleton.input.param("query_limit_matches", None)
        # deepcopy: QUERY.JSON is a shared template and keys are deleted below
        query_json = copy.deepcopy(QUERY.JSON)
        # query is a unicode dict
        query_json['query'] = query
        query_json['indexName'] = self.name
        query_json['explain'] = explain
        if max_matches is not None and max_matches != 'None':
            query_json['size'] = int(max_matches)
        else:
            del query_json['size']
        if max_limit_matches is not None:
            query_json['limit'] = int(max_limit_matches)
        if show_results_from_item:
            query_json['from'] = int(show_results_from_item)
        if timeout is not None:
            query_json['ctl']['timeout'] = int(timeout)
        if fields:
            query_json['fields'] = fields
        if facets:
            query_json['facets'] = self.construct_facets_definition()
        if sort_fields:
            query_json['sort'] = sort_fields
        if highlight:
            query_json['highlight'] = {}
            if highlight_style:
                query_json['highlight']['style'] = highlight_style
            if highlight_fields:
                query_json['highlight']['fields'] = highlight_fields
        if consistency_level is None:
            del query_json['ctl']['consistency']['level']
        else:
            query_json['ctl']['consistency']['level'] = consistency_level
        if consistency_vectors is None:
            del query_json['ctl']['consistency']['vectors']
        elif consistency_vectors != {}:
            query_json['ctl']['consistency']['vectors'] = consistency_vectors
        if score != '':
            # any non-empty score flag disables scoring entirely
            query_json['score'] = "none"
        return query_json
    def construct_facets_definition(self):
        """
        Constructs the facets definition of the query json

        Which facet types are emitted ("terms", "numeric_ranges",
        "date_ranges") is driven by the comma-separated "facets" test
        param; "facets_size" caps the bucket count per facet. Field
        names and bucket boundaries target the emp dataset.
        """
        facets = TestInputSingleton.input.param("facets", None).split(",")
        size = TestInputSingleton.input.param("facets_size", 10)
        terms_field = "dept"
        terms_facet_name = "Department"
        numeric_range_field = "salary"
        numeric_range_facet_name = "Salaries"
        date_range_field = "join_date"
        date_range_facet_name = "No. of Years"
        facet_definition = {}
        # tenure buckets relative to the dataset's join dates
        date_range_buckets = [
            {"name": "1 year", "start": "2015-08-01"},
            {"name": "2-5 years", "start": "2011-08-01", "end": "2015-07-31"},
            {"name": "6-10 years", "start": "2006-08-01", "end": "2011-07-31"},
            {"name": "10+ years", "end": "2006-07-31"}
        ]
        numeric_range_buckets = [
            {"name": "high salary", "min": 150001},
            {"name": "average salary", "min": 110001, "max": 150000},
            {"name": "low salary", "max": 110000}
        ]
        for facet in facets:
            if facet == 'terms':
                facet_definition[terms_facet_name] = {}
                facet_definition[terms_facet_name]['field'] = terms_field
                facet_definition[terms_facet_name]['size'] = size
            if facet == 'numeric_ranges':
                facet_definition[numeric_range_facet_name] = {}
                facet_definition[numeric_range_facet_name]['field'] = \
                    numeric_range_field
                facet_definition[numeric_range_facet_name]['size'] = size
                facet_definition[numeric_range_facet_name]['numeric_ranges'] = []
                for bucket in numeric_range_buckets:
                    facet_definition[numeric_range_facet_name] \
                        ['numeric_ranges'].append(bucket)
            if facet == 'date_ranges':
                facet_definition[date_range_facet_name] = {}
                facet_definition[date_range_facet_name]['field'] = \
                    date_range_field
                facet_definition[date_range_facet_name]['size'] = size
                facet_definition[date_range_facet_name]['date_ranges'] = []
                for bucket in date_range_buckets:
                    facet_definition[date_range_facet_name] \
                        ['date_ranges'].append(bucket)
        return facet_definition
    def execute_query(self, query, zero_results_ok=True, expected_hits=None,
                      return_raw_hits=False, sort_fields=None,
                      explain=False, show_results_from_item=0, highlight=False,
                      highlight_style=None, highlight_fields=None, consistency_level='',
                      consistency_vectors={}, timeout=60000, rest=None, score='', expected_no_of_results=None):
        """
        Takes a query dict, constructs a json, runs and returns results

        @return: (hits, doc_ids, time_taken, status), or
                 (hits, matches, time_taken, status) when return_raw_hits
        @raise FTSException: zero hits when not allowed, or mismatch
                 against expected_hits / expected_no_of_results
        @raise ServerUnavailableException: when the query times out

        NOTE(review): the ``rest`` parameter is accepted but never used
        in this body - confirm whether callers rely on it.
        """
        query_dict = self.construct_cbft_query_json(query,
                                                    sort_fields=sort_fields,
                                                    explain=explain,
                                                    show_results_from_item=show_results_from_item,
                                                    highlight=highlight,
                                                    highlight_style=highlight_style,
                                                    highlight_fields=highlight_fields,
                                                    consistency_level=consistency_level,
                                                    consistency_vectors=consistency_vectors,
                                                    timeout=timeout,
                                                    score=score)
        hits = -1
        matches = []
        doc_ids = []
        time_taken = 0
        status = {}
        try:
            if timeout == 0:
                # force limit in 10 min in case timeout=0(no timeout)
                rest_timeout = 600
            else:
                # REST timeout slightly above the query's own timeout (ms->s)
                rest_timeout = timeout // 1000 + 10
            hits, matches, time_taken, status = \
                self.__cluster.run_fts_query(self.name, query_dict, timeout=rest_timeout)
        except ServerUnavailableException:
            # query time outs
            # NOTE(review): raises the class, not the caught instance -
            # the original context is replaced by a fresh exception
            raise ServerUnavailableException
        except Exception as e:
            # any other failure is logged; hits stays -1 (truthy below)
            self.__log.error("Error running query: %s" % e)
        if hits:
            for doc in matches:
                doc_ids.append(doc['id'])
        if int(hits) == 0 and not zero_results_ok:
            self.__log.info("ERROR: 0 hits returned!")
            raise FTSException("No docs returned for query : %s" % query_dict)
        if expected_hits and expected_hits != hits:
            self.__log.info("ERROR: Expected hits: %s, fts returned: %s"
                            % (expected_hits, hits))
            raise FTSException("Expected hits: %s, fts returned: %s"
                               % (expected_hits, hits))
        if expected_hits and expected_hits == hits:
            self.__log.info("SUCCESS! Expected hits: %s, fts returned: %s"
                            % (expected_hits, hits))
        if expected_no_of_results is not None:
            if expected_no_of_results == doc_ids.__len__():
                self.__log.info("SUCCESS! Expected number of results: %s, fts returned: %s"
                                % (expected_no_of_results, doc_ids.__len__()))
            else:
                self.__log.info("ERROR! Expected number of results: %s, fts returned: %s"
                                % (expected_no_of_results, doc_ids.__len__()))
                print(doc_ids)
                raise FTSException("Expected number of results: %s, fts returned: %s"
                                   % (expected_no_of_results, doc_ids.__len__()))
        if not return_raw_hits:
            return hits, doc_ids, time_taken, status
        else:
            return hits, matches, time_taken, status
def execute_query_with_facets(self, query, zero_results_ok=True,
expected_hits=None):
"""
Takes a query dict with facet definition, constructs a json,
runs and returns results
"""
query_dict = self.construct_cbft_query_json(query, facets=True)
hits = -1
matches = []
doc_ids = []
time_taken = 0
status = {}
try:
hits, matches, time_taken, status, facets = \
self.__cluster.run_fts_query_with_facets(self.name, query_dict)
except ServerUnavailableException:
# query time outs
raise ServerUnavailableException
except Exception as e:
self.__log.error("Error running query: %s" % e)
if hits:
for doc in matches:
doc_ids.append(doc['id'])
if int(hits) == 0 and not zero_results_ok:
raise FTSException("No docs returned for query : %s" % query_dict)
if expected_hits and expected_hits != hits:
raise FTSException("Expected hits: %s, fts returned: %s"
% (expected_hits, hits))
if expected_hits and expected_hits == hits:
self.__log.info("SUCCESS! Expected hits: %s, fts returned: %s"
% (expected_hits, hits))
return hits, doc_ids, time_taken, status, facets
def validate_facets_in_search_results(self, no_of_hits, facets_returned):
"""
Validate the facet data returned in the query response JSON.
"""
facets = TestInputSingleton.input.param("facets", None).split(",")
size = TestInputSingleton.input.param("facets_size", 10)
field_indexed = TestInputSingleton.input.param("field_indexed", True)
terms_facet_name = "Department"
numeric_range_facet_name = "Salaries"
date_range_facet_name = "No. of Years"
for facet in facets:
if facet == 'terms':
facet_name = terms_facet_name
if facet == 'numeric_ranges':
facet_name = numeric_range_facet_name
if facet == 'date_ranges':
facet_name = date_range_facet_name
# Validate Facet name
if facet_name not in facets_returned:
raise FTSException(facet_name + " not present in the "
"search results")
# Validate Total No. with no. of hits. It can be unequal if
# the field is not indexed, but not otherwise.
total_count = facets_returned[facet_name]['total']
missing_count = facets_returned[facet_name]['missing']
others_count = facets_returned[facet_name]['other']
if not total_count == no_of_hits:
if field_indexed:
raise FTSException("Total count of results in " + facet_name
+ " Facet (" + str(total_count) +
") is not equal to total hits in search "
"results (" + str(no_of_hits) + ")")
else:
if not ((missing_count == no_of_hits) and (total_count == 0)):
raise FTSException("Field not indexed, but counts "
"are not expected")
# Validate only if there are some search results
if not total_count == 0:
# Validate no. of terms returned, and it should be <= size
no_of_buckets_in_facet = len(facets_returned[facet_name] \
[facet])
if no_of_buckets_in_facet > size:
raise FTSException("Total no. of buckets in facets (" +
no_of_buckets_in_facet +
") exceeds the size defined ("
+ str(size) + ")")
# Validate count in each facet and total it up.
# Should be Total - missing - others
total_count_in_buckets = 0
for bucket in facets_returned[facet_name][facet]:
self.__log.info(bucket)
total_count_in_buckets += bucket['count']
if not total_count_in_buckets == (total_count - missing_count -
others_count):
raise FTSException("Total count (%d) in buckets not correct"
% total_count_in_buckets)
if not self.validate_query_run_with_facet_data \
(query=TestInputSingleton.input.param("query", ""),
facets_returned=facets_returned, facet_type=facet):
raise FTSException("Requerying returns different results "
"than expected")
else:
self.__log.info("Zero total count in facet.")
self.__log.info("Validated Facets in search results")
    def validate_query_run_with_facet_data(self, query, facets_returned,
                                           facet_type):
        """
        Form a query based on the facet data and check the # hits.

        Builds a conjunction of the original query with a predicate
        derived from the first bucket of the facet, re-runs it and
        expects the hit count to equal that bucket's 'count'.

        @param query: original query as a JSON string
        @param facets_returned: 'facets' section of the response JSON
        @param facet_type: 'terms', 'numeric_ranges' or 'date_ranges'
        @return: True when the re-run hit count matches, else False
        """
        if facet_type == 'terms':
            facet_name = 'Department'
            field_name = 'dept'
            value = facets_returned[facet_name][facet_type][0]['term']
            expected_hits = facets_returned[facet_name][facet_type][0]['count']
            new_query = "{\"conjuncts\" :[" + query + ",{\"match\":\"" + \
                        value + "\", \"field\":\"" + field_name + "\"}]}"
        if facet_type == 'numeric_ranges':
            facet_name = 'Salaries'
            field_name = 'salary'
            max_value = None
            min_value = None
            min_value_query = ""
            max_value_query = ""
            # open-ended buckets omit 'max'/'min' - only constrain the
            # bounds that exist
            try:
                max_value = facets_returned[facet_name][facet_type][0]['max']
                max_value_query = ",{\"inclusive_max\":true, \"field\":\"" \
                                  + field_name + "\", \"max\":" + \
                                  str(max_value) + "}"
            except:
                self.__log.info("max key doesnt exist for Salary facet")
            try:
                min_value = facets_returned[facet_name][facet_type][0]['min']
                min_value_query = ",{\"inclusive_min\":true, \"field\":\"" \
                                  + field_name + "\", \"min\":" + \
                                  str(min_value) + "}"
            except:
                self.__log.info("min key doesnt exist for Salary facet")
            expected_hits = facets_returned[facet_name][facet_type][0]['count']
            new_query = "{\"conjuncts\" :[" + query + min_value_query + \
                        max_value_query + "]}"
        if facet_type == 'date_ranges':
            facet_name = 'No. of Years'
            field_name = 'join_date'
            end_value = None
            start_value = None
            start_value_query = ""
            end_value_query = ""
            # open-ended buckets omit 'start'/'end' - only constrain the
            # bounds that exist
            try:
                end_value = facets_returned[facet_name][facet_type][0]['end']
                end_value_query = ",{\"inclusive_end\":true, \"field\":\"" + \
                                  field_name + "\", \"end\":\"" + \
                                  end_value + "\"}"
            except:
                self.__log.info("end key doesnt exist for No. of Years facet")
            try:
                start_value = facets_returned[facet_name][facet_type][0]['start']
                start_value_query = ",{\"inclusive_start\":true, \"field\":\"" \
                                    + field_name + "\", \"start\":\"" + \
                                    start_value + "\"}"
            except:
                self.__log.info("start key doesnt exist for No. of Years facet")
            expected_hits = facets_returned[facet_name][facet_type][0]['count']
            new_query = "{\"conjuncts\" :[" + query + end_value_query + \
                        start_value_query + "]}"
        self.__log.info(new_query)
        new_query = json.loads(new_query)
        hits, _, _, _ = self.execute_query(query=new_query,
                                           zero_results_ok=True,
                                           expected_hits=expected_hits)
        if not hits == expected_hits:
            return False
        else:
            return True
    def validate_sorted_results(self, raw_hits, sort_fields):
        """
        Validate if the docs returned in the search result match the expected values.

        :param raw_hits: list of hit dicts from the FTS response; each has an
                         'id' key and optionally a 'sort' key
        :param sort_fields: list of sort fields the query was issued with
                            (may be falsy when default sorting was used)
        :return: True if doc order and per-doc sort-field counts match
                 expectations, False otherwise

        Expected order comes from the test params:
          * "expected"  - comma-separated doc IDs in expected rank order
          * "tolerance" - two comma-separated doc IDs whose ranks may be
                          swapped (docs with equal field values can tie)
        """
        result = False
        expected_docs = TestInputSingleton.input.param("expected", None)
        docs = []
        # Fetch the Doc IDs from raw_hits
        for doc in raw_hits:
            docs.append(doc['id'])
        if expected_docs:
            expected_docs = expected_docs.split(',')
            # Compare docs with the expected values.
            if docs == expected_docs:
                result = True
            else:
                # Sometimes, if there are two docs with same field value, their rank
                # may be interchanged. To handle this, if the actual doc order
                # doesn't match the expected value, swap the two such docs and then
                # try to match
                tolerance = TestInputSingleton.input.param("tolerance", None)
                if tolerance:
                    tolerance = tolerance.split(',')
                    index1, index2 = expected_docs.index(
                        tolerance[0]), expected_docs.index(tolerance[1])
                    expected_docs[index1], expected_docs[index2] = expected_docs[
                        index2], \
                        expected_docs[
                        index1]
                    if docs == expected_docs:
                        result = True
                    else:
                        self.__log.info("Actual docs returned : %s", docs)
                        self.__log.info("Expected docs : %s", expected_docs)
                        return False
                else:
                    self.__log.info("Actual docs returned : %s", docs)
                    self.__log.info("Expected docs : %s", expected_docs)
                    return False
        else:
            self.__log.info("Expected doc order not specified. It is a negative"
                            " test, so skipping order validation")
            result = True
        # Validate the sort fields in the result: with no explicit sort fields
        # a single (default score) sort value is expected, otherwise one value
        # per requested sort field.
        for doc in raw_hits:
            if 'sort' in list(doc.keys()):
                if not sort_fields and len(doc['sort']) == 1:
                    result &= True
                elif len(doc['sort']) == len(sort_fields):
                    result &= True
                else:
                    self.__log.info("Sort fields do not match for the following document - ")
                    self.__log.info(doc)
                    return False
        return result
def validate_snippet_highlighting_in_result_content(self, contents, doc_id,
field_names, terms,
highlight_style=None):
'''
Validate the snippets and highlighting in the result content for a given
doc id
:param contents: Result contents
:param doc_id: Doc ID to check highlighting/snippet for
:param field_names: Field name for which term is to be validated
:param terms: search term which should be highlighted
:param highlight_style: Expected highlight style - ansi/html
:return: True/False
'''
validation = True
for content in contents:
if content['id'] == doc_id:
# Check if Location section is present for the document in the search results
if 'locations' in content:
validation &= True
else:
self.__log.info(
"Locations not present in the search result")
validation &= False
# Check if Fragments section is present in the document in the search results
# If present, check if the search term is highlighted
if 'fragments' in content:
snippet = content['fragments'][field_names][0]
# Replace the Ansi highlight tags with <mark> since the
# ansi ones render themselves hence cannot be compared.
if highlight_style == 'ansi':
snippet = snippet.replace('\x1b[43m', '<mark>').replace(
'\x1b[0m', '</mark>')
search_term = '<mark>' + terms + '</marks>'
found = snippet.find(search_term)
if not found:
self.__log.info("Search term not highlighted")
validation &= found
else:
self.__log.info(
"Fragments not present in the search result")
validation &= False
# If the test is a negative testcase to check if snippet, flip the result
if TestInputSingleton.input.param("negative_test", False):
validation = ~validation
return validation
def validate_snippet_highlighting_in_result_content_n1ql(self, contents, doc_id,
field_names, terms,
highlight_style=None):
'''
Validate the snippets and highlighting in the result content for a given
doc id
:param contents: Result contents
:param doc_id: Doc ID to check highlighting/snippet for
:param field_names: Field name for which term is to be validated
:param terms: search term which should be highlighted
:param highlight_style: Expected highlight style - ansi/html
:return: True/False
'''
validation = True
for content in contents:
if content['meta']['id'] == doc_id:
# Check if Location section is present for the document in the search results
if 'locations' in content['meta']:
validation &= True
else:
self.__log.info(
"Locations not present in the search result")
validation &= False
# Check if Fragments section is present in the document in the search results
# If present, check if the search term is highlighted
if 'fragments' in content['meta']:
snippet = content['meta']['fragments'][field_names][0]
# Replace the Ansi highlight tags with <mark> since the
# ansi ones render themselves hence cannot be compared.
if highlight_style == 'ansi':
snippet = snippet.replace('\x1b[43m', '<mark>').replace(
'\x1b[0m', '</mark>')
search_term = '<mark>' + terms + '</mark>'
found = snippet.find(search_term)
if found < 0:
self.__log.info("Search term not highlighted")
validation &= (found >= 0)
else:
self.__log.info(
"Fragments not present in the search result")
validation &= False
# If the test is a negative testcase to check if snippet, flip the result
if TestInputSingleton.input.param("negative_test", False):
validation = ~validation
return validation
def get_score_from_query_result_content(self, contents, doc_id):
for content in contents:
if content['id'] == doc_id:
return content['score']
def is_doc_present_in_query_result_content(self, contents, doc_id):
for content in contents:
if content['id'] == doc_id:
return True
return False
    def get_detailed_scores_for_doc(self, doc_id, search_results, weight,
                                    searchTerm):
        """
        Parses the search results content and extracts the desired score component
        :param doc_id: Doc ID for which detailed score is requested
        :param search_results: Search results contents (each hit carries an
                               'explanation' score tree)
        :param weight: component of score - queryWeight/fieldWeight/coord
        :param searchTerm: searchTerm for which score component is required
        :return: Individual Score components
                 (tf, fieldNorm, idf, queryNorm, coord), all 0 when absent
        """
        tf_score = 0
        idf_score = 0
        field_norm_score = 0
        coord_score = 0
        query_norm_score = 0
        for doc in search_results:
            if doc['id'] == doc_id:
                if 'children' in doc['explanation']:
                    # Locate the subtree that scores this weight/term pair,
                    # then pull the leaf components out of it.
                    tree = self.find_node_in_score_tree(
                        doc['explanation']['children'], weight, searchTerm)
                    if 'children' in tree:
                        tf_score, field_norm_score, idf_score, query_norm_score, \
                        coord_score = self.extract_detailed_score_from_node(
                            tree['children'])
                    else:
                        # Leaf node found - wrap it in a list so the extractor
                        # can iterate over it uniformly.
                        nodes = []
                        nodes.append(tree)
                        tf_score, field_norm_score, idf_score, query_norm_score, \
                        coord_score = self.extract_detailed_score_from_node(
                            nodes)
                else:
                    # Flat explanation: extract directly from the root.
                    tf_score, field_norm_score, idf_score, query_norm_score, \
                    coord_score = self.extract_detailed_score_from_node(
                        doc['explanation'])
        return tf_score, field_norm_score, idf_score, query_norm_score, coord_score
def find_node_in_score_tree(self, tree, weight, searchTerm):
"""
Finds the node that contains the desired score component in the tree
structure containing the score explanation
"""
while True:
newSubnodes = []
for node in tree:
if (weight in node['message']) and (
searchTerm in node['message']):
self.__log.info("Found it")
return node
if 'children' in node:
if len(node['children']) == 0:
break
for subnode in node['children']:
if (weight in subnode['message']) and (
searchTerm in subnode['message']):
self.__log.info("Found it")
return subnode
else:
if 'children' in subnode:
for subsubnode in subnode['children']:
newSubnodes.append(subsubnode)
tree = copy.deepcopy(newSubnodes)
return None
def extract_detailed_score_from_node(self, tree):
"""
Extracts the score components from the node containing it.
"""
tf_score = 0
idf_score = 0
field_norm_score = 0
coord_score = 0
query_norm_score = 0
for item in tree:
if 'termFreq' in item['message']:
tf_score = item['value']
if 'fieldNorm' in item['message']:
field_norm_score = item['value']
if 'idf' in item['message']:
idf_score = item['value']
if 'queryNorm' in item['message']:
query_norm_score = item['value']
if 'coord' in item['message']:
coord_score = item['value']
return tf_score, field_norm_score, idf_score, query_norm_score, coord_score
class CouchbaseCluster:
    def __init__(self, name, nodes, log, use_hostname=False, sdk_compression=True):
        """
        @param name: Couchbase cluster name. e.g C1, C2 to distinguish in logs.
        @param nodes: list of server objects (read from ini file).
        @param log: logger object to print logs.
        @param use_hostname: True if use node's hostname rather ip to access
                        node else False.
        @param sdk_compression: passed through to SDK doc loaders.
        """
        self.__name = name
        self.__nodes = nodes
        self.__log = log
        self.__mem_quota = 0
        self.__use_hostname = use_hostname
        # First node in the ini list is treated as the cluster master.
        self.__master_node = nodes[0]
        self.__design_docs = []
        self.__buckets = []
        self.__hostnames = {}
        self.__fail_over_nodes = []
        self.__data_verified = True
        self.__remote_clusters = []
        self.__clusterop = Cluster()
        # Maps OPS.* -> doc generator used to load/verify data.
        self._kv_gen = {}
        self.__indexes = []
        self.__fts_nodes = []
        self.__non_fts_nodes = []
        # to avoid querying certain nodes that undergo crash/reboot scenarios
        self.__bypass_fts_nodes = []
        self.__bypass_n1ql_nodes = []
        self.__separate_nodes_on_services()
        self.__set_fts_ram_quota()
        self.sdk_compression = sdk_compression
    def __str__(self):
        # Human-readable identifier used throughout the test logs.
        return "Couchbase Cluster: %s, Master Ip: %s" % (
            self.__name, self.__master_node.ip)
    def __set_fts_ram_quota(self):
        """Apply the 'fts_quota' test param (MB) to the cluster, if given."""
        fts_quota = TestInputSingleton.input.param("fts_quota", None)
        if fts_quota:
            RestConnection(self.__master_node).set_fts_ram_quota(fts_quota)
def get_node(self, ip, port):
if len(self.__nodes) == 1:
return self.__nodes[0]
for node in self.__nodes:
self.__log.info("ip={}==node.ip={},port={}==node.port={},".format(ip,node.ip,port,
node.port))
if ip == node.ip and port == node.port:
self.__log.info("-->matched service: {}".format(node))
return node
else:
self.__log.info("-->un matched service: {}".format(node))
    def get_logger(self):
        # Accessor for the cluster's logger (private name-mangled attribute).
        return self.__log
def is_cluster_run(self):
cluster_run = False
for server in self.__nodes:
if server.ip == "127.0.0.1":
cluster_run = True
return cluster_run
    def __separate_nodes_on_services(self):
        """Rebuild the fts/n1ql/non-fts node lists from the current cluster
        service map (queried via REST from the master node)."""
        self.__fts_nodes = []
        self.__n1ql_nodes = []
        self.__non_fts_nodes = []
        service_map = RestConnection(self.__master_node).get_nodes_services()
        self.__log.info("-->service_map={}".format(service_map))
        for node_ip, services in list(service_map.items()):
            if self.is_cluster_run():
                # if cluster-run and ip not 127.0.0.1
                ip = "127.0.0.1"
            else:
                # service_map keys look like "ip:port".
                ip = node_ip.rsplit(':', 1)[0]
            if TestInputSingleton.input.param("is_secure", False):
                # Secure clusters are reached on the TLS REST port instead of
                # the port embedded in the service-map key.
                node_port = server_ports.ssl_rest_port
            else:
                node_port = node_ip.rsplit(':', 1)[1]
            node = self.get_node(ip, node_port)
            if node:
                if "fts" in services:
                    self.__fts_nodes.append(node)
                else:
                    self.__non_fts_nodes.append(node)
                if "n1ql" in services:
                    self.__n1ql_nodes.append(node)
    def get_fts_nodes(self):
        # Refresh the service map first - topology may have changed.
        self.__separate_nodes_on_services()
        return self.__fts_nodes
    def get_num_fts_nodes(self):
        # Count of nodes currently running the fts service.
        return len(self.get_fts_nodes())
    def get_non_fts_nodes(self):
        # Refresh the service map first - topology may have changed.
        self.__separate_nodes_on_services()
        return self.__non_fts_nodes
    def __stop_rebalance(self):
        """Stop a rebalance if one is in flight.

        @raise RebalanceNotStopException: when the stop request fails.
        """
        rest = RestConnection(self.__master_node)
        if rest._rebalance_progress_status() == 'running':
            self.__log.warning(
                "rebalancing is still running, test should be verified")
            stopped = rest.stop_rebalance()
            raise_if(
                not stopped,
                RebalanceNotStopException("unable to stop rebalance"))
    def __init_nodes(self):
        """Initialize all nodes. Rename node to hostname
        if needed by test.

        Records the smallest reported memory quota across nodes as the
        cluster-wide quota.
        """
        tasks = []
        for node in self.__nodes:
            tasks.append(
                self.__clusterop.async_init_node(
                    node))
        for task in tasks:
            mem_quota = task.result()
            # Track the minimum quota seen (0 means "not set yet").
            if mem_quota < self.__mem_quota or self.__mem_quota == 0:
                self.__mem_quota = mem_quota
        if self.__use_hostname:
            self.__hostnames.update(NodeHelper.rename_nodes(self.__nodes))
    def get_host_names(self):
        # Mapping populated by __init_nodes when use_hostname is set.
        return self.__hostnames
    def get_master_node(self):
        # First node of the ini list, used for all REST/admin operations.
        return self.__master_node
    def get_indexes(self):
        # FTS indexes created through this cluster object.
        return self.__indexes
    def set_bypass_fts_node(self, node):
        # Exclude *node* from random fts-node selection (e.g. during
        # crash/reboot scenarios).
        self.__bypass_fts_nodes.append(node)
def get_random_node(self):
return self.__nodes[random.randint(0, len(self.__nodes) - 1)]
    def get_random_fts_node(self):
        """Return a random fts node, excluding any bypassed nodes.

        @raise FTSException: when no fts node remains after exclusions.
        """
        self.__separate_nodes_on_services()
        # NOTE(review): list.remove raises ValueError if a bypassed node is
        # not currently in the fts list - confirm callers only bypass known
        # fts nodes.
        for node in self.__bypass_fts_nodes:
            self.__fts_nodes.remove(node)
        if not self.__fts_nodes:
            raise FTSException("No node in the cluster has 'fts' service"
                               " enabled")
        if len(self.__fts_nodes) == 1:
            return self.__fts_nodes[0]
        return self.__fts_nodes[random.randint(0, len(self.__fts_nodes) - 1)]
    def get_random_n1ql_node(self):
        """Return a random n1ql node, excluding any bypassed nodes.

        @raise FTSException: when no n1ql node remains after exclusions.
        """
        self.__separate_nodes_on_services()
        # NOTE(review): list.remove raises ValueError if a bypassed node is
        # not currently in the n1ql list - confirm callers only bypass known
        # n1ql nodes.
        for node in self.__bypass_n1ql_nodes:
            self.__n1ql_nodes.remove(node)
        if not self.__n1ql_nodes:
            raise FTSException("No node in the cluster has 'n1ql' service"
                               " enabled")
        if len(self.__n1ql_nodes) == 1:
            return self.__n1ql_nodes[0]
        return self.__n1ql_nodes[random.randint(0, len(self.__n1ql_nodes) - 1)]
def get_random_non_fts_node(self):
return self.__non_fts_nodes[random.randint(0, len(self.__fts_nodes) - 1)]
    def are_index_files_deleted_from_disk(self, index_name):
        """Poll every fts node until no files for *index_name* remain under
        the node's @fts data directory.

        @param index_name: fts index whose on-disk files are checked
        @return: True when all nodes report zero matching files; False when a
                 node still has files after ~5 retries (2s apart)
        """
        nodes = self.get_fts_nodes()
        for node in nodes:
            data_dir = RestConnection(node).get_data_path()
            # NOTE(review): the shell connection is never disconnected -
            # confirm whether RemoteMachineShellConnection cleans up itself.
            shell = RemoteMachineShellConnection(node)
            count = -1
            retry = 0
            while count != 0:
                # Count index files whose names start with the index name.
                count, err = shell.execute_command(
                    "ls {0}/@fts |grep ^{1} | wc -l".
                        format(data_dir, index_name))
                if isinstance(count, list):
                    count = int(count[0])
                else:
                    count = int(count)
                self.__log.info(count)
                time.sleep(2)
                retry += 1
                if retry > 5:
                    # Give up: log the leftover files and report failure.
                    files, err = shell.execute_command(
                        "ls {0}/@fts |grep ^{1}".
                            format(data_dir, index_name))
                    self.__log.info(files)
                    return False
        return True
    def get_mem_quota(self):
        # Minimum memory quota observed across nodes (set by __init_nodes).
        return self.__mem_quota
    def get_nodes(self):
        # All server objects belonging to this cluster.
        return self.__nodes
    def get_name(self):
        # Cluster label (e.g. "C1") used in logs.
        return self.__name
    def get_cluster(self):
        # Underlying task-scheduling Cluster() helper.
        return self.__clusterop
def get_kv_gen(self):
raise_if(
self._kv_gen is None,
FTSException(
"KV store is empty on couchbase cluster: %s" %
self))
return self._kv_gen
    def init_cluster(self, cluster_services, available_nodes):
        """Initialize cluster.
        1. Initialize all nodes.
        2. Add all nodes to the cluster based on services list
        @param cluster_services: list of cluster node services
        @param available_nodes: extra nodes available to be added
        @raise FTSException: not enough nodes, or rebalance failure
        """
        self.__log.info("Initializing Cluster ...")
        # Master node covers the first entry, so we need one spare node per
        # remaining service spec.
        if len(cluster_services) - 1 > len(available_nodes):
            raise FTSException("Only %s nodes present for given cluster"
                               "configuration %s"
                               % (len(available_nodes) + 1, cluster_services))
        self.__init_nodes()
        if available_nodes:
            nodes_to_add = []
            node_services = []
            node_num = 0
            for index, node_service in enumerate(cluster_services):
                # First "kv" entry maps onto the already-present master node.
                if index == 0 and node_service == "kv":
                    continue
                self.__log.info("%s will be configured with services %s" % (
                    available_nodes[node_num].ip,
                    node_service))
                nodes_to_add.append(available_nodes[node_num])
                node_services.append(node_service)
                node_num = node_num + 1
            try:
                self.__clusterop.async_rebalance(
                    self.__nodes,
                    nodes_to_add,
                    [],
                    use_hostnames=self.__use_hostname,
                    services=node_services).result()
            except Exception as e:
                raise FTSException("Unable to initialize cluster with config "
                                   "%s: %s" % (cluster_services, e))
            self.__nodes += nodes_to_add
        self.__separate_nodes_on_services()
        # Optionally tune forestdb compaction on real (non cluster-run)
        # deployments.
        if not self.is_cluster_run() and \
                (TestInputSingleton.input.param("fdb_compact_interval", None) or \
                 TestInputSingleton.input.param("fdb_compact_threshold", None)):
            for node in self.__fts_nodes:
                NodeHelper.set_cbft_env_fdb_options(node)
    def cleanup_cluster(
            self,
            test_case,
            cluster_shutdown=True):
        """Cleanup cluster.
        1. Remove all remote cluster references.
        2. Remove all replications.
        3. Remove all buckets.
        @param test_case: Test case object.
        @param cluster_shutdown: True if Task (task.py) Scheduler needs to
               shutdown else False
        """
        try:
            if self.get_indexes():
                self.delete_all_fts_indexes()
            self.__log.info("removing nodes from cluster ...")
            self.__stop_rebalance()
            self.__log.info("cleanup {0}".format(self.__nodes))
            for node in self.__nodes:
                BucketOperationHelper.delete_all_buckets_or_assert(
                    [node],
                    test_case)
                force_eject = TestInputSingleton.input.param(
                    "forceEject",
                    False)
                # Force-eject every non-master node when requested; otherwise
                # fall back to the regular cleanup path.
                if force_eject and node != self.__master_node:
                    try:
                        rest = RestConnection(node)
                        rest.force_eject_node()
                    except BaseException as e:
                        self.__log.error(e)
                else:
                    ClusterOperationHelper.cleanup_cluster([node])
                ClusterOperationHelper.wait_for_ns_servers_or_assert(
                    [node],
                    test_case)
        finally:
            if cluster_shutdown:
                self.__clusterop.shutdown(force=True)
            try:
                self.__log.info("Removing user 'cbadminbucket'...")
                RbacBase().remove_user_role(['cbadminbucket'], RestConnection(
                    self.__master_node))
            except Exception as e:
                # Best-effort: the user may already be gone.
                self.__log.info(e)
def _create_bucket_params(self, server, replicas=1, size=0, port=11211, password=None,
bucket_type='membase', enable_replica_index=1, eviction_policy='valueOnly',
bucket_priority=None, flush_enabled=1, lww=False, maxttl=None):
"""Create a set of bucket_parameters to be sent to all of the bucket_creation methods
Parameters:
server - The server to create the bucket on. (TestInputServer)
bucket_name - The name of the bucket to be created. (String)
port - The port to create this bucket on. (String)
password - The password for this bucket. (String)
size - The size of the bucket to be created. (int)
enable_replica_index - can be 0 or 1, 1 enables indexing of replica bucket data (int)
replicas - The number of replicas for this bucket. (int)
eviction_policy - The eviction policy for the bucket, can be valueOnly or fullEviction. (String)
bucket_priority - The priority of the bucket:either none, low, or high. (String)
bucket_type - The type of bucket. (String)
flushEnabled - Enable or Disable the flush functionality of the bucket. (int)
lww = determine the conflict resolution type of the bucket. (Boolean)
Returns:
bucket_params - A dictionary containing the parameters needed to create a bucket."""
bucket_params = {}
bucket_params['server'] = server
bucket_params['replicas'] = replicas
bucket_params['size'] = size
bucket_params['port'] = port
bucket_params['password'] = password
bucket_params['bucket_type'] = bucket_type
bucket_params['enable_replica_index'] = enable_replica_index
bucket_params['eviction_policy'] = eviction_policy
bucket_params['bucket_priority'] = bucket_priority
bucket_params['flush_enabled'] = flush_enabled
bucket_params['lww'] = lww
bucket_params['maxTTL'] = maxttl
return bucket_params
    def create_sasl_buckets(
            self, bucket_size, num_buckets=1, num_replicas=1,
            eviction_policy=EVICTION_POLICY.VALUE_ONLY,
            bucket_priority=BUCKET_PRIORITY.HIGH,
            bucket_type=None, maxttl=None):
        """Create sasl buckets.
        @param bucket_size: size of the bucket.
        @param num_buckets: number of buckets to create.
        @param num_replicas: number of replicas (1-3).
        @param eviction_policy: valueOnly etc.
        @param bucket_priority: high/low etc.
        @param bucket_type: membase/ephemeral etc. (None = server default)
        @param maxttl: maximum document TTL for the bucket.

        Buckets are named sasl_bucket_1..N with the fixed password
        'password'; creation tasks run in parallel and are joined at the end.
        """
        bucket_tasks = []
        for i in range(num_buckets):
            name = "sasl_bucket_" + str(i + 1)
            sasl_params = self._create_bucket_params(
                server=self.__master_node,
                password='password',
                size=bucket_size,
                replicas=num_replicas,
                eviction_policy=eviction_policy,
                bucket_priority=bucket_priority,
                bucket_type=bucket_type,
                maxttl=maxttl)
            bucket_tasks.append(self.__clusterop.async_create_sasl_bucket(name=name, password='password',
                                                                          bucket_params=sasl_params))
            # Record the bucket locally so later helpers can find it.
            self.__buckets.append(
                Bucket(
                    name=name, authType="sasl", saslPassword="password",
                    num_replicas=num_replicas, bucket_size=bucket_size,
                    eviction_policy=eviction_policy,
                    bucket_priority=bucket_priority,
                    maxttl=maxttl
                ))
        for task in bucket_tasks:
            task.result()
def create_standard_buckets(
self, bucket_size, name=None, num_buckets=1,
port=None, num_replicas=1,
eviction_policy=EVICTION_POLICY.VALUE_ONLY,
bucket_priority=BUCKET_PRIORITY.HIGH,
bucket_type=None, maxttl=None):
"""Create standard buckets.
@param bucket_size: size of the bucket.
@param num_buckets: number of buckets to create.
@param num_replicas: number of replicas (1-3).
@param eviction_policy: valueOnly etc.
@param bucket_priority: high/low etc.
"""
bucket_tasks = []
start_port = STANDARD_BUCKET_PORT
if port:
start_port = port
if not bucket_type:
bucket_type = 'membase'
for i in range(num_buckets):
if not (num_buckets == 1 and name):
name = "standard_bucket_" + str(i + 1)
standard_params = self._create_bucket_params(
server=self.__master_node,
size=bucket_size,
replicas=num_replicas,
eviction_policy=eviction_policy,
bucket_priority=bucket_priority,
bucket_type=bucket_type,
maxttl=maxttl)
bucket_tasks.append(
self.__clusterop.async_create_standard_bucket(
name=name, port=STANDARD_BUCKET_PORT + i,
bucket_params=standard_params))
self.__buckets.append(
Bucket(
name=name,
authType=None,
saslPassword=None,
num_replicas=num_replicas,
bucket_size=bucket_size,
port=start_port + i,
eviction_policy=eviction_policy,
bucket_priority=bucket_priority,
maxttl=maxttl
))
for task in bucket_tasks:
task.result()
    def create_default_bucket(
            self, bucket_size, num_replicas=1,
            eviction_policy=EVICTION_POLICY.VALUE_ONLY,
            bucket_priority=BUCKET_PRIORITY.HIGH,
            bucket_type=None, maxttl=None):
        """Create default bucket.
        @param bucket_size: size of the bucket.
        @param num_replicas: number of replicas (1-3).
        @param eviction_policy: valueOnly etc.
        @param bucket_priority: high/low etc.
        @param bucket_type: membase/ephemeral etc. (None = server default)
        @param maxttl: maximum document TTL for the bucket.

        Unlike the sasl/standard helpers this is synchronous.
        """
        bucket_params = self._create_bucket_params(
            server=self.__master_node,
            size=bucket_size,
            replicas=num_replicas,
            eviction_policy=eviction_policy,
            bucket_priority=bucket_priority,
            bucket_type=bucket_type,
            maxttl=maxttl
        )
        self.__clusterop.create_default_bucket(bucket_params)
        # Record the bucket locally so later helpers can find it.
        self.__buckets.append(
            Bucket(
                name=BUCKET_NAME.DEFAULT,
                authType="sasl",
                saslPassword="",
                num_replicas=num_replicas,
                bucket_size=bucket_size,
                eviction_policy=eviction_policy,
                bucket_priority=bucket_priority,
                maxttl=maxttl
            ))
    def create_fts_index(self, name, source_type='couchbase',
                         source_name=None, index_type='fulltext-index',
                         index_params=None, plan_params=None,
                         source_params=None, source_uuid=None):
        """Create fts index/alias
        @param name: name of the index/alias
        @param source_type : 'couchbase' or 'files'
        @param source_name : name of couchbase bucket or "" for alias
        @param index_type : 'fulltext-index' or 'fulltext-alias'
        @param index_params : to specify advanced index mapping;
                              dictionary overriding params in
                              INDEX_DEFAULTS.BLEVE_MAPPING or
                              INDEX_DEFAULTS.ALIAS_DEFINITION depending on
                              index_type
        @param plan_params : dictionary overriding params defined in
                             INDEX_DEFAULTS.PLAN_PARAMS
        @param source_params: dictionary overriding params defined in
                              INDEX_DEFAULTS.SOURCE_CB_PARAMS or
                              INDEX_DEFAULTS.SOURCE_FILE_PARAMS
        @param source_uuid: UUID of the source, may not be used
        @return: the created FTSIndex object
        """
        index = FTSIndex(
            self,
            name,
            source_type,
            source_name,
            index_type,
            index_params,
            plan_params,
            source_params,
            source_uuid
        )
        index.create()
        return index
    def create_fts_index_wait_for_completion(self, sample_index_name_1, sample_bucket_name):
        """Create an fts index on *sample_bucket_name* and block (polling
        every 10s) until its indexed-doc count reaches the bucket item count.

        NOTE(review): there is no timeout - if indexing stalls this loop
        never exits; confirm callers rely on the outer test timeout.
        @return: the created FTSIndex object
        """
        fts_idx = self.create_fts_index(name=sample_index_name_1, source_name=sample_bucket_name)
        indexed_doc_count = 0
        self.__log.info(RestConnection(self.get_master_node()).get_buckets_itemCount()[sample_bucket_name])
        while indexed_doc_count < RestConnection(self.get_master_node()).get_buckets_itemCount()[sample_bucket_name]:
            try:
                time.sleep(10)
                indexed_doc_count = fts_idx.get_indexed_doc_count()
            except KeyError as k:
                # Stats may not be published yet right after creation.
                continue
        return fts_idx
def get_fts_index_by_name(self, name):
""" Returns an FTSIndex object with the given name """
for index in self.__indexes:
if index.name == name:
return index
def delete_fts_index(self, name):
""" Delete an FTSIndex object with the given name from a given node """
for index in self.__indexes:
if index.name == name:
index.delete()
    def delete_all_fts_indexes(self):
        """ Delete all FTSIndexes from a given node """
        # Note: entries stay in self.__indexes after deletion.
        for index in self.__indexes:
            index.delete()
    def run_fts_query(self, index_name, query_dict, node=None, timeout=70):
        """ Runs a query defined in query_json against an index/alias and
            a specific node

        @param index_name: index/alias to query
        @param query_dict: full fts query body (dict)
        @param node: fts node to target (random fts node when None)
        @param timeout: REST timeout in seconds
        @return total_hits : total hits for the query,
        @return hit_list : list of docs that match the query
        @return time_taken, status: as reported by the fts REST endpoint
        """
        if not node:
            node = self.get_random_fts_node()
        self.__log.info("Running query %s on node: %s:%s"
                        % (json.dumps(query_dict, ensure_ascii=False),
                           node.ip, node.fts_port))
        total_hits, hit_list, time_taken, status = \
            RestConnection(node).run_fts_query(index_name, query_dict, timeout=timeout)
        return total_hits, hit_list, time_taken, status
    def run_n1ql_query(self, query="", node=None, timeout=70):
        """ Runs a n1ql query string against a specific node.

        @param query: N1QL statement string
        @param node: n1ql node to target (random n1ql node when None)
        @param timeout: accepted for symmetry with run_fts_query but
                        currently unused - query_tool is called without it
        @return: raw query_tool response
        """
        if not node:
            node = self.get_random_n1ql_node()
        res = RestConnection(node).query_tool(query)
        return res
    def run_fts_query_with_facets(self, index_name, query_dict, node=None):
        """ Runs a query defined in query_json against an index/alias and
            a specific node, returning facet results as well.

        @param index_name: index/alias to query
        @param query_dict: full fts query body (dict), including facets
        @param node: fts node to target (random fts node when None)
        @return total_hits : total hits for the query,
        @return hit_list : list of docs that match the query
        @return time_taken, status, facets: as reported by the fts endpoint
        """
        if not node:
            node = self.get_random_fts_node()
        self.__log.info("Running query %s on node: %s:%s"
                        % (json.dumps(query_dict, ensure_ascii=False),
                           node.ip, node.fts_port))
        total_hits, hit_list, time_taken, status, facets = \
            RestConnection(node).run_fts_query_with_facets(index_name, query_dict)
        return total_hits, hit_list, time_taken, status, facets
    def get_buckets(self):
        # Lazily populate the local cache from the server when empty.
        if not self.__buckets:
            self.__buckets = RestConnection(self.__master_node).get_buckets()
        return self.__buckets
def get_bucket_by_name(self, bucket_name):
"""Return the bucket with given name
@param bucket_name: bucket name.
@return: bucket object
"""
for bucket in RestConnection(self.__master_node).get_buckets():
if bucket.name == bucket_name:
return bucket
raise Exception(
"Bucket with name: %s not found on the cluster" %
bucket_name)
    def get_doc_count_in_bucket(self, bucket):
        # Active (non-replica) item count reported by the master node.
        return RestConnection(self.__master_node).get_active_key_count(bucket)
def delete_bucket(self, bucket_name):
"""Delete bucket with given name
@param bucket_name: bucket name (string) to delete
"""
bucket_to_remove = self.get_bucket_by_name(bucket_name)
self.__clusterop.bucket_delete(
self.__master_node,
bucket_to_remove.name)
for bucket_in in self.__buckets:
if bucket_in.name == bucket_to_remove:
self.__buckets.remove(bucket_in)
def delete_all_buckets(self):
for bucket_to_remove in self.__buckets:
self.__clusterop.bucket_delete(
self.__master_node,
bucket_to_remove.name)
self.__buckets.remove(bucket_to_remove)
def flush_buckets(self, buckets=[]):
buckets = buckets or self.__buckets
tasks = []
for bucket in buckets:
tasks.append(self.__clusterop.async_bucket_flush(
self.__master_node,
bucket))
[task.result() for task in tasks]
    def load_from_high_ops_loader(self, bucket):
        """Load *bucket* using the high-throughput ops loader.

        Batch size, instance count, thread count and item count come from
        the test params (batch_size/instances/threads/items).
        """
        input = TestInputSingleton.input
        batch_size = input.param("batch_size", 1000)
        instances = input.param("instances", 8)
        threads = input.param("threads", 8)
        items = input.param("items", 6000000)
        self.__clusterop.load_buckets_with_high_ops(
            server=self.__master_node,
            bucket=bucket,
            items=items,
            batch=batch_size,
            threads=threads,
            start_document=0,
            instances=instances,
            ttl=0)
    def check_dataloss_with_high_ops_loader(self, bucket):
        """Verify no data was lost after a high-ops load of *bucket*.

        Item count comes from the 'items' test param; the remaining loader
        options mirror load_from_high_ops_loader with fixed batch/threads.
        """
        self.__clusterop.check_dataloss_for_high_ops_loader(
            self.__master_node,
            bucket,
            TestInputSingleton.input.param("items", 6000000),
            batch=20000,
            threads=5,
            start_document=0,
            updated=False,
            ops=0,
            ttl=0,
            deleted=False,
            deleted_items=0)
    def async_load_bucket(self, bucket, num_items, exp=0,
                          kv_store=1, flag=0, only_store_hash=True,
                          batch_size=1000, pause_secs=1, timeout_secs=30):
        """Load data asynchronously on given bucket. Function don't wait for
        load data to finish, return immidiately.
        @param bucket: bucket where to load data.
        @param num_items: number of items to load
        @param exp: expiration value.
        @param kv_store: kv store index.
        @param flag:
        @param only_store_hash: True to store hash of item else False.
        @param batch_size: batch size for load data at a time.
        @param pause_secs: pause for next batch load.
        @param timeout_secs: timeout
        @return: task object
        """
        # Keys are prefixed with the cluster name so multi-cluster tests
        # don't collide.
        seed = "%s-key-" % self.__name
        self._kv_gen[OPS.CREATE] = JsonDocGenerator(seed,
                                                    encoding="utf-8",
                                                    start=0,
                                                    end=num_items)
        # Deep-copy so the stored generator keeps its initial state for
        # later verification.
        gen = copy.deepcopy(self._kv_gen[OPS.CREATE])
        task = self.__clusterop.async_load_gen_docs(
            self.__master_node, bucket.name, gen, bucket.kvs[kv_store],
            OPS.CREATE, exp, flag, only_store_hash, batch_size, pause_secs,
            timeout_secs, compression=self.sdk_compression)
        return task
def load_bucket(self, bucket, num_items, value_size=512, exp=0,
kv_store=1, flag=0, only_store_hash=True,
batch_size=1000, pause_secs=1, timeout_secs=30):
"""Load data synchronously on given bucket. Function wait for
load data to finish.
@param bucket: bucket where to load data.
@param num_items: number of items to load
@param value_size: size of the one item.
@param exp: expiration value.
@param kv_store: kv store index.
@param flag:
@param only_store_hash: True to store hash of item else False.
@param batch_size: batch size for load data at a time.
@param pause_secs: pause for next batch load.
@param timeout_secs: timeout
"""
task = self.async_load_bucket(bucket, num_items, value_size, exp,
kv_store, flag, only_store_hash,
batch_size, pause_secs, timeout_secs)
task.result()
    def async_load_all_buckets(self, num_items, exp=0,
                               kv_store=1, flag=0, only_store_hash=True,
                               batch_size=1000, pause_secs=1, timeout_secs=30):
        """Load data asynchronously on all buckets of the cluster.
        Function don't wait for load data to finish, return immidiately.
        @param num_items: number of items to load
        @param exp: expiration value.
        @param kv_store: kv store index.
        @param flag:
        @param only_store_hash: True to store hash of item else False.
        @param batch_size: batch size for load data at a time.
        @param pause_secs: pause for next batch load.
        @param timeout_secs: timeout
        @return: task objects list
        """
        # Keys are prefixed with the cluster name so multi-cluster tests
        # don't collide.
        prefix = "%s-" % self.__name
        self._kv_gen[OPS.CREATE] = JsonDocGenerator(prefix,
                                                    encoding="utf-8",
                                                    start=0,
                                                    end=num_items)
        tasks = []
        for bucket in self.__buckets:
            # Fresh copy per bucket: generators are stateful iterators.
            gen = copy.deepcopy(self._kv_gen[OPS.CREATE])
            tasks.append(
                self.__clusterop.async_load_gen_docs(
                    self.__master_node, bucket.name, gen, bucket.kvs[kv_store],
                    OPS.CREATE, exp, flag, only_store_hash, batch_size,
                    pause_secs, timeout_secs, compression=self.sdk_compression)
            )
        return tasks
    def load_all_buckets(self, num_items, value_size=512, exp=0,
                         kv_store=1, flag=0, only_store_hash=True,
                         batch_size=1000, pause_secs=1, timeout_secs=30):
        """Load data synchronously on all buckets. Function wait for
        load data to finish.
        @param num_items: number of items to load
        @param value_size: accepted for interface compatibility but not
                           forwarded - the doc generator has no value size.
        @param exp: expiration value.
        @param kv_store: kv store index.
        @param flag:
        @param only_store_hash: True to store hash of item else False.
        @param batch_size: batch size for load data at a time.
        @param pause_secs: pause for next batch load.
        @param timeout_secs: timeout
        """
        tasks = self.async_load_all_buckets(
            num_items, exp, kv_store, flag, only_store_hash,
            batch_size, pause_secs, timeout_secs)
        for task in tasks:
            task.result()
def load_all_buckets_from_generator(self, kv_gen, ops=OPS.CREATE, exp=0,
kv_store=1, flag=0, only_store_hash=True,
batch_size=1000, pause_secs=1, timeout_secs=30):
"""Load data synchronously on all buckets. Function wait for
load data to finish.
@param gen: BlobGenerator() object
@param ops: OPS.CREATE/UPDATE/DELETE/APPEND.
@param exp: expiration value.
@param kv_store: kv store index.
@param flag:
@param only_store_hash: True to store hash of item else False.
@param batch_size: batch size for load data at a time.
@param pause_secs: pause for next batch load.
@param timeout_secs: timeout
"""
tasks = self.async_load_all_buckets_from_generator(kv_gen)
for task in tasks:
task.result()
    def async_load_all_buckets_from_generator(self, kv_gen, ops=OPS.CREATE, exp=0,
                                              kv_store=1, flag=0, only_store_hash=True,
                                              batch_size=5000, pause_secs=1, timeout_secs=30):
        """Load data asynchronously on all buckets. Function wait for
        load data to finish.
        @param kv_gen: BlobGenerator() object
        @param ops: OPS.CREATE/UPDATE/DELETE/APPEND.
        @param exp: expiration value.
        @param kv_store: kv store index.
        @param flag:
        @param only_store_hash: True to store hash of item else False.
        @param batch_size: batch size for load data at a time.
        @param pause_secs: pause for next batch load.
        @param timeout_secs: timeout
        @return: task objects list
        """
        # Remember the first generator seen per op so later verification can
        # replay it; an existing entry is deliberately kept.
        if ops not in self._kv_gen:
            self._kv_gen[ops] = kv_gen
        tasks = []
        if not self.__buckets:
            self.__buckets = RestConnection(self.__master_node).get_buckets()
        for bucket in self.__buckets:
            # Fresh copy per bucket: generators are stateful iterators.
            kv_gen = copy.deepcopy(self._kv_gen[ops])
            tasks.append(
                self.__clusterop.async_load_gen_docs(
                    self.__master_node, bucket.name, kv_gen,
                    bucket.kvs[kv_store], ops, exp, flag,
                    only_store_hash, batch_size, pause_secs, timeout_secs, compression=self.sdk_compression)
            )
        return tasks
def async_load_bucket_from_generator(self, bucket, kv_gen, ops=OPS.CREATE, exp=0,
kv_store=1, flag=0, only_store_hash=True,
batch_size=5000, pause_secs=1, timeout_secs=30):
"""Load data asynchronously on all buckets. Function wait for
load data to finish.
@param bucket: pass object of bucket to load into
@param gen: BlobGenerator() object
@param ops: OPS.CREATE/UPDATE/DELETE/APPEND.
@param exp: expiration value.
@param kv_store: kv store index.
@param flag:
@param only_store_hash: True to store hash of item else False.
@param batch_size: batch size for load data at a time.
@param pause_secs: pause for next batch load.
@param timeout_secs: timeout
"""
task = []
task.append(
self.__clusterop.async_load_gen_docs(
self.__master_node, bucket.name, kv_gen,
bucket.kvs[kv_store], ops, exp, flag,
only_store_hash, batch_size, pause_secs, timeout_secs, compression=self.sdk_compression)
)
return task
    def load_all_buckets_till_dgm(self, active_resident_ratio, es=None,
                                  items=1000, exp=0, kv_store=1, flag=0,
                                  only_store_hash=True, batch_size=1000,
                                  pause_secs=1, timeout_secs=30):
        """Load data synchronously on all buckets till dgm (Data greater than memory)
        for given active_resident_ratio.
        Loads `items` baseline keys first, then keeps generating extra JSON
        docs in growing batches until the active resident ratio drops below
        the target. The combined generator is merged back into
        self._kv_gen[OPS.CREATE] so later update/delete ops cover all keys.
        @param active_resident_ratio: Dgm threshold.
        @param es: optional Elasticsearch handle to mirror the load into.
        @param items: number of baseline keys to load first.
        @param exp: expiration value.
        @param kv_store: kv store index.
        @param flag: item flag value.
        @param only_store_hash: True to store hash of item else False.
        @param batch_size: batch size for load data at a time.
        @param pause_secs: pause for next batch load.
        @param timeout_secs: timeout
        @return: the merged OPS.CREATE generator covering all loaded keys.
        """
        items = int(items)
        self.__log.info("First loading \"items\" {0} number keys to handle "
                        "update/deletes in dgm cases".format(items))
        self.load_all_buckets(items)
        self.__log.info("Now loading extra keys to reach dgm limit")
        seed = "%s-" % self.__name
        end = 0
        # Resident ratio of the first bucket decides when to stop.
        current_active_resident = StatsCommon.get_stats(
            [self.__master_node],
            self.__buckets[0],
            '',
            'vb_active_perc_mem_resident')[self.__master_node]
        start = items
        while int(current_active_resident) > active_resident_ratio:
            batch_size = 1000
            # Far from the target: load 100x batches; close: only 10x.
            if int(current_active_resident) - active_resident_ratio > 5:
                end = start + batch_size * 100
                batch_size = batch_size * 100
            else:
                end = start + batch_size * 10
                batch_size = batch_size * 10
            self.__log.info("Generating %s keys ..." % (end - start))
            kv_gen = JsonDocGenerator(seed,
                                      encoding="utf-8",
                                      start=start,
                                      end=end)
            self.__log.info("Loading %s keys ..." % (end - start))
            tasks = []
            for bucket in self.__buckets:
                tasks.append(self.__clusterop.async_load_gen_docs(
                    self.__master_node, bucket.name, copy.deepcopy(kv_gen), bucket.kvs[kv_store],
                    OPS.CREATE, exp, flag, only_store_hash, batch_size,
                    pause_secs, timeout_secs, compression=self.sdk_compression))
            # Mirror the same docs into ES when result comparison is on.
            if es:
                tasks.append(es.async_bulk_load_ES(index_name='default_es_index',
                                                   gen=kv_gen,
                                                   op_type='create'))
            for task in tasks:
                task.result(timeout=2000)
            start = end
            # Re-sample the ratio after each wave to decide on another pass.
            current_active_resident = StatsCommon.get_stats(
                [self.__master_node],
                bucket,
                '',
                'vb_active_perc_mem_resident')[self.__master_node]
            self.__log.info(
                "Current resident ratio: %s, desired: %s bucket %s" % (
                    current_active_resident,
                    active_resident_ratio,
                    bucket.name))
            # Merge the freshly generated docs into the CREATE generator.
            self._kv_gen[OPS.CREATE].gen_docs.update(kv_gen.gen_docs)
            self._kv_gen[OPS.CREATE].end = kv_gen.end
            self.__log.info("Loaded a total of %s keys into bucket %s"
                            % (end, bucket.name))
        return self._kv_gen[OPS.CREATE]
def update_bucket(self, bucket, fields_to_update=None, exp=0,
kv_store=1, flag=0, only_store_hash=True,
batch_size=1000, pause_secs=1, timeout_secs=30):
"""Load data synchronously on given bucket. Function wait for
load data to finish.
@param bucket: bucket where to load data.
@param fields_to_update: list of fields to update in loaded JSON
@param value_size: size of the one item.
@param exp: expiration value.
@param kv_store: kv store index.
@param flag:
@param only_store_hash: True to store hash of item else False.
@param batch_size: batch size for load data at a time.
@param pause_secs: pause for next batch load.
@param timeout_secs: timeout
"""
self.__log.info("Updating fields %s in bucket %s" % (fields_to_update,
bucket.name))
task = self.async_update_bucket(bucket, fields_to_update=fields_to_update,
exp=exp, kv_store=kv_store, flag=flag,
only_store_hash=only_store_hash,
batch_size=batch_size,
pause_secs=pause_secs,
timeout_secs=timeout_secs)
task.result()
    def async_update_bucket(self, bucket, fields_to_update=None, exp=0,
                            kv_store=1, flag=0, only_store_hash=True,
                            batch_size=1000, pause_secs=1, timeout_secs=30):
        """Update data asynchronously on given bucket. Function don't wait for
        load data to finish, return immediately.
        Rebuilds self._kv_gen[OPS.UPDATE] from the CREATE generator so the
        first 30% of originally created keys are updated.
        @param bucket: bucket where to load data.
        @param fields_to_update: list of fields to update in loaded JSON
        @param exp: expiration value.
        @param kv_store: kv store index.
        @param flag: item flag value.
        @param only_store_hash: True to store hash of item else False.
        @param batch_size: batch size for load data at a time.
        @param pause_secs: pause for next batch load.
        @param timeout_secs: timeout
        @return: task object
        """
        # Fixed update percentage: first 30% of created keys.
        perc = 30
        self._kv_gen[OPS.UPDATE] = copy.deepcopy(self._kv_gen[OPS.CREATE])
        self._kv_gen[OPS.UPDATE].start = 0
        self._kv_gen[OPS.UPDATE].end = int(self._kv_gen[OPS.CREATE].end
                                           * (float)(perc) / 100)
        self._kv_gen[OPS.UPDATE].update(fields_to_update=fields_to_update)
        task = self.__clusterop.async_load_gen_docs(
            self.__master_node, bucket.name, self._kv_gen[OPS.UPDATE],
            bucket.kvs[kv_store], OPS.UPDATE, exp, flag, only_store_hash,
            batch_size, pause_secs, timeout_secs, compression=self.sdk_compression)
        return task
def update_delete_data(
self, op_type, fields_to_update=None, perc=30, expiration=0,
wait_for_expiration=True):
"""Perform update/delete operation on all buckets. Function wait
operation to finish.
@param op_type: OPS.CREATE/OPS.UPDATE/OPS.DELETE
@param fields_to_update: list of fields to be updated in the JSON
@param perc: percentage of data to be deleted or created
@param expiration: time for expire items
@param wait_for_expiration: True if wait for expire of items after
update else False
"""
tasks = self.async_update_delete(op_type, fields_to_update, perc, expiration)
[task.result() for task in tasks]
if wait_for_expiration and expiration:
self.__log.info("Waiting for expiration of updated items")
time.sleep(expiration)
    def async_update_delete(
            self, op_type, fields_to_update=None, perc=30, expiration=0,
            kv_store=1):
        """Perform update/delete operation on all buckets. Function don't wait
        operation to finish.
        UPDATE targets the first `perc` percent of created keys; DELETE
        targets the last `perc` percent, so the two ranges don't overlap
        when perc <= 50.
        @param op_type: OPS.CREATE/OPS.UPDATE/OPS.DELETE
        @param fields_to_update: list of fields to be updated in JSON
        @param perc: percentage of data to be deleted or created
        @param expiration: time for expire items
        @param kv_store: kv store index.
        @return: task object list
        @raise FTSException: if no data was loaded first, or op_type is
        unknown.
        """
        raise_if(
            OPS.CREATE not in self._kv_gen,
            FTSException(
                "Data is not loaded in cluster.Load data before update/delete")
        )
        tasks = []
        for bucket in self.__buckets:
            if op_type == OPS.UPDATE:
                # Re-derive the UPDATE generator from CREATE per bucket.
                self._kv_gen[OPS.UPDATE] = copy.deepcopy(self._kv_gen[OPS.CREATE])
                self._kv_gen[OPS.UPDATE].start = 0
                self._kv_gen[OPS.UPDATE].end = int(self._kv_gen[OPS.CREATE].end
                                                   * (float)(perc) / 100)
                self._kv_gen[OPS.UPDATE].update(fields_to_update=fields_to_update)
                gen = self._kv_gen[OPS.UPDATE]
            elif op_type == OPS.DELETE:
                # Delete the tail (100 - perc)%..100% of the key range.
                self._kv_gen[OPS.DELETE] = JsonDocGenerator(
                    self._kv_gen[OPS.CREATE].name,
                    op_type=OPS.DELETE,
                    encoding="utf-8",
                    start=int((self._kv_gen[OPS.CREATE].end)
                              * (float)(100 - perc) / 100),
                    end=self._kv_gen[OPS.CREATE].end)
                gen = copy.deepcopy(self._kv_gen[OPS.DELETE])
            else:
                raise FTSException("Unknown op_type passed: %s" % op_type)
            self.__log.info("At bucket '{0}' @ {1}: operation: {2}, key range {3} - {4}".
                        format(bucket.name, self.__name, op_type, gen.start, gen.end - 1))
            tasks.append(
                self.__clusterop.async_load_gen_docs(
                    self.__master_node,
                    bucket.name,
                    gen,
                    bucket.kvs[kv_store],
                    op_type,
                    expiration,
                    batch_size=1000,
                    compression=self.sdk_compression)
            )
        return tasks
def async_run_fts_query_compare(self, fts_index, es, query_index, es_index_name=None, n1ql_executor=None):
"""
Asynchronously run query against FTS and ES and compare result
note: every task runs a single query
"""
task = self.__clusterop.async_run_fts_query_compare(fts_index=fts_index,
es_instance=es,
query_index=query_index,
es_index_name=es_index_name,
n1ql_executor=n1ql_executor)
return task
def run_expiry_pager(self, val=10):
"""Run expiry pager process and set interval to 10 seconds
and wait for 10 seconds.
@param val: time in seconds.
"""
for bucket in self.__buckets:
ClusterOperationHelper.flushctl_set(
self.__master_node,
"exp_pager_stime",
val,
bucket)
self.__log.info("wait for expiry pager to run on all these nodes")
time.sleep(val)
    def disable_compaction(self, bucket=BUCKET_NAME.DEFAULT):
        """Disable view and db fragmentation-triggered compaction by
        clearing all fragmentation thresholds.
        @param bucket: bucket name.
        """
        # NOTE(review): the "viewFragmntThreshold*" spellings appear to be
        # the literal keys expected by modify_fragmentation_config — do not
        # "correct" them without checking that API.
        new_config = {"viewFragmntThresholdPercentage": None,
                      "dbFragmentThresholdPercentage": None,
                      "dbFragmentThreshold": None,
                      "viewFragmntThreshold": None}
        self.__clusterop.modify_fragmentation_config(
            self.__master_node,
            new_config,
            bucket)
def __async_rebalance_out(self, master=False, num_nodes=1):
"""Rebalance-out nodes from Cluster
@param master: True if rebalance-out master node only.
@param num_nodes: number of nodes to rebalance-out from cluster.
"""
raise_if(
len(self.__nodes) <= num_nodes,
FTSException(
"Cluster needs:{0} nodes for rebalance-out, current: {1}".
format((num_nodes + 1), len(self.__nodes)))
)
if master:
to_remove_node = [self.__master_node]
else:
to_remove_node = self.__nodes[-num_nodes:]
self.__log.info(
"Starting rebalance-out nodes:{0} at {1} cluster {2}".format(
to_remove_node, self.__name, self.__master_node.ip))
task = self.__clusterop.async_rebalance(
self.__nodes,
[],
to_remove_node)
[self.__nodes.remove(node) for node in to_remove_node]
if master:
self.__master_node = self.__nodes[0]
return task
    def async_rebalance_out_master(self):
        """Asynchronously rebalance out the master node; returns the task."""
        return self.__async_rebalance_out(master=True)
    def async_rebalance_out(self, num_nodes=1):
        """Asynchronously rebalance out `num_nodes` non-master nodes;
        returns the task."""
        return self.__async_rebalance_out(num_nodes=num_nodes)
    def rebalance_out_master(self):
        """Rebalance out the master node and wait for completion."""
        task = self.__async_rebalance_out(master=True)
        task.result()
    def rebalance_out(self, num_nodes=1):
        """Rebalance out `num_nodes` non-master nodes and wait for
        completion."""
        task = self.__async_rebalance_out(num_nodes=num_nodes)
        task.result()
    def enable_retry_rebalance(self, retry_time, num_retries):
        """Enable automatic rebalance retries on the cluster.
        @param retry_time: seconds to wait before retrying a failed
        rebalance (afterTimePeriod).
        @param num_retries: maximum number of retry attempts (maxAttempts).
        """
        body = {"enabled": "true", "afterTimePeriod": retry_time, "maxAttempts": num_retries}
        rest = RestConnection(self.get_master_node())
        rest.set_retry_rebalance_settings(body)
        # Read the settings back and log them for test diagnostics.
        result = rest.get_retry_rebalance_settings()
        self.__log.info("Retry Rebalance settings changed to : {0}"
                        .format(json.loads(result)))
def disable_retry_rebalance(self):
rest = RestConnection(self.get_master_node())
body = {"enabled": "false"}
rest.set_retry_rebalance_settings(body)
    def async_rebalance_in(self, num_nodes=1, services=None):
        """Rebalance-in nodes into Cluster asynchronously.
        @param num_nodes: number of nodes to rebalance-in to cluster.
        @param services: optional per-node service list for the new nodes.
        @return: the async rebalance task.
        @raise FTSException: if the free-server pool has too few nodes.
        """
        raise_if(
            len(FloatingServers._serverlist) < num_nodes,
            FTSException(
                "Number of free nodes: {0}, test tried to add {1} new nodes!".
                format(len(FloatingServers._serverlist), num_nodes))
        )
        # Take nodes from the shared free-server pool.
        to_add_node = []
        for _ in range(num_nodes):
            to_add_node.append(FloatingServers._serverlist.pop())
        self.__log.info(
            "Starting rebalance-in nodes:{0} at {1} cluster {2}".format(
                to_add_node, self.__name, self.__master_node.ip))
        task = self.__clusterop.async_rebalance(self.__nodes, to_add_node, [],
                                                services=services)
        # Track the new nodes immediately; the rebalance runs in background.
        self.__nodes.extend(to_add_node)
        return task
def rebalance_in(self, num_nodes=1, services=None):
"""Rebalance-in nodes
@param num_nodes: number of nodes to add to cluster.
"""
task = self.async_rebalance_in(num_nodes, services=services)
task.result()
    def __async_swap_rebalance(self, master=False, num_nodes=1, services=None):
        """Swap-rebalance nodes on Cluster: remove `num_nodes` (or the
        master) while adding the same number from the free-server pool.
        @param master: True if swap-rebalance master node else False.
        @param num_nodes: number of nodes to swap.
        @param services: optional per-node service list for added nodes.
        @return: the async rebalance task.
        @raise FTSException: if the free-server pool has too few nodes.
        """
        if master:
            to_remove_node = [self.__master_node]
        else:
            to_remove_node = self.__nodes[len(self.__nodes) - num_nodes:]
        raise_if(
            len(FloatingServers._serverlist) < num_nodes,
            FTSException(
                "Number of free nodes: {0}, test tried to add {1} new nodes!".
                format(len(FloatingServers._serverlist), num_nodes))
        )
        to_add_node = []
        for _ in range(num_nodes):
            node = FloatingServers._serverlist.pop()
            # Skip pool nodes already present in the cluster. NOTE(review):
            # such nodes are still popped off the pool, so fewer than
            # num_nodes may actually be added — confirm this is intended.
            if node not in self.__nodes:
                to_add_node.append(node)
        self.__log.info(
            "Starting swap-rebalance [remove_node:{0}] -> [add_node:{1}] at"
            " {2} cluster {3}"
            .format(to_remove_node, to_add_node, self.__name,
                    self.__master_node.ip))
        task = self.__clusterop.async_rebalance(
            self.__nodes,
            to_add_node,
            to_remove_node,
            services=services)
        # Update bookkeeping immediately; rebalance runs in background.
        for remove_node in to_remove_node:
            self.__nodes.remove(remove_node)
        self.__nodes.extend(to_add_node)
        if master:
            # Promote the first remaining node to master.
            self.__master_node = self.__nodes[0]
        return task
    def async_swap_rebalance_master(self, services=None):
        """Swap-rebalance the master node.
        Returns without waiting for swap rebalance to complete.
        """
        return self.__async_swap_rebalance(master=True, services=services)
    def async_swap_rebalance(self, num_nodes=1, services=None):
        """Swap-rebalance `num_nodes` non-master nodes; returns the task
        without waiting."""
        return self.__async_swap_rebalance(num_nodes=num_nodes,
                                           services=services)
    def swap_rebalance_master(self, services=None):
        """Swap rebalance master node and wait
        """
        task = self.__async_swap_rebalance(master=True, services=services)
        task.result()
    def swap_rebalance(self, services=None, num_nodes=1):
        """Swap rebalance non-master node(s) and wait for completion.
        """
        task = self.__async_swap_rebalance(services=services,
                                           num_nodes=num_nodes)
        task.result()
    def async_failover_and_rebalance(self, master=False, num_nodes=1,
                                     graceful=False):
        """Failover nodes, then start a rebalance asynchronously.
        NOTE: despite the name, the failover itself is waited on; only the
        follow-up rebalance is returned as async tasks.
        @param master: True if failover master node only.
        @param num_nodes: number of nodes to rebalance-out from cluster.
        @param graceful: True if graceful failover else False.
        @return: the async rebalance task(s).
        """
        task = self.__async_failover(master=master,
                                     num_nodes=num_nodes,
                                     graceful=graceful)
        # Block until the failover finishes before starting the rebalance.
        task.result()
        tasks = self.__clusterop.async_rebalance(self.__nodes, [], [],
                                                 services=None)
        return tasks
    def failover(self, master=False, num_nodes=1,
                 graceful=False):
        """synchronously failover nodes from Cluster
        @param master: True if failover master node only.
        @param num_nodes: number of nodes to rebalance-out from cluster.
        @param graceful: True if graceful failover else False.
        """
        task = self.__async_failover(master=master,
                                     num_nodes=num_nodes,
                                     graceful=graceful)
        task.result()
    def __async_failover(self, master=False, num_nodes=1, graceful=False, node=None):
        """Failover nodes from Cluster; records the chosen nodes in
        self.__fail_over_nodes for later rebalance/add-back.
        @param master: True if failover master node only.
        @param num_nodes: number of nodes to rebalance-out from cluster.
        @param graceful: True if graceful failover else False.
        @param node: Specific node to be failed over
        @return: the async failover task.
        @raise FTSException: if the cluster has only one node.
        """
        raise_if(
            len(self.__nodes) <= 1,
            FTSException(
                "More than 1 node required in cluster to perform failover")
        )
        # Precedence: explicit node > master flag > last num_nodes nodes.
        if node:
            self.__fail_over_nodes = [node]
        elif master:
            self.__fail_over_nodes = [self.__master_node]
        else:
            self.__fail_over_nodes = self.__nodes[-num_nodes:]
        self.__log.info(
            "Starting failover for nodes:{0} at {1} cluster {2}".format(
                self.__fail_over_nodes, self.__name, self.__master_node.ip))
        task = self.__clusterop.async_failover(
            self.__nodes,
            self.__fail_over_nodes,
            graceful)
        return task
    def async_failover(self, master=False, num_nodes=1, graceful=False, node=None):
        """Public wrapper: failover asynchronously and return the task."""
        return self.__async_failover(master=master, num_nodes=num_nodes, graceful=graceful, node=node)
    def failover_and_rebalance_master(self, graceful=False, rebalance=True):
        """Failover master node
        @param graceful: True if graceful failover else False
        @param rebalance: True if do rebalance operation after failover.
        """
        task = self.__async_failover(master=True, graceful=graceful)
        task.result()
        if graceful:
            # wait for replica update
            time.sleep(60)
            # use rebalance stats to monitor failover
            RestConnection(self.__master_node).monitorRebalance()
        if rebalance:
            self.rebalance_failover_nodes()
        # Promote the first remaining node to master.
        self.__master_node = self.__nodes[0]
    def failover_and_rebalance_nodes(self, num_nodes=1, graceful=False,
                                     rebalance=True):
        """ Failover non-master nodes
        @param num_nodes: number of nodes to failover.
        @param graceful: True if graceful failover else False
        @param rebalance: True if do rebalance operation after failover.
        """
        task = self.__async_failover(
            master=False,
            num_nodes=num_nodes,
            graceful=graceful)
        task.result()
        if graceful:
            # wait for replica update
            time.sleep(60)
            # use rebalance stats to monitor failover
            RestConnection(self.__master_node).monitorRebalance()
        if rebalance:
            self.rebalance_failover_nodes()
def rebalance_failover_nodes(self):
self.__clusterop.rebalance(self.__nodes, [], self.__fail_over_nodes)
[self.__nodes.remove(node) for node in self.__fail_over_nodes]
self.__fail_over_nodes = []
    def add_back_node(self, recovery_type=None, services=None):
        """add-back failed-over node to the cluster and rebalance.
        @param recovery_type: delta/full
        @param services: optional service list for the rebalance.
        @raise FTSException: if no nodes are currently failed over.
        """
        raise_if(
            len(self.__fail_over_nodes) < 1,
            FTSException("No failover nodes available to add_back")
        )
        rest = RestConnection(self.__master_node)
        server_nodes = rest.node_statuses()
        # Match failed-over nodes to their otpNode ids by IP.
        for failover_node in self.__fail_over_nodes:
            for server_node in server_nodes:
                if server_node.ip == failover_node.ip:
                    # Recovery type must be set before the add-back here.
                    if recovery_type:
                        rest.set_recovery_type(
                            otpNode=server_node.id,
                            recoveryType=recovery_type)
                    rest.add_back_node(server_node.id)
        for node in self.__fail_over_nodes:
            if node not in self.__nodes:
                self.__nodes.append(node)
        # Synchronous rebalance to complete the add-back.
        self.__clusterop.rebalance(self.__nodes, [], [], services=services)
        self.__fail_over_nodes = []
    def async_failover_add_back_node(self, num_nodes=1, graceful=False,
                                     recovery_type=None, services=None):
        """Failover nodes (waited on), add them back, then start an async
        rebalance.
        @param num_nodes: number of non-master nodes to failover.
        @param graceful: True if graceful failover else False.
        @param recovery_type: delta/full
        @param services: optional service list for the rebalance.
        @return: the async rebalance task(s).
        """
        task = self.__async_failover(
            master=False,
            num_nodes=num_nodes,
            graceful=graceful)
        task.result()
        time.sleep(60)
        if graceful:
            # use rebalance stats to monitor failover
            RestConnection(self.__master_node).monitorRebalance()
        raise_if(
            len(self.__fail_over_nodes) < 1,
            FTSException("No failover nodes available to add_back")
        )
        rest = RestConnection(self.__master_node)
        server_nodes = rest.node_statuses()
        for failover_node in self.__fail_over_nodes:
            for server_node in server_nodes:
                if server_node.ip == failover_node.ip:
                    # NOTE(review): here add_back_node runs before
                    # set_recovery_type — the opposite order of
                    # add_back_node() above; confirm which is correct.
                    rest.add_back_node(server_node.id)
                    if recovery_type:
                        rest.set_recovery_type(
                            otpNode=server_node.id,
                            recoveryType=recovery_type)
        for node in self.__fail_over_nodes:
            if node not in self.__nodes:
                self.__nodes.append(node)
        self.__fail_over_nodes = []
        tasks = self.__clusterop.async_rebalance(self.__nodes, [], [], services=services)
        return tasks
def warmup_node(self, master=False):
"""Warmup node on cluster
@param master: True if warmup master-node else False.
"""
from random import randrange
if master:
warmup_node = self.__master_node
else:
warmup_node = self.__nodes[
randrange(
1, len(
self.__nodes))]
NodeHelper.do_a_warm_up(warmup_node)
return warmup_node
    def reboot_one_node(self, test_case, master=False):
        """Reboot a single node and return it.
        @param test_case: test object passed through to NodeHelper for
        logging/cleanup.
        @param master: True to reboot the master; otherwise a random
        non-master node (index 0 is excluded).
        @return: the node that was rebooted.
        """
        from random import randrange
        if master:
            reboot_node = self.__master_node
        else:
            reboot_node = self.__nodes[
                randrange(
                    1, len(
                        self.__nodes))]
        NodeHelper.reboot_server(reboot_node, test_case)
        return reboot_node
def reboot_after_timeout(self, timeout=5):
time.sleep(timeout)
self.reboot_one_node(test_case=self)
def restart_couchbase_on_all_nodes(self):
for node in self.__nodes:
NodeHelper.do_a_warm_up(node)
NodeHelper.wait_warmup_completed(self.__nodes)
    def wait_for_flusher_empty(self, timeout=60):
        """Wait for disk queue to completely flush.
        Waits for ep_queue_size == 0 on every (node, bucket) pair.
        @param timeout: per-task timeout in seconds.
        """
        tasks = []
        for node in self.__nodes:
            for bucket in self.__buckets:
                tasks.append(
                    self.__clusterop.async_wait_for_stats(
                        [node],
                        bucket,
                        '',
                        'ep_queue_size',
                        '==',
                        0))
        for task in tasks:
            task.result(timeout)
class FTSBaseTest(unittest.TestCase):
    def setUp(self):
        """Per-test setup: read test input, configure logging, create the
        cluster operation helper and build the test cluster."""
        unittest.TestCase.setUp(self)
        self._input = TestInputSingleton.input
        self.elastic_node = self._input.elastic
        self.log = logger.Logger.get_logger()
        self.__init_logger()
        self.__cluster_op = Cluster()
        self.__init_parameters()
        # Test-input driven knobs for custom analyzers / field mappings.
        self.num_custom_analyzers = self._input.param("num_custom_analyzers", 0)
        self.field_name = self._input.param("field_name", None)
        self.field_type = self._input.param("field_type", None)
        self.field_alias = self._input.param("field_alias", None)
        self.enable_secrets = self._input.param("enable_secrets", False)
        self.secret_password = self._input.param("secret_password", 'p@ssw0rd')
        self._bucket_size = self._input.param("bucket_size")
        self.log.info(
            "==== FTSbasetests setup is started for test #{0} {1} ===="
            .format(self.__case_number, self._testMethodName))
        # workaround for MB-16794
        # self.sleep(30, "working around MB-16794")
        self.__setup_for_test()
        self.log.info(
            "==== FTSbasetests setup is finished for test #{0} {1} ===="
            .format(self.__case_number, self._testMethodName))
    def __is_test_failed(self):
        """True if the current test has recorded errors.
        Checks both the modern `_outcome` attribute and the legacy
        `_exc_info` hook for compatibility across unittest versions."""
        return ( hasattr(self, '_outcome') and len(self._outcome.errors)) \
                or (hasattr(self, '_exc_info')
                    and self._exc_info()[1] is not None)
    def __is_cleanup_not_needed(self):
        """True when cleanup should be skipped: either the test failed and
        'stop-on-failure' is set, or 'skip-cleanup' is set."""
        return ((self.__is_test_failed() and
                self._input.param("stop-on-failure", False)) or
                self._input.param("skip-cleanup", False))
def __is_cluster_run(self):
return len(set([server.ip for server in self._input.servers])) == 1
    def _setup_node_secret(self, secret_password):
        """Configure the secrets-management master password on every server.
        @param secret_password: password to set; empty string appears to
        reset it (used in tearDown) — confirm with SecretsMasterBase."""
        for server in self._input.servers:
            SecretsMasterBase(server).setup_pass_node(server, secret_password)
    def _check_retry_rebalance_succeeded(self):
        """Poll the retry-rebalance machinery until a retried rebalance
        succeeds, or fail the test when all retry attempts are exhausted."""
        rest = RestConnection(self._cb_cluster.get_master_node())
        result = json.loads(rest.get_pending_rebalance_info())
        self.log.info(result)
        retry_after_secs = result["retry_after_secs"]
        attempts_remaining = result["attempts_remaining"]
        retry_rebalance = result["retry_rebalance"]
        self.log.info("Attempts remaining : {0}, Retry rebalance : {1}".format(attempts_remaining, retry_rebalance))
        while attempts_remaining:
            # wait for the afterTimePeriod for the failed rebalance to restart
            self.sleep(retry_after_secs, message="Waiting for the afterTimePeriod to complete")
            try:
                result = rest.monitorRebalance()
                msg = "monitoring rebalance {0}"
                self.log.info(msg.format(result))
            except Exception:
                # Rebalance still failing — re-read retry state; missing
                # keys mean no retries are left.
                result = json.loads(rest.get_pending_rebalance_info())
                self.log.info(result)
                try:
                    attempts_remaining = result["attempts_remaining"]
                    retry_rebalance = result["retry_rebalance"]
                    retry_after_secs = result["retry_after_secs"]
                except KeyError:
                    self.fail("Retrying of rebalance still did not help. All the retries exhausted...")
                self.log.info("Attempts remaining : {0}, Retry rebalance : {1}".format(attempts_remaining,
                                                                                      retry_rebalance))
            else:
                self.log.info("Retry rebalanced fixed the rebalance failure")
                break
    def tearDown(self):
        """Clusters cleanup: check FTS logs for errors, collect diagnostics
        on failure, clean up the cluster and close all connections."""
        # Fail the test if any watched error patterns appeared in fts logs.
        if len(self.__report_error_list) > 0:
            error_logger = self.check_error_count_in_fts_log()
            if error_logger:
                self.fail("Errors found in logs : {0}".format(error_logger))
        if self.enable_secrets:
            # Reset the node secret password set during setUp.
            self._setup_node_secret("")
        if self._input.param("negative_test", False):
            # Negative tests are expected to record errors; invert outcome.
            if hasattr(self, '_outcome') and self._outcome.errors[1][1]:
                self._outcome.errors = []
                self.log.info("This is marked as a negative test and contains "
                              "errors as expected, hence not failing it")
            else:
                raise FTSException("Negative test passed!")
        if self._input.param("get-fts-diags", False) and self.__is_test_failed():
            self.grab_fts_diag()
        # collect logs before tearing down clusters
        if self._input.param("get-cbcollect-info", False) and \
                self.__is_test_failed():
            for server in self._input.servers:
                self.log.info("Collecting logs @ {0}".format(server.ip))
                NodeHelper.collect_logs(server)
        # ---backup pindex_data if the test has failed
        # if self._input.param('backup_pindex_data', False) and \
        #        self.__is_test_failed():
        # To reproduce MB-20494, temporarily remove condition to
        # backup_pindex_data only if test has failed.
        if self._input.param('backup_pindex_data', False):
            for server in self._input.servers:
                self.log.info("Backing up pindex data @ {0}".format(server.ip))
                self.backup_pindex_data(server)
        try:
            if self.__is_cleanup_not_needed():
                self.log.warning("CLEANUP WAS SKIPPED")
                return
            self.log.info(
                "==== FTSbasetests cleanup is started for test #{0} {1} ===="
                .format(self.__case_number, self._testMethodName))
            self._cb_cluster.cleanup_cluster(self)
            ntonencryptionBase().disable_nton_cluster(self._input.servers)
            if self.compare_es:
                self.teardown_es()
            self.log.info(
                "==== FTSbasetests cleanup is finished for test #{0} {1} ==="
                .format(self.__case_number, self._testMethodName))
        finally:
            # Always release ssh/memcached connections and the cluster op
            # helper, even if cleanup above failed or was skipped.
            self.log.info("closing all ssh connections")
            for ins in RemoteMachineShellConnection.get_instances():
                #self.log.info(str(ins))
                ins.disconnect()
            self.log.info("closing all memcached connections")
            for ins in MemcachedClient.get_instances():
                #self.log.info(str(ins))
                ins.close()
            for ins in MC_MemcachedClient.get_instances():
                #self.log.info(str(ins))
                ins.close()
            self.__cluster_op.shutdown(force=True)
            unittest.TestCase.tearDown(self)
    def __init_logger(self):
        """Apply the test-input 'log_level' parameter: file handlers stay
        at DEBUG, all other handlers get the requested level."""
        if self._input.param("log_level", None):
            # Level 0 == NOTSET on the logger; handlers do the filtering.
            self.log.setLevel(level=0)
            for hd in self.log.handlers:
                if str(hd.__class__).find('FileHandler') != -1:
                    hd.setLevel(level=logging.DEBUG)
                else:
                    hd.setLevel(
                        level=getattr(
                            logging,
                            self._input.param(
                                "log_level",
                                None)))
def _set_bleve_max_result_window(self):
bmrw_value = self._input.param("bmrw_value", 100000000)
for node in self._cb_cluster.get_fts_nodes():
self.log.info("updating bleve_max_result_window of node : {0}".format(node))
rest = RestConnection(node)
rest.set_bleve_max_result_window(bmrw_value)
    def __setup_for_test(self):
        """Build the test cluster: clean previous state, init nodes, create
        the RBAC admin user, buckets and error-log watchers."""
        self.log.info("-->Start: Setup for the test")
        use_hostanames = self._input.param("use_hostnames", False)
        no_buckets = self._input.param("no_buckets", False)
        sdk_compression = self._input.param("sdk_compression", True)
        master = self._input.servers[0]
        first_node = copy.deepcopy(master)
        self._cb_cluster = CouchbaseCluster("C1",
                                            [first_node],
                                            self.log,
                                            use_hostanames,
                                            sdk_compression=sdk_compression)
        self.log.info("-->cleanup_previous")
        if self.__is_cleanup_not_needed():
            self.log.warning("CLEANUP WAS SKIPPED")
        else:
            self.__cleanup_previous()
        if self.compare_es:
            self.setup_es()
        if not self._input.param("skip_init_cluster", False):
            self.log.info("-->Initializing the cluster")
            # First server is the seed; the rest are added with the
            # requested service layout.
            self._cb_cluster.init_cluster(self._cluster_services,
                                          self._input.servers[1:])
        self.log.info("-->Enabling the diagnostics")
        self._enable_diag_eval_on_non_local_hosts()
        # Add built-in user
        self.log.info("--> Creating user cbadminbucket/cbadminbucket")
        testuser = [{'id': 'cbadminbucket', 'name': 'cbadminbucket', 'password': 'password'}]
        RbacBase().create_user_source(testuser, 'builtin', master)
        # Assign user to role
        self.log.info("--> Add user role cbadminbucket/cbadminbucket")
        role_list = [{'id': 'cbadminbucket', 'name': 'cbadminbucket', 'roles': 'admin'}]
        RbacBase().add_user_role(role_list, RestConnection(master), 'builtin')
        self.log.info("--> Done: Add user role cbadminbucket/cbadminbucket")
        self._set_bleve_max_result_window()
        self.__set_free_servers()
        if not no_buckets:
            self.__create_buckets()
        self._master = self._cb_cluster.get_master_node()
        # simply append to this list, any error from log we want to fail test on
        self.__report_error_list = []
        if self.__fail_on_errors:
            self.__report_error_list = ["panic:"]
        # for format {ip1: {"panic": 2}}
        self.__error_count_dict = {}
        if len(self.__report_error_list) > 0 and not self._input.param("skip_host_login", False):
            self.__initialize_error_count_dict()
        if self.ntonencrypt == 'enable':
            self.setup_nton_encryption()
        self.log.info("-->End: Setup for the test")
    def _enable_diag_eval_on_non_local_hosts(self):
        """
        Enable diag/eval to be run on non-local hosts.
        Skipped when the 'skip_host_login' param is set; raises when the
        server reports an error, and logs compatibility mode when the
        command produced no output.
        :return: Nothing
        """
        if self._input.param("skip_host_login", False):
            self.log.warning("-->Skipping the host login and not setting the diag eval...")
            return
        master = self._cb_cluster.get_master_node()
        remote = RemoteMachineShellConnection(master)
        output, error = remote.enable_diag_eval_on_non_local_hosts()
        if output is not None:
            if "ok" not in output:
                self.log.error("Error in enabling diag/eval on non-local hosts on {}".format(master.ip))
                raise Exception("Error in enabling diag/eval on non-local hosts on {}".format(master.ip))
            else:
                self.log.info(
                    "Enabled diag/eval for non-local hosts from {}".format(
                        master.ip))
        else:
            # Older servers return nothing for this command.
            self.log.info("Running in compatibility mode, not enabled diag/eval for non-local hosts")
def setup_nton_encryption(self):
self.log.info('Setting up node to node encyrption from ')
ntonencryptionBase().setup_nton_cluster(self._input.servers,clusterEncryptionLevel=self.ntonencrypt_level)
def construct_serv_list(self, serv_str):
"""
Constructs a list of node services
to rebalance into cluster
@param serv_str: like "D,D+F,I+Q,F" where the letters
stand for services defined in serv_dict
@return services_list: like ['kv', 'kv,fts', 'index,n1ql','index']
"""
serv_dict = {'D': 'kv', 'F': 'fts', 'I': 'index', 'Q': 'n1ql'}
for letter, serv in list(serv_dict.items()):
serv_str = serv_str.replace(letter, serv)
services_list = re.split('[-,:]', serv_str)
for index, serv in enumerate(services_list):
services_list[index] = serv.replace('+', ',')
return services_list
    def __init_parameters(self):
        """
        Read every test parameter used by the FTS tests from the test
        input and cache it on the instance. Called once during setup.
        """
        # private params used only inside this base class
        self.__case_number = self._input.param("case_number", 0)
        self.__num_sasl_buckets = self._input.param("sasl_buckets", 0)
        self.__num_stand_buckets = self._input.param("standard_buckets", 0)
        self.__eviction_policy = self._input.param("eviction_policy", 'valueOnly')
        self.__mixed_priority = self._input.param("mixed_priority", None)
        self.expected_no_of_results = self._input.param("expected_no_of_results", None)
        self.polygon_feature = self._input.param("polygon_feature", "regular")
        self.num_vertices = self._input.param("num_vertices", None)
        # Public init parameters - Used in other tests too.
        # Move above private to this section if needed in future, but
        # Ensure to change other tests too.
        self._cluster_services = \
            self.construct_serv_list(self._input.param("cluster", "D,D+F,F"))
        self._num_replicas = self._input.param("replicas", 1)
        self._create_default_bucket = self._input.param("default_bucket", True)
        self._num_items = self._input.param("items", 1000)
        self._value_size = self._input.param("value_size", 512)
        self._poll_timeout = self._input.param("poll_timeout", 120)
        # mutation workload knobs (percentages, expiry)
        self._update = self._input.param("update", False)
        self._delete = self._input.param("delete", False)
        self._perc_upd = self._input.param("upd", 30)
        self._perc_del = self._input.param("del", 30)
        self._expires = self._input.param("expires", 0)
        self._wait_for_expiration = self._input.param(
            "wait_for_expiration",
            True)
        self._warmup = self._input.param("warm", "")
        self._rebalance = self._input.param("rebalance", "")
        self._failover = self._input.param("failover", "")
        self._wait_timeout = self._input.param("timeout", 60)
        self._disable_compaction = self._input.param("disable_compaction", "")
        self._item_count_timeout = self._input.param("item_count_timeout", 300)
        self._dgm_run = self._input.param("dgm_run", False)
        self._active_resident_ratio = \
            self._input.param("active_resident_ratio", 100)
        CHECK_AUDIT_EVENT.CHECK = self._input.param("verify_audit", 0)
        self._max_verify = self._input.param("max_verify", 100000)
        self._num_vbuckets = self._input.param("vbuckets", 1024)
        self.lang = self._input.param("lang", "EN")
        self.encoding = self._input.param("encoding", "utf-8")
        self.analyzer = self._input.param("analyzer", None)
        self.index_replicas = self._input.param("index_replicas", None)
        self.index_kv_store = self._input.param("kvstore", None)
        self.partitions_per_pindex = \
            self._input.param("max_partitions_pindex", 171)
        self.upd_del_fields = self._input.param("upd_del_fields", None)
        self.num_queries = self._input.param("num_queries", 1)
        self.query_types = (self._input.param("query_types", "match")).split(',')
        self.index_per_bucket = self._input.param("index_per_bucket", 1)
        self.dataset = self._input.param("dataset", "emp")
        self.sample_query = {"match": "Safiya Morgan", "field": "name"}
        # optional Elasticsearch node used as the "source of truth" for
        # result comparison; requires an [elastic] ini section
        self.compare_es = self._input.param("compare_es", False)
        if self.compare_es:
            if not self.elastic_node:
                self.fail("For ES result validation, pls add in the"
                          " [elastic] section in your ini file,"
                          " else set \"compare_es\" as False")
            self.es = ElasticSearchBase(self.elastic_node, self.log)
            self.es.restart_es()
        else:
            self.es = None
        self.run_via_n1ql = self._input.param("run_via_n1ql", False)
        if self.run_via_n1ql:
            self.n1ql = N1QLHelper(version="sherlock", shell=None,
                                   item_flag=None, n1ql_port=8903,
                                   full_docs_list=[], log=self.log)
        else:
            self.n1ql = None
        # document generators are populated lazily by populate_*_gen()
        self.create_gen = None
        self.update_gen = None
        self.delete_gen = None
        self.sort_fields = self._input.param("sort_fields", None)
        self.sort_fields_list = None
        if self.sort_fields:
            self.sort_fields_list = self.sort_fields.split(',')
        self.advanced_sort = self._input.param("advanced_sort", False)
        self.sort_by = self._input.param("sort_by", "score")
        self.sort_missing = self._input.param("sort_missing", "last")
        self.sort_desc = self._input.param("sort_desc", False)
        self.sort_mode = self._input.param("sort_mode", "min")
        self.__fail_on_errors = self._input.param("fail-on-errors", True)
        self.cli_command_location = LINUX_COUCHBASE_BIN_PATH
        # NOTE: str(None) yields the literal "None" when "expected" is unset
        self.expected_docs = str(self._input.param("expected", None))
        self.expected_docs_list = []
        if (self.expected_docs) and (',' in self.expected_docs):
            self.expected_docs_list = self.expected_docs.split(',')
        else:
            self.expected_docs_list.append(self.expected_docs)
        self.expected_results = self._input.param("expected_results", None)
        self.highlight_style = self._input.param("highlight_style", None)
        self.highlight_fields = self._input.param("highlight_fields", None)
        self.highlight_fields_list = []
        if (self.highlight_fields):
            if (',' in self.highlight_fields):
                self.highlight_fields_list = self.highlight_fields.split(',')
            else:
                self.highlight_fields_list.append(self.highlight_fields)
        self.consistency_level = self._input.param("consistency_level", '')
        if self.consistency_level.lower() == 'none':
            self.consistency_level = None
        self.consistency_vectors = self._input.param("consistency_vectors", {})
        if self.consistency_vectors != {}:
            # WARNING: eval() of a test param — acceptable only because
            # the .ini input is trusted test configuration
            self.consistency_vectors = eval(self.consistency_vectors)
        if self.consistency_vectors is not None and self.consistency_vectors != '':
            if not isinstance(self.consistency_vectors, dict):
                self.consistency_vectors = json.loads(self.consistency_vectors)
        self.ntonencrypt = self._input.param('ntonencrypt','disable')
        self.ntonencrypt_level = self._input.param('ntonencrypt_level','control')
def __initialize_error_count_dict(self):
"""
initializes self.__error_count_dict with ip, error and err count
like {ip1: {"panic": 2}}
"""
for node in self._input.servers:
self.__error_count_dict[node.ip] = {}
self.check_error_count_in_fts_log(initial=True)
self.log.info(self.__error_count_dict)
    def __cleanup_previous(self):
        # Drop buckets/indexes left over from a previous run; keep the
        # cluster nodes themselves running (cluster_shutdown=False).
        self._cb_cluster.cleanup_cluster(self, cluster_shutdown=False)
    def __set_free_servers(self):
        """
        Record every server from the ini file that is NOT already part of
        the cluster as a floating (spare) server for later rebalance-in.
        """
        total_servers = self._input.servers
        cluster_nodes = self._cb_cluster.get_nodes()
        for server in total_servers:
            for cluster_node in cluster_nodes:
                if server.ip == cluster_node.ip and \
                        server.port == cluster_node.port:
                    # server is already in the cluster
                    break
                else:
                    continue
            else:
                # for/else: inner loop finished without break, so this
                # server is not in the cluster -> it is a free server
                FloatingServers._serverlist.append(server)
def __calculate_bucket_size(self, cluster_quota, num_buckets):
if 'quota_percent' in self._input.test_params:
quota_percent = int(self._input.test_params['quota_percent'])
else:
quota_percent = None
dgm_run = self._input.param("dgm_run", 0)
if dgm_run:
# buckets cannot be created if size<100MB
bucket_size = 256
elif quota_percent is not None:
bucket_size = int(float(cluster_quota - 500) * float(quota_percent / 100.0) / float(num_buckets))
else:
bucket_size = int((float(cluster_quota) - 500) / float(num_buckets))
return bucket_size
def __create_buckets(self):
# if mixed priority is set by user, set high priority for sasl and
# standard buckets
if self.__mixed_priority:
bucket_priority = 'high'
else:
bucket_priority = None
num_buckets = self.__num_sasl_buckets + \
self.__num_stand_buckets + int(self._create_default_bucket)
if self._bucket_size:
bucket_size = self._bucket_size
else:
total_quota = self._cb_cluster.get_mem_quota()
bucket_size = self.__calculate_bucket_size( total_quota, num_buckets)
bucket_type = TestInputSingleton.input.param("bucket_type", "membase")
maxttl = TestInputSingleton.input.param("maxttl", None)
if self._create_default_bucket:
self._cb_cluster.create_default_bucket(
bucket_size,
self._num_replicas,
eviction_policy=self.__eviction_policy,
bucket_priority=bucket_priority,
bucket_type=bucket_type,
maxttl=maxttl)
self._cb_cluster.create_sasl_buckets(
bucket_size, num_buckets=self.__num_sasl_buckets,
num_replicas=self._num_replicas,
eviction_policy=self.__eviction_policy,
bucket_priority=bucket_priority,
bucket_type=bucket_type,
maxttl=maxttl)
self._cb_cluster.create_standard_buckets(
bucket_size, num_buckets=self.__num_stand_buckets,
num_replicas=self._num_replicas,
eviction_policy=self.__eviction_policy,
bucket_priority=bucket_priority,
bucket_type=bucket_type,
maxttl=maxttl)
def create_buckets_on_cluster(self):
# if mixed priority is set by user, set high priority for sasl and
# standard buckets
self.__create_buckets()
    def load_sample_buckets(self, server, bucketName):
        """
        Install one of Couchbase's bundled sample buckets on *server*
        via the REST sampleBuckets endpoint, then wait for the load.
        """
        from lib.remote.remote_util import RemoteMachineShellConnection
        shell = RemoteMachineShellConnection(server)
        # NOTE(review): credentials are hard-coded (Administrator:password)
        shell.execute_command("""curl -v -u Administrator:password \
                             -X POST http://{0}:8091/sampleBuckets/install \
                          -d '["{1}"]'""".format(server.ip, bucketName))
        shell.disconnect()
        # fixed wait for the sample bucket load to finish
        self.sleep(20)
def load_employee_dataset(self, num_items=None):
"""
Loads the default JSON dataset
see JsonDocGenerator in documentgenerator.py
"""
self.log.info("Beginning data load ...")
if not num_items:
num_items = self._num_items
if not self._dgm_run:
self._cb_cluster.load_all_buckets(num_items, self._value_size)
else:
self._cb_cluster.load_all_buckets_till_dgm(
active_resident_ratio=self._active_resident_ratio,
items=self._num_items)
def load_utf16_data(self, num_keys=None):
"""
Loads the default JSON dataset in utf-16 format
"""
if not num_keys:
num_keys = self._num_items
gen = JsonDocGenerator("C1",
encoding="utf-16",
start=0,
end=num_keys)
self._cb_cluster.load_all_buckets_from_generator(gen)
def load_wiki(self, num_keys=None, lang="EN", encoding="utf-8"):
"""
Loads the Wikipedia dump.
Languages supported : EN(English)/ES(Spanish)/DE(German)/FR(French)
"""
if not num_keys:
num_keys = self._num_items
gen = WikiJSONGenerator("wiki",
lang=lang,
encoding=encoding,
start=0,
end=num_keys)
self._cb_cluster.load_all_buckets_from_generator(gen)
def load_earthquakes(self, num_keys=None):
"""
Loads geo-spatial jsons from earthquakes.json .
"""
if not num_keys:
num_keys = self._num_items
gen = GeoSpatialDataLoader("earthquake",
start=0,
end=num_keys)
self._cb_cluster.load_all_buckets_from_generator(gen)
def perform_update_delete(self, fields_to_update=None):
"""
Call this method to perform updates/deletes on your cluster.
It checks if update=True or delete=True params were passed in
the test.
@param fields_to_update - list of fields to update in JSON
"""
# UPDATES
if self._update:
self.log.info("Updating keys @ {0}".format(self._cb_cluster.get_name()))
self._cb_cluster.update_delete_data(
OPS.UPDATE,
fields_to_update=fields_to_update,
perc=self._perc_upd,
expiration=self._expires,
wait_for_expiration=self._wait_for_expiration)
# DELETES
if self._delete:
self.log.info("Deleting keys @ {0}".format(self._cb_cluster.get_name()))
self._cb_cluster.update_delete_data(OPS.DELETE, perc=self._perc_del)
def async_perform_update_delete(self, fields_to_update=None):
"""
Call this method to perform updates/deletes on your cluster.
It checks if update=True or delete=True params were passed in
the test.
@param fields_to_update - list of fields to update in JSON
"""
load_tasks = []
# UPDATES
if self._update:
self.log.info("Updating keys @ {0} with expiry={1}".
format(self._cb_cluster.get_name(), self._expires))
self.populate_update_gen(fields_to_update)
if self.compare_es:
gen = copy.deepcopy(self.update_gen)
if not self._expires:
if isinstance(gen, list):
for generator in gen:
load_tasks.append(self.es.async_bulk_load_ES(
index_name='es_index',
gen=generator,
op_type=OPS.UPDATE))
else:
load_tasks.append(self.es.async_bulk_load_ES(
index_name='es_index',
gen=gen,
op_type=OPS.UPDATE))
else:
# an expire on CB translates to delete on ES
if isinstance(gen, list):
for generator in gen:
load_tasks.append(self.es.async_bulk_load_ES(
index_name='es_index',
gen=generator,
op_type=OPS.DELETE))
else:
load_tasks.append(self.es.async_bulk_load_ES(
index_name='es_index',
gen=gen,
op_type=OPS.DELETE))
load_tasks += self._cb_cluster.async_load_all_buckets_from_generator(
kv_gen=self.update_gen,
ops=OPS.UPDATE,
exp=self._expires)
[task.result() for task in load_tasks]
if load_tasks:
self.log.info("Batched updates loaded to cluster(s)")
load_tasks = []
# DELETES
if self._delete:
self.log.info("Deleting keys @ {0}".format(self._cb_cluster.get_name()))
self.populate_delete_gen()
if self.compare_es:
del_gen = copy.deepcopy(self.delete_gen)
if isinstance(del_gen, list):
for generator in del_gen:
load_tasks.append(self.es.async_bulk_load_ES(
index_name='es_index',
gen=generator,
op_type=OPS.DELETE))
else:
load_tasks.append(self.es.async_bulk_load_ES(
index_name='es_index',
gen=del_gen,
op_type=OPS.DELETE))
load_tasks += self._cb_cluster.async_load_all_buckets_from_generator(
self.delete_gen, OPS.DELETE)
[task.result() for task in load_tasks]
if load_tasks:
self.log.info("Batched deletes sent to cluster(s)")
if self._wait_for_expiration and self._expires:
self.sleep(
self._expires,
"Waiting for expiration of updated items")
self._cb_cluster.run_expiry_pager()
def print_crash_stacktrace(self, node, error):
""" Prints panic stacktrace from goxdcr.log*
"""
shell = RemoteMachineShellConnection(node)
result, err = shell.execute_command("zgrep -A 40 -B 4 '{0}' {1}/fts.log*".
format(error, NodeHelper.get_log_dir(node)))
for line in result:
self.log.info(line)
shell.disconnect()
    def check_error_count_in_fts_log(self, initial=False):
        """
        checks if new errors from self.__report_error_list
        were found on any of the goxdcr.logs

        With initial=True the current per-node/per-error counts are
        recorded into self.__error_count_dict as a baseline; otherwise
        the counts are compared to that baseline and a list of
        "<error> found on <ip>" strings is returned for counts that grew
        (printing the surrounding stacktrace for each).
        """
        error_found_logger = []
        fts_log = NodeHelper.get_log_dir(self._input.servers[0]) + '/fts.log*'
        for node in self._input.servers:
            shell = RemoteMachineShellConnection(node)
            # NOTE(review): this early return leaves the shell connected
            # and skips the remaining nodes entirely — confirm intended
            if not shell.is_ssh_allowed:
                self.log.warning("No check for error count as ssh to the node is available!")
                return
            for error in self.__report_error_list:
                count, err = shell.execute_command(
                    "zgrep \"{0}\" {1} | wc -l".format(error, fts_log))
                # execute_command may return the count as a list of lines
                if isinstance(count, list):
                    count = int(count[0])
                else:
                    count = int(count)
                NodeHelper._log.info(count)
                if initial:
                    self.__error_count_dict[node.ip][error] = count
                else:
                    self.log.info("Initial '{0}' count on {1} :{2}, now :{3}".
                                  format(error,
                                         node.ip,
                                         self.__error_count_dict[node.ip][error],
                                         count))
                    if node.ip in list(self.__error_count_dict.keys()):
                        if (count > self.__error_count_dict[node.ip][error]):
                            error_found_logger.append("{0} found on {1}".format(error,
                                                                                node.ip))
                            self.print_crash_stacktrace(node, error)
            shell.disconnect()
        if not initial:
            if error_found_logger:
                self.log.error(error_found_logger)
            return error_found_logger
    def sleep(self, timeout=1, message=""):
        """Log *message* and block for *timeout* seconds."""
        self.log.info("sleep for {0} secs. {1} ...".format(timeout, message))
        time.sleep(timeout)
def wait_for_indexing_complete(self, item_count=None):
"""
Wait for index_count for any index to stabilize or reach the
index count specified by item_count
"""
retry = self._input.param("index_retry", 20)
for index in self._cb_cluster.get_indexes():
if index.index_type == "fulltext-alias":
continue
retry_count = retry
prev_count = 0
es_index_count = 0
while retry_count > 0:
fail = False
try:
index_doc_count = index.get_indexed_doc_count()
bucket_doc_count = index.get_src_bucket_doc_count()
if not self.compare_es:
self.log.info("Docs in bucket = %s, docs in FTS index '%s': %s"
% (bucket_doc_count,
index.name,
index_doc_count))
if retry_count == 1:
fail = True
self.fail("FTS index count not matching bucket count even after 20 tries: "
"Docs in bucket = %s, docs in FTS index '%s': %s" % (bucket_doc_count,
index.name,
index_doc_count))
else:
self.es.update_index('es_index')
es_index_count = self.es.get_index_count('es_index')
self.log.info("Docs in bucket = %s, docs in FTS index '%s':"
" %s, docs in ES index: %s "
% (bucket_doc_count,
index.name,
index_doc_count,
es_index_count))
if retry_count == 1:
fail = True
self.fail("FTS/ES index count not matching bucket count even after 20 tries: "
"Docs in bucket = %s, docs in FTS index '%s': %s, docs in ES index: %s "
% (bucket_doc_count,
index.name,
index_doc_count,
es_index_count))
if bucket_doc_count == 0:
if item_count and item_count != 0:
self.sleep(5,
"looks like docs haven't been loaded yet...")
retry_count -= 1
continue
if item_count and index_doc_count > item_count:
break
if bucket_doc_count == index_doc_count:
if self.compare_es:
if bucket_doc_count == es_index_count:
break
elif retry_count == 1:
fail = True
self.fail(
"ES index count not matching with bucket_doc_count. Docs in bucket = %s, docs "
"in FTS index '%s': %s, docs in ES index: %s " % (
bucket_doc_count, index.name,
index_doc_count,
es_index_count))
else:
break
if prev_count < index_doc_count or prev_count > index_doc_count:
prev_count = index_doc_count
retry_count = retry
else:
retry_count -= 1
except Exception as e:
self.log.info(e)
if fail:
self.fail(e)
retry_count -= 1
time.sleep(6)
# now wait for num_mutations_to_index to become zero to handle the pure
# updates scenario - where doc count remains unchanged
retry_mut_count = 20
if item_count == None:
while True and retry_count:
num_mutations_to_index = index.get_num_mutations_to_index()
if num_mutations_to_index > 0:
self.sleep(5, "num_mutations_to_index: {0} > 0".format(num_mutations_to_index))
retry_count -= 1
else:
break
def construct_plan_params(self):
plan_params = {}
plan_params['numReplicas'] = 0
if self.index_replicas:
plan_params['numReplicas'] = self.index_replicas
if self.partitions_per_pindex:
plan_params['maxPartitionsPerPIndex'] = self.partitions_per_pindex
return plan_params
    def populate_node_partition_map(self, index):
        """
        populates the node-pindex-partition map

        Returns {node: {'pindex_count': n, 'pindexes': {uuid: [vb, ...]}}}
        built from the index definition's planPIndexes, waiting up to 60s
        for the plan to become available.
        """
        nodes_partitions = {}
        start_time = time.time()
        _, defn = index.get_index_defn()
        while 'planPIndexes' not in defn or not defn['planPIndexes']:
            if time.time() - start_time > 60:
                self.fail("planPIndexes unavailable for index {0} even after 60s"
                          .format(index.name))
            self.sleep(5, "No pindexes found, waiting for index to get created")
            _, defn = index.get_index_defn()
        for pindex in defn['planPIndexes']:
            # pick the node hosting the priority-0 (active) copy;
            # NOTE(review): if no node has priority 0, 'node' falls
            # through to the last entry iterated — confirm intended
            for node, attr in list(pindex['nodes'].items()):
                if attr['priority'] == 0:
                    break
            if node not in list(nodes_partitions.keys()):
                nodes_partitions[node] = {'pindex_count': 0, 'pindexes': {}}
            nodes_partitions[node]['pindex_count'] += 1
            nodes_partitions[node]['pindexes'][pindex['uuid']] = []
            for partition in pindex['sourcePartitions'].split(','):
                nodes_partitions[node]['pindexes'][pindex['uuid']].append(partition)
        return nodes_partitions
    def is_index_partitioned_balanced(self, index):
        """
        Perform some plan validation to make sure the index is
        partitioned and balanced on all nodes.
        Check the following -
        1. if number of pindexes = num_vbuckets/max_partitions_per_pindex
        2. if each pindex is servicing not more than max_partitions_per_pindex
        3. if index is distributed - present on all fts nodes, almost equally?
        4. if index balanced - every fts node services almost equal num of vbs?
        Fails the test on any violation; returns True otherwise.
        """
        self.log.info("Validating index distribution for %s ..." % index.name)
        nodes_partitions = self.populate_node_partition_map(index)
        # check 1 - test number of pindexes
        partitions_per_pindex = index.get_max_partitions_pindex()
        exp_num_pindexes = self._num_vbuckets // partitions_per_pindex
        if self._num_vbuckets % partitions_per_pindex:
            # round up when vbuckets don't divide evenly
            import math
            exp_num_pindexes = math.ceil(
                self._num_vbuckets // partitions_per_pindex + 0.5)
        total_pindexes = 0
        for node in list(nodes_partitions.keys()):
            total_pindexes += nodes_partitions[node]['pindex_count']
        if total_pindexes != exp_num_pindexes:
            self.fail("Number of pindexes for %s is %s while"
                      " expected value is %s" % (index.name,
                                                 total_pindexes,
                                                 exp_num_pindexes))
        self.log.info("Validated: Number of PIndexes = %s" % total_pindexes)
        index.num_pindexes = total_pindexes
        # check 2 - each pindex servicing "partitions_per_pindex" vbs
        num_fts_nodes = len(self._cb_cluster.get_fts_nodes())
        for node in list(nodes_partitions.keys()):
            for uuid, partitions in list(nodes_partitions[node]['pindexes'].items()):
                if len(partitions) > partitions_per_pindex:
                    self.fail("sourcePartitions for pindex %s more than "
                              "max_partitions_per_pindex %s" %
                              (uuid, partitions_per_pindex))
        self.log.info("Validated: Every pIndex serves %s partitions or lesser"
                      % partitions_per_pindex)
        # check 3 - distributed - pindex present on all fts nodes?
        count = 0
        nodes_with_pindexes = len(list(nodes_partitions.keys()))
        if nodes_with_pindexes > 1:
            # poll up to 60s for the plan to spread across all fts nodes
            while nodes_with_pindexes != num_fts_nodes:
                count += 10
                if count == 60:
                    self.fail("Even after 60s of waiting, index is not properly"
                              " distributed,pindexes spread across %s while "
                              "fts nodes are %s" % (list(nodes_partitions.keys()),
                                                    self._cb_cluster.get_fts_nodes()))
                self.sleep(10, "pIndexes not distributed across %s nodes yet"
                           % num_fts_nodes)
                nodes_partitions = self.populate_node_partition_map(index)
                nodes_with_pindexes = len(list(nodes_partitions.keys()))
        else:
            self.log.info("Validated: pIndexes are distributed across %s "
                          % list(nodes_partitions.keys()))
        # check 4 - balance check(almost equal no of pindexes on all fts nodes)
        exp_partitions_per_node = self._num_vbuckets // num_fts_nodes
        self.log.info("Expecting num of partitions in each node in range %s-%s"
                      % (exp_partitions_per_node - partitions_per_pindex,
                         min(1024, exp_partitions_per_node + partitions_per_pindex)))
        for node in list(nodes_partitions.keys()):
            num_node_partitions = 0
            for uuid, partitions in list(nodes_partitions[node]['pindexes'].items()):
                num_node_partitions += len(partitions)
            if abs(num_node_partitions - exp_partitions_per_node) > \
                    partitions_per_pindex:
                self.fail("The source partitions are not evenly distributed "
                          "among nodes, seeing %s on %s"
                          % (num_node_partitions, node))
            self.log.info("Validated: Node %s houses %s pindexes which serve"
                          " %s partitions" %
                          (node,
                           nodes_partitions[node]['pindex_count'],
                           num_node_partitions))
        return True
def generate_random_queries(self, index, num_queries=1, query_type=["match"],
seed=0):
"""
Calls FTS-ES Query Generator for employee dataset
@param num_queries: number of queries to return
@query_type: a list of different types of queries to generate
like: query_type=["match", "match_phrase","bool",
"conjunction", "disjunction"]
"""
from .random_query_generator.rand_query_gen import FTSESQueryGenerator
query_gen = FTSESQueryGenerator(num_queries, query_type=query_type,
seed=seed, dataset=self.dataset,
fields=index.smart_query_fields)
for fts_query in query_gen.fts_queries:
index.fts_queries.append(
json.loads(json.dumps(fts_query, ensure_ascii=False)))
if self.compare_es:
for es_query in query_gen.es_queries:
# unlike fts, es queries are not nested before sending to fts
# so enclose in query dict here
es_query = {'query': es_query}
self.es.es_queries.append(
json.loads(json.dumps(es_query, ensure_ascii=False)))
return index.fts_queries, self.es.es_queries
return index.fts_queries
def generate_random_geo_queries(self, index, num_queries=1, sort=False):
"""
Generates a bunch of geo location and bounding box queries for
fts and es.
:param index: fts index object
:param num_queries: no of queries to be generated
:return: fts or fts and es queries
"""
import random
from .random_query_generator.rand_query_gen import FTSESQueryGenerator
gen_queries = 0
while gen_queries < num_queries:
if bool(random.getrandbits(1)):
fts_query, es_query = FTSESQueryGenerator. \
construct_geo_location_query()
else:
fts_query, es_query = FTSESQueryGenerator. \
construct_geo_bounding_box_query()
index.fts_queries.append(
json.loads(json.dumps(fts_query, ensure_ascii=False)))
if self.compare_es:
self.es.es_queries.append(
json.loads(json.dumps(es_query, ensure_ascii=False)))
gen_queries += 1
if self.es:
return index.fts_queries, self.es.es_queries
else:
return index.fts_queries
    def generate_random_geo_polygon_queries(self, index, num_queries=1, polygon_feature="regular", num_vertices=None):
        """
        Generates a bunch of geo polygon queries for
        fts and es.
        :param num_vertices: number of vertexes in the polygon
        :param polygon_feature: regular or irregular
        :param index: fts index object
        :param num_queries: no of queries to be generated
        :return: fts or fts and es queries
        """
        gen_queries = 0
        from lib.couchbase_helper.data import LON_LAT
        while gen_queries < num_queries:
            # pick a random centre point, then build a polygon around it
            center = random.choice(LON_LAT)
            fts_query, es_query, ave_radius, num_verts, format = FTSESQueryGenerator.construct_geo_polygon_query(center,
                                                                                                                 polygon_feature,
                                                                                                                 num_vertices)
            index.fts_queries.append(
                json.loads(json.dumps(fts_query, ensure_ascii=False)))
            if self.compare_es:
                self.es.es_queries.append(
                    json.loads(json.dumps(es_query, ensure_ascii=False)))
            gen_queries += 1
            self.log.info("query " + str(gen_queries) + " generated for the polygon with center: " + str(
                center) + ", num_vertices: " + str(num_verts) +
                          ", ave_radius: " + str(ave_radius) + " and format: " + str(format))
        if self.es:
            return index.fts_queries, self.es.es_queries
        else:
            return index.fts_queries
def create_index(self, bucket, index_name, index_params=None,
plan_params=None):
"""
Creates a default index given bucket, index_name and plan_params
"""
bucket_password = ""
if bucket.authType == "sasl":
bucket_password = bucket.saslPassword
if not plan_params:
plan_params = self.construct_plan_params()
index = self._cb_cluster.create_fts_index(
name=index_name,
source_name=bucket.name,
index_params=index_params,
plan_params=plan_params)
self.is_index_partitioned_balanced(index)
return index
def create_fts_indexes_all_buckets(self, plan_params=None):
"""
Creates 'n' default indexes for all buckets.
'n' is defined by 'index_per_bucket' test param.
"""
for bucket in self._cb_cluster.get_buckets():
for count in range(self.index_per_bucket):
self.create_index(
bucket,
"%s_index_%s" % (bucket.name, count + 1),
plan_params=plan_params)
def create_alias(self, target_indexes, name=None, alias_def=None):
"""
Creates an alias spanning one or many target indexes
"""
if not name:
name = 'alias_%s' % int(time.time())
if not alias_def:
alias_def = {"targets": {}}
for index in target_indexes:
alias_def['targets'][index.name] = {}
return self._cb_cluster.create_fts_index(name=name,
index_type='fulltext-alias',
index_params=alias_def)
def validate_index_count(self, equal_bucket_doc_count=False,
zero_rows_ok=True, must_equal=None):
"""
Handle validation and error logging for docs indexed
returns a map containing index_names and docs indexed
"""
index_name_count_map = {}
for index in self._cb_cluster.get_indexes():
docs_indexed = index.get_indexed_doc_count()
bucket_count = self._cb_cluster.get_doc_count_in_bucket(
index.source_bucket)
self.log.info("Docs in index {0}={1}, bucket docs={2}".
format(index.name, docs_indexed, bucket_count))
if must_equal and docs_indexed != int(must_equal):
self.fail("Number of docs indexed is not %s" % must_equal)
if docs_indexed == 0 and not zero_rows_ok:
self.fail("No docs were indexed for index %s" % index.name)
if equal_bucket_doc_count:
if docs_indexed != bucket_count:
self.fail("Bucket doc count = %s, index doc count=%s" %
(bucket_count, docs_indexed))
index_name_count_map[index.name] = docs_indexed
return index_name_count_map
def is_index_complete(self, name):
"""
Handle validation and error logging for docs indexed
returns a map containing index_names and docs indexed
"""
for index in self._cb_cluster.get_indexes():
if index.name == name:
docs_indexed = index.get_indexed_doc_count()
bucket_count = self._cb_cluster.get_doc_count_in_bucket(
index.source_bucket)
self.log.info("Docs in index {0}={1}, bucket docs={2}".
format(index.name, docs_indexed, bucket_count))
if docs_indexed != bucket_count:
return False
else:
return True
    def setup_es(self):
        """
        Setup Elastic search - create empty index node defined under
        'elastic' section in .ini
        """
        self.create_index_es()
    def teardown_es(self):
        # Drop all indices on the Elasticsearch comparison node.
        self.es.delete_indices()
def create_es_index_mapping(self, es_mapping, fts_mapping=None):
if not (self.num_custom_analyzers > 0):
self.es.create_index_mapping(index_name="es_index",
es_mapping=es_mapping, fts_mapping=None)
else:
self.es.create_index_mapping(index_name="es_index",
es_mapping=es_mapping, fts_mapping=fts_mapping)
def load_data_es_from_generator(self, generator,
index_name="es_index"):
"""
Loads json docs into ES from a generator, does a blocking load
"""
for key, doc in generator:
doc = json.loads(doc)
self.es.load_data(index_name,
json.dumps(doc, encoding='utf-8'),
doc['_type'],
key)
    def get_zap_docvalue_disksize(self):
        """
        Return the docvalue disk-size figure (first float in the output,
        as a string) reported by `cbft-bleve zap docvalue` for the newest
        .zap file on a random fts node.
        """
        shell = RemoteMachineShellConnection(self._cb_cluster.get_random_fts_node())
        command = 'cd /opt/couchbase/var/lib/couchbase/data/\\@fts; find . -name "*.zap"| sort -n | ' \
                  'tail -1 | xargs -I {} sh -c "/opt/couchbase/bin/cbft-bleve zap docvalue {} | tail -1"'
        output, error = shell.execute_command(command)
        # some builds emit a benign TLS-config line on stderr; ignore it
        if error and "remoteClients registered for tls config updates" not in error[0]:
            self.fail("error running command : {0} , error : {1}".format(command, error))
        self.log.info(output)
        self.log.info(re.findall(r"\d+\.\d+", output[0]))
        return re.findall(r"\d+\.\d+", output[0])[0]
    def create_geo_index_and_load(self):
        """
        Indexes geo spatial data
        Normally when we have a nested object, we first "insert child mapping"
        and then refer to the fields inside it. But, for geopoint, the
        structure "geo" is the data being indexed. Refer: CBQE-4030
        :return: the index object
        """
        if self.compare_es:
            self.log.info("Creating a geo-index on Elasticsearch...")
            self.es.delete_indices()
            # ES side: map the "geo" field as a native geo_point
            es_mapping = {
                "earthquake": {
                    "properties": {
                        "geo": {
                            "type": "geo_point"
                        }
                    }
                }
            }
            self.create_es_index_mapping(es_mapping=es_mapping)
        self.log.info("Creating geo-index ...")
        from .fts_base import FTSIndex
        geo_index = FTSIndex(
            cluster=self._cb_cluster,
            name="geo-index",
            source_name="default",
        )
        # FTS side: disable the default mapping and index only the
        # "earthquake" type's "geo" field as a geopoint
        geo_index.index_definition["params"] = {
            "mapping": {
                "default_mapping": {
                    "dynamic": True,
                    "enabled": False
                },
                "types": {
                    "earthquake": {
                        "enabled": True,
                        "properties": {
                            "geo": {
                                "dynamic": False,
                                "enabled": True,
                                "fields": [{
                                    "docvalues": True,
                                    "include_in_all": True,
                                    "name": "geo",
                                    "type": "geopoint",
                                    "store": False,
                                    "index": True
                                }
                                ]
                            }
                        }
                    }
                }
            }
        }
        geo_index.create()
        self.is_index_partitioned_balanced(geo_index)
        # switch the generator dataset so async_load_data() feeds
        # earthquakes.json
        self.dataset = "earthquakes"
        self.log.info("Loading earthquakes.json ...")
        self.async_load_data()
        self.sleep(10, "Waiting to load earthquakes.json ...")
        self.wait_for_indexing_complete()
        return geo_index
    def create_index_es(self, index_name="es_index"):
        """Create an empty ES index using the bleve-equivalent standard
        analyzer so FTS and ES results stay comparable."""
        self.es.create_empty_index_with_bleve_equivalent_std_analyzer(index_name)
        self.log.info("Created empty index %s on Elastic Search node with "
                      "custom standard analyzer(default)"
                      % index_name)
def get_generator(self, dataset, num_items, start=0, encoding="utf-8",
lang="EN"):
"""
Returns a generator depending on the dataset
"""
if dataset == "emp":
return JsonDocGenerator(name="emp",
encoding=encoding,
start=start,
end=start + num_items)
elif dataset == "wiki":
return WikiJSONGenerator(name="wiki",
lang=lang,
encoding=encoding,
start=start,
end=start + num_items)
elif dataset == "earthquakes":
return GeoSpatialDataLoader(name="earthquake",
start=start,
end=start + num_items)
def populate_create_gen(self):
if self.dataset == "all":
# only emp and wiki
self.create_gen = []
self.create_gen.append(self.get_generator(
"emp", num_items=self._num_items // 2))
self.create_gen.append(self.get_generator(
"wiki", num_items=self._num_items // 2))
else:
self.create_gen = self.get_generator(
self.dataset, num_items=self._num_items)
    def populate_update_gen(self, fields_to_update=None):
        """
        Build self.update_gen covering the first _perc_upd percent of the
        created keys; for "emp" docs the generator also mutates the given
        fields. Requires populate_create_gen() to have run first.
        """
        if self.dataset == "emp":
            self.update_gen = copy.deepcopy(self.create_gen)
            self.update_gen.start = 0
            self.update_gen.end = int(self.create_gen.end *
                                      (float)(self._perc_upd) / 100)
            self.update_gen.update(fields_to_update=fields_to_update)
        elif self.dataset == "wiki":
            self.update_gen = copy.deepcopy(self.create_gen)
            self.update_gen.start = 0
            self.update_gen.end = int(self.create_gen.end *
                                      (float)(self._perc_upd) / 100)
        elif self.dataset == "all":
            # create_gen is a list here: [emp_gen, wiki_gen]
            self.update_gen = []
            self.update_gen = copy.deepcopy(self.create_gen)
            for itr, _ in enumerate(self.update_gen):
                self.update_gen[itr].start = 0
                self.update_gen[itr].end = int(self.create_gen[itr].end *
                                               (float)(self._perc_upd) / 100)
                if self.update_gen[itr].name == "emp":
                    self.update_gen[itr].update(fields_to_update=fields_to_update)
    def populate_delete_gen(self):
        """Build self.delete_gen as DELETE-op generators covering the LAST
        _perc_del percent of the created docs (start is computed so that
        start..end spans that tail slice)."""
        if self.dataset == "emp":
            self.delete_gen = JsonDocGenerator(
                self.create_gen.name,
                op_type=OPS.DELETE,
                encoding="utf-8",
                # Delete the tail _perc_del% of the created range.
                start=int((self.create_gen.end)
                          * (float)(100 - self._perc_del) / 100),
                end=self.create_gen.end)
        elif self.dataset == "wiki":
            self.delete_gen = WikiJSONGenerator(name="wiki",
                                                encoding="utf-8",
                                                start=int((self.create_gen.end)
                                                          * (float)(100 - self._perc_del) / 100),
                                                end=self.create_gen.end,
                                                op_type=OPS.DELETE)
        elif self.dataset == "all":
            # create_gen is a list here: [emp, wiki].
            self.delete_gen = []
            self.delete_gen.append(JsonDocGenerator(
                "emp",
                op_type=OPS.DELETE,
                encoding="utf-8",
                start=int((self.create_gen[0].end)
                          * (float)(100 - self._perc_del) / 100),
                end=self.create_gen[0].end))
            self.delete_gen.append(WikiJSONGenerator(name="wiki",
                                                     encoding="utf-8",
                                                     start=int((self.create_gen[1].end)
                                                               * (float)(100 - self._perc_del) / 100),
                                                     end=self.create_gen[1].end,
                                                     op_type=OPS.DELETE))
def load_data(self):
"""
Blocking call to load data to Couchbase and ES
"""
if self._dgm_run:
self.create_gen = self._cb_cluster.load_all_buckets_till_dgm(
self._active_resident_ratio,
self.compare_es)
return
load_tasks = self.async_load_data()
for task in load_tasks:
task.result()
self.log.info("Loading phase complete!")
def async_load_data(self):
"""
For use to run with parallel tasks like rebalance, failover etc
"""
load_tasks = []
self.populate_create_gen()
if self.compare_es:
gen = copy.deepcopy(self.create_gen)
if isinstance(gen, list):
for generator in gen:
load_tasks.append(self.es.async_bulk_load_ES(index_name='es_index',
gen=generator,
op_type='create'))
else:
load_tasks.append(self.es.async_bulk_load_ES(index_name='es_index',
gen=gen,
op_type='create'))
load_tasks += self._cb_cluster.async_load_all_buckets_from_generator(
self.create_gen)
return load_tasks
def run_query_and_compare(self, index=None, es_index_name=None, n1ql_executor=None):
"""
Runs every fts query and es_query and compares them as a single task
Runs as many tasks as there are queries
"""
tasks = []
fail_count = 0
failed_queries = []
for count in range(0, len(index.fts_queries)):
tasks.append(self._cb_cluster.async_run_fts_query_compare(
fts_index=index,
es=self.es,
es_index_name=es_index_name,
query_index=count,
n1ql_executor=n1ql_executor))
num_queries = len(tasks)
for task in tasks:
task.result()
if not task.passed:
fail_count += 1
failed_queries.append(task.query_index + 1)
if fail_count:
self.fail("%s out of %s queries failed! - %s" % (fail_count,
num_queries,
failed_queries))
else:
self.log.info("SUCCESS: %s out of %s queries passed"
% (num_queries - fail_count, num_queries))
def grab_fts_diag(self):
"""
Grab fts diag until it is handled by cbcollect info
"""
from http.client import BadStatusLine
import os
import urllib.request, urllib.error, urllib.parse
import gzip
import base64
path = TestInputSingleton.input.param("logs_folder", "/tmp")
for serverInfo in self._cb_cluster.get_fts_nodes():
if not self.__is_cluster_run():
serverInfo.fts_port = 8094
self.log.info("Grabbing fts diag from {0}...".format(serverInfo.ip))
diag_url = "http://{0}:{1}/api/diag".format(serverInfo.ip,
serverInfo.fts_port)
self.log.info(diag_url)
try:
req = urllib.request.Request(diag_url)
authorization = base64.encodebytes('%s:%s' % (
self._input.membase_settings.rest_username,
self._input.membase_settings.rest_password))
req.headers = {
'Content-Type': 'application/x-www-form-urlencoded',
'Authorization': 'Basic %s' % authorization,
'Accept': '*/*'}
filename = "{0}_fts_diag.json".format(serverInfo.ip)
page = urllib.request.urlopen(req)
with open(path + '/' + filename, 'wb') as output:
os.write(1, "downloading {0} ...".format(serverInfo.ip))
while True:
buffer = page.read(65536)
if not buffer:
break
output.write(buffer)
os.write(1, ".")
file_input = open('{0}/{1}'.format(path, filename), 'rb')
zipped = gzip.open("{0}/{1}.gz".format(path, filename), 'wb')
zipped.writelines(file_input)
file_input.close()
zipped.close()
os.remove(path + '/' + filename)
print(("downloaded and zipped diags @ : {0}/{1}".format(path,
filename)))
except urllib.error.URLError as error:
print(("unable to obtain fts diags from {0}".format(diag_url)))
except BadStatusLine:
print(("unable to obtain fts diags from {0}".format(diag_url)))
except Exception as e:
print(("unable to obtain fts diags from {0} :{1}".format(diag_url, e)))
def backup_pindex_data(self, server):
remote = RemoteMachineShellConnection(server)
stamp = time.strftime("%d_%m_%Y_%H_%M")
data_dir = NodeHelper.get_data_dir(server)
try:
info = remote.extract_remote_info()
if info.type.lower() != 'windows':
self.log.info("Backing up pindex data files from {0}".format(server.ip))
command = "mkdir -p /tmp/backup_pindex_data/{0};" \
"zip -r /tmp/backup_pindex_data/{0}/fts_pindex_data.zip " \
"{1}/data/@fts/*".format(stamp, data_dir)
remote.execute_command(command)
output, error = remote.execute_command("ls -la /tmp/backup_pindex_data/{0}".format(stamp))
for o in output:
print(o)
self.log.info(
"***pindex files for {0} are copied to /tmp/backup_pindex_data/{1} on {0}".format(server.ip, stamp))
remote.disconnect()
return True
except Exception as ex:
print(ex)
return False
def build_sort_params(self):
"""
This method builds the value for the sort param that is passed to the
query request. It handles simple or advanced sorting based on the
inputs passed in the conf file
:return: Value for the sort param
"""
# TBD :
# Cases where there are multiple sort fields - one advanced, one simple
# Cases where there are multiple sort fields - one advanced using by 'field', and another using by 'id' or 'score'
sort_params = []
if self.advanced_sort or self.sort_fields_list:
if self.advanced_sort:
for sort_field in self.sort_fields_list:
params = {}
params["by"] = self.sort_by
if self.sort_by == "field":
params["field"] = sort_field
params["mode"] = self.sort_mode
params["desc"] = self.sort_desc
params["missing"] = self.sort_missing
sort_params.append(params)
else:
sort_params = self.sort_fields_list
else:
return None
return sort_params
    def create_test_dataset(self, server, docs):
        """
        Creates documents using MemcachedClient in the default bucket
        from a given list of json data
        :param server: Server on which docs are to be loaded
        :param docs: List of json data
        :return: None
        """
        memc_client = KVStoreAwareSmartClient(RestConnection(server),
                                              'default')
        count = 1
        for i, doc in enumerate(docs):
            # Retry each set() until it succeeds, sleeping 5s between tries.
            while True:
                try:
                    # Keys are 1-based positions in the docs list.
                    memc_client.set(key=str(i + 1),
                                    value=json.dumps(doc))
                    break
                except Exception as e:
                    self.log.error(e)
                    self.sleep(5)
                    # NOTE(review): the retry budget is shared across ALL
                    # docs — `count` is never reset per document; confirm
                    # this is intentional.
                    count += 1
                    if count > 5:
                        raise e
    def wait_till_items_in_bucket_equal(self, items=None):
        """
        Waits till items in bucket is equal to the docs loaded
        :param items: the item count that the test should wait to reach
        after loading (defaults to _num_items // 2)
        :return: Nothing
        """
        # Skipped entirely for DGM runs, where the load target is a
        # resident ratio rather than an item count.
        if not self._dgm_run:
            counter = 0
            if not items:
                items = self._num_items // 2
            # First, retry until bucket stats are readable at all (a
            # KeyError is raised while the bucket is still warming up).
            while True:
                try:
                    buckets = self._cb_cluster.get_buckets()
                    if len(buckets) == 0:
                        self.log.error("No buckets found!")
                        break
                    # NOTE(review): doc_count is fetched only to probe that
                    # stats are available; its value is never used.
                    doc_count = self._cb_cluster.get_doc_count_in_bucket(buckets[0])
                    break
                except KeyError:
                    self.log.info("bucket stats not ready yet...")
                    self.sleep(2)
            # Then poll each bucket (counter is shared across buckets, so
            # the total wait is capped at ~21s) until it reaches `items`.
            for bucket in self._cb_cluster.get_buckets():
                while items > self._cb_cluster.get_doc_count_in_bucket(
                        bucket):
                    self.log.info("Docs in bucket {0} = {1}".
                                  format(
                                      bucket.name,
                                      self._cb_cluster.get_doc_count_in_bucket(
                                          bucket)))
                    self.sleep(1, "sleeping 1s to allow for item loading")
                    counter += 1
                    if counter > 20:
                        self.log.info("Exiting load sleep loop after 21s")
                        return
| 42.804226 | 129 | 0.55628 |
acf2dd63cad8c78559ab9027c648086fd50779a7 | 2,655 | py | Python | examples/cattools_funs_demos.py | CHEN-Zhaohui/geoist | 06a00db3e0ed3d92abf3e45b7b3bfbef6a858a5b | [
"MIT"
] | null | null | null | examples/cattools_funs_demos.py | CHEN-Zhaohui/geoist | 06a00db3e0ed3d92abf3e45b7b3bfbef6a858a5b | [
"MIT"
] | null | null | null | examples/cattools_funs_demos.py | CHEN-Zhaohui/geoist | 06a00db3e0ed3d92abf3e45b7b3bfbef6a858a5b | [
"MIT"
] | null | null | null | """
Earthquake Catalog Analysis
"""
from os.path import dirname
import numpy as np
#local import
from geoist.cattools import Catalogue as Cat
from geoist.cattools import Exploration as Exp
from geoist.cattools import MapTools as Map
from geoist.cattools import Selection as Sel
from geoist.cattools import Seismicity as Sem
from geoist.cattools import Declusterer as Declus
from geoist.cattools import Smoothing as Sm
from geoist.cattools import CatUtils as Ct
#-----------------------------------------------------------------------------------------
pathname = dirname(__file__)
# Column layout of the ISC-GEM CSV; empty strings mark ignored columns.
H = ['Id','','Year','Month','Day','Hour','Minute','Second',
     'Longitude','Latitude','','','','Depth','DepError',
     'MagSize','MagError','','','','','','','','','']
Db = Cat.Database('ISC-GEM')
Db.Import(pathname+'/data/isc-gem-v3.csv',Header=H, SkipLine=1, Delimiter=',')
Db.SetField('LocCode','ISC-GEM')
Db.SetField('MagCode','ISC-GEM')
Db.SetField('MagType','MW')
#-----------------------------------------------------------------------------------------
# Search Area (China) using internal filter
lon = [70, 135]
lat = [15, 55]
# Earthquake selection: keep events inside the lat/lon box
Db.Filter('Latitude',lat[0],Opr='>=')
Db.Filter('Latitude',lat[1],Opr='<=')
Db.Filter('Longitude',lon[0],Opr='>=')
Db.Filter('Longitude',lon[1],Opr='<=')
Exp.AgencyReport(Db, 'L')
# 2-D time-series plots
Exp.MagTimePlot(Db)
Exp.MagTimeBars(Db)
Exp.RateDensityPlot(Db)
# Gutenberg-Richter (G-R) relation: magnitude histogram
Enum, Mbin =Exp.GetKeyHisto(Db,'MagSize',Bnum=10, Norm=False)
Minc= (max(Mbin)-min(Mbin))/10.
# Fit the b-value
a,b = Sem.MfdOptimize(Enum, Mbin, Minc, max(Mbin))
print('b-value=',b)
# Recurrence: plot the magnitude-frequency distribution
Sem.MfdPlot(a,b, max(Mbin),Enum=Enum, Ecum=np.cumsum(Enum[::-1])[::-1], Mbin=Mbin, Minc=[Minc])
# Duplicate-event detection (time window 60 s, distance window 50 km)
Log = Sel.MergeDuplicate(Db,Twin=60.,Swin=50.,Log=1)
Exp.DuplicateCheck(Log)
# Declustering: remove aftershocks via window search
Dbm, Log1 = Declus.WindowSearch(Db)
# Catalogue summary
Dbm.Info()
# Extract hypocenters from full and declustered catalogues
x1,y1,z1 = Exp.GetHypocenter(Db)
x2,y2,z2 = Exp.GetHypocenter(Dbm)
p = [(90.,20.),(90.,40.),(105.,40.),(105.,20.),(90.,20.)]
P = Ct.Polygon()
P.Load(p)
cfg = {'Bounds': [70., 15., 135., 55.],
       'FigSize': [8., 6.],
       'Background': ['none',[0.9,0.8,0.6],[0.5,0.8,1.]],
       'Grid': [10., 10.]}
M = Map.GeoMap(cfg)
M.BasePlot()
M.DrawBounds()
M.DrawGrid()
# Epicenter distribution map: all events vs. declustered mainshocks
M.PointPlot(x1, y1, Set=['o','g',5,1], Label='All')
M.PointPlot(x2, y2, Set=['*','r',2,1], Label='Main')
M.AreaPlot(P.x, P.y, Set=['y',0.5,'k',1])
# Smoothed seismicity inside the polygon
wkt = Ct.XYToWkt(P.x, P.y)
xsm, ysm, asm = Sm.SmoothMFD(Db, 1., wkt, Delta=0.5)
#M.PointPlot(xsm, ysm, Set=['o','b',2,1], Label='Grid')
M.MeshPlot(xsm, ysm, asm)
M.Legend()
M.Title('Earthquakes in China')
M.Show()
#print('dump to:'+pathname+'/data/isc-gem-v3.bin')
#Db.Dump(pathname+'/data/isc-gem-v3.bin')
| 30.170455 | 95 | 0.609416 |
acf2de03347393bf462bcf8d10d7f4ff2d64d86e | 2,579 | py | Python | test/functional/p2p_invalid_tx.py | ckti-wagerr-raspbian/wagerr | 026729ce50cdedaa3f7a3428efcf9cf382194b1f | [
"MIT"
] | 121 | 2017-05-28T20:54:55.000Z | 2021-10-09T02:57:22.000Z | test/functional/p2p_invalid_tx.py | ckti-wagerr-raspbian/wagerr | 026729ce50cdedaa3f7a3428efcf9cf382194b1f | [
"MIT"
] | 123 | 2017-06-19T23:07:50.000Z | 2021-03-23T07:38:46.000Z | test/functional/p2p_invalid_tx.py | ckti-wagerr-raspbian/wagerr | 026729ce50cdedaa3f7a3428efcf9cf382194b1f | [
"MIT"
] | 124 | 2018-02-17T02:28:40.000Z | 2021-11-20T10:17:56.000Z | #!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import ComparisonTestFramework
from test_framework.comptool import TestManager, TestInstance, RejectResult
from test_framework.blocktools import *
import time
'''
In this test we connect to one node over p2p, and test tx requests.
'''
# Use the ComparisonTestFramework with 1 node: only use --testbinary.
class InvalidTxRequestTest(ComparisonTestFramework):
    '''
    Can either run this test as 1 node with expected answers, or two and compare them.
    Change the "outcome" variable from each TestInstance object to only do the comparison.
    '''

    def __init__(self):
        super().__init__()
        # Single node: compare against expected answers only.
        self.num_nodes = 1

    def run_test(self):
        """Wire up the comptool TestManager and run the scenarios yielded
        by get_tests()."""
        test = TestManager(self, self.options.tmpdir)
        test.add_all_connections(self.nodes)
        self.tip = None
        self.block_time = None
        NetworkThread().start()  # Start up network handling in another thread
        test.run()

    def get_tests(self):
        """Generator of TestInstances consumed by TestManager.

        Scenario: mine a coinbase block, mature it with 100 blocks, then
        broadcast an invalid spend that must be rejected.
        """
        if self.tip is None:
            self.tip = int("0x" + self.nodes[0].getbestblockhash(), 0)
        self.block_time = int(time.time()) + 1

        '''
        Create a new block with an anyone-can-spend coinbase
        '''
        height = 1
        block = create_block(self.tip, create_coinbase(height), self.block_time)
        self.block_time += 1
        block.solve()
        # Save the coinbase for later
        self.block1 = block
        self.tip = block.sha256
        height += 1
        # [block, True] => the block is expected to be accepted.
        yield TestInstance([[block, True]])

        '''
        Now we need that block to mature so we can spend the coinbase.
        '''
        test = TestInstance(sync_every_block=False)
        for i in range(100):
            block = create_block(self.tip, create_coinbase(height), self.block_time)
            block.solve()
            self.tip = block.sha256
            self.block_time += 1
            test.blocks_and_transactions.append([block, True])
            height += 1
        yield test

        # b'\x64' is OP_NOTIF
        # Transaction will be rejected with code 16 (REJECT_INVALID)
        tx1 = create_transaction(self.block1.vtx[0], 0, b'\x64', 50 * COIN - 12000)
        yield TestInstance([[tx1, RejectResult(16, b'mandatory-script-verify-flag-failed')]])

        # TODO: test further transactions...
# Script entry point: run the comparison test directly.
if __name__ == '__main__':
    InvalidTxRequestTest().main()
| 34.386667 | 93 | 0.649089 |
acf2df83edd85ad4ed0f85cdd00cd0d40bf8d90e | 9,150 | py | Python | trabalho_atr_pt_1.py | juliarezender/pythonthread | 87bfc7f4bdce387e4c114a7df3e6aa744d35ece1 | [
"Apache-2.0"
] | null | null | null | trabalho_atr_pt_1.py | juliarezender/pythonthread | 87bfc7f4bdce387e4c114a7df3e6aa744d35ece1 | [
"Apache-2.0"
] | null | null | null | trabalho_atr_pt_1.py | juliarezender/pythonthread | 87bfc7f4bdce387e4c114a7df3e6aa744d35ece1 | [
"Apache-2.0"
] | null | null | null | import threading
import time
import math
import logging
from simple_pid import PID
import socket
import os
def calculo_altura_integral_tanque_1():
"""
Função que integra o valor retornado pela função calculo diferencial tanque 1, para obter o valor
de altura no instante futuro, e assim controlar esse valor posteriormente na função controlador.
Para integrar tal valor, utiliza-se o metodo de integração Runge-Kutta.
"""
global altura_integral_1
global tempo_1
variacao_tempo = 0.2
while True:
mutex.acquire()
# Calcular passos parciais
k1 = calculo_diferencial_tanque_1(tempo_1, altura_integral_1)
k2 = calculo_diferencial_tanque_1(
tempo_1 + variacao_tempo / 2, altura_integral_1 + variacao_tempo * k1 / 2
)
k3 = calculo_diferencial_tanque_1(
tempo_1 + variacao_tempo / 2, altura_integral_1 + variacao_tempo * k2 / 2
)
k4 = calculo_diferencial_tanque_1(
tempo_1 + variacao_tempo, altura_integral_1 + variacao_tempo * k3
)
# Calcular media ponderada dos passos parciais
altura_integral_1 = variacao_tempo / 6 * (k1 + 2 * k2 + 2 * k3 + k4)
tempo_1 += variacao_tempo
mutex.release()
# Thread se repete a cada 200 ms
time.sleep(0.2)
def calculo_altura_integral_tanque_2():
"""
Função que integra o valor retornado pela função calculo diferencial tanque 1, para obter o valor
de altura no instante futuro, e assim controlar esse valor posteriormente na função controlador.
Para integrar tal valor, utiliza-se o metodo de integração Runge-Kutta.
"""
global altura_integral_2
global tempo_2
variacao_tempo = 0.2
while True:
mutex.acquire()
# Calcular passos parciais
k1 = calculo_diferencial_tanque_2(tempo_2, altura_integral_2)
k2 = calculo_diferencial_tanque_2(
tempo_2 + variacao_tempo / 2, altura_integral_2 + variacao_tempo * k1 / 2
)
k3 = calculo_diferencial_tanque_2(
tempo_2 + variacao_tempo / 2, altura_integral_2 + variacao_tempo * k2 / 2
)
k4 = calculo_diferencial_tanque_2(
tempo_2 + variacao_tempo, altura_integral_2 + variacao_tempo * k3
)
# Calcular media ponderada dos passos parciais
altura_integral_2 = variacao_tempo / 6 * (k1 + 2 * k2 + 2 * k3 + k4)
tempo_2 += variacao_tempo
mutex.release()
# Thread se repete a cada 200 ms
time.sleep(0.2)
def calculo_diferencial_tanque_1(tempo, altura_integral_1):
    """Compute the tank-1 level derivative (dh/dt) per the assignment formula.

    NOTE(review): the *altura_integral_1* parameter is never used — the
    controlled level is overwritten with abs(sin(tempo)); confirm against
    the assignment whether the integrated level should be used instead.
    """
    global altura_controlada_1
    global q_output_tanque_1
    global q_input_tanque_2
    global altura_derivada_1
    global q_input_tanque_1
    # Truncated-cone tank geometry: bottom radius r, top radius R, height H.
    R = 4
    r = 2
    H = 4
    coeficiente = 2
    altura_controlada_1 = abs(math.sin(tempo))  # sinusoidal level signal
    # Outflow proportional to sqrt(level).
    q_output_tanque_1 = coeficiente * math.sqrt(altura_controlada_1)
    # NOTE(review): only the q_input_tanque_2 term is divided by the
    # cross-sectional area here — verify the intended parenthesization
    # against the assignment formula.
    altura_derivada_1 = (
        q_input_tanque_1
        - q_output_tanque_1
        - q_input_tanque_2 / (math.pi * (r + ((R - r) / H) * altura_controlada_1) ** 2)
    )
    return altura_derivada_1
def calculo_diferencial_tanque_2(tempo, altura_integral_2):
    """Compute the tank-2 level derivative (dh/dt) per the assignment formula.

    NOTE(review): the *altura_integral_2* parameter is never used — the
    controlled level is overwritten with abs(sin(tempo)); confirm against
    the assignment whether the integrated level should be used instead.
    """
    global altura_controlada_2
    global q_output_tanque_2
    global q_input_tanque_2
    # Truncated-cone tank geometry: bottom radius r, top radius R, height H.
    R = 3
    r = 1
    H = 3
    coeficiente = 2
    altura_controlada_2 = abs(math.sin(tempo))  # sinusoidal level signal
    # Outflow proportional to sqrt(level).
    q_output_tanque_2 = coeficiente * math.sqrt(altura_controlada_2)
    # NOTE(review): only the outflow term is divided by the cross-sectional
    # area here — verify the intended parenthesization.
    altura_derivada_2 = q_input_tanque_2 - q_output_tanque_2 / (
        math.pi * (r + ((R - r) / H) * altura_controlada_2) ** 2
    )
    return altura_derivada_2
def controlador():
    """
    Soft-PLC thread: computes both tank inflows with PID controllers from
    the integrated levels and streams a status report to the historian
    server over TCP every 100 ms (twice the simulation rate).
    """
    global altura_integral_2
    global q_input_tanque_2
    global altura_integral_1
    global q_input_tanque_1
    global altura_desejada_tanque_1
    global altura_desejada_tanque_2
    HOST = '127.0.0.1'  # The server's hostname or IP address
    PORT = 65434  # The port used by the server
    # Create the controllers once: the original rebuilt them on every cycle,
    # which reset the integral/derivative state and reduced the PID to a
    # pure proportional controller.
    pid_1 = PID(Kp=1, Ki=0.1, Kd=0.05, setpoint=altura_desejada_tanque_1)
    pid_2 = PID(Kp=1, Ki=0.1, Kd=0.05, setpoint=altura_desejada_tanque_2)
    while True:
        mutex.acquire()
        try:
            # Track user-edited setpoints without recreating the PIDs.
            pid_1.setpoint = altura_desejada_tanque_1
            pid_2.setpoint = altura_desejada_tanque_2
            q_input_tanque_2 = pid_2(altura_integral_2)
            q_input_tanque_1 = pid_1(altura_integral_1)
            message = [
                "Altura do tanque 1: {} Vazao do tanque 1: {}, Valor setado pelo user: {}".format(
                    altura_integral_1, q_input_tanque_1, altura_desejada_tanque_1
                ),
                " ",
                "Altura do tanque 2: {} Vazao do tanque 2: {}, Valor setado pelo user: {}".format(
                    altura_integral_2, q_input_tanque_2, altura_desejada_tanque_2
                ),
            ]
            with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
                s.connect((HOST, PORT))
                for item in message:
                    s.sendall(item.encode())
        finally:
            mutex.release()
        # Thread se repete a cada 100 ms, ou seja, com o dobro da frenquencia das outras threads
        time.sleep(0.1)
def log_informacao():
    """Logging thread: appends both tank levels and inflows to log.txt
    every 100 ms."""
    # Configure logging once up front: logging.basicConfig() is a no-op on
    # every call after the first, so the original's per-iteration calls
    # were wasted work.
    logging.basicConfig(
        filename="log.txt",
        level="DEBUG",
        format="%(levelname)s:%(asctime)s:%(message)s",
    )
    logger = logging.getLogger()
    while True:
        mutex.acquire()
        try:
            logger.debug(
                "Altura tanque 1: {}, entrada tanque 1: {}".format(altura_integral_1, q_input_tanque_1)
            )
            logger.debug(
                "Altura tanque 2: {}, entrada tanque 2: {}".format(altura_integral_2, q_input_tanque_2)
            )
        finally:
            mutex.release()
        time.sleep(0.1)
def selecionar_altura_tanque():
    """Console UI thread: lets the user change the level setpoints, then
    sleeps 2 s before prompting again.

    NOTE(review): the inner input() calls block while `mutex` is held,
    which stalls the simulation/control/logging threads until the user
    answers — confirm this is acceptable.
    """
    global altura_desejada_tanque_1
    global altura_desejada_tanque_2
    while True:
        alturas_a_serem_modificadas = input("Voce deseja modificar quantos valores de referencia? Escolha 0, 1 ou 2 ")
        mutex.acquire()
        if alturas_a_serem_modificadas == "1":
            unica_altura_a_mudar = input("Qual altura vc deseja mudar? 1 ou 2")
            if unica_altura_a_mudar == "1":
                # float(int(...)) truncates fractional input to whole units.
                altura_desejada_tanque_1 = float(int(input("Selecione valor de 0 a 4 para a altura do tanque 1:")))
            elif unica_altura_a_mudar == "2":
                altura_desejada_tanque_2 = float(int(input("Selecione valor de 0 a 3 para a altura do tanque 2:")))
            else:
                print("Selecione um valor valido: 1 ou 2")
        elif alturas_a_serem_modificadas == "2":
            altura_desejada_tanque_1 = float(int(input("Selecione valor de 0 a 4 para a altura do tanque 1:")))
            altura_desejada_tanque_2 = float(int(input("Selecione valor de 0 a 3 para a altura do tanque 2:")))
        elif alturas_a_serem_modificadas == "0":
            pass
        else:
            print("Selecione um valor valido: 0, 1 ou 2")
        mutex.release()
        time.sleep(2)
def servidor():
    """Historian server (parent process): accepts TCP connections from the
    controller thread and appends each received report to historiador.txt.
    Runs forever."""
    HOST = '127.0.0.1'  # Standard loopback interface address (localhost)
    PORT = 65434  # Port to listen on (non-privileged ports are > 1023)
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        s.bind((HOST, PORT))
        print('Socket binded to port', PORT)
        s.listen(3)
        print('socket is listening')
        with open("historiador.txt", "w") as text_file:
            while True:
                conn, addr = s.accept()
                # Close each accepted connection — the original leaked one
                # socket per controller cycle.
                with conn:
                    data = conn.recv(1024)
                text_file.write(str(data))
                text_file.write("\n")
                # Flush so the historian survives the process being killed
                # (the loop never ends, so the file is never closed normally).
                text_file.flush()
def parentchild():
    """Fork the process: the child runs all simulation/control/UI threads,
    the parent runs the TCP historian server."""
    n = os.fork()
    if n == 0:
        # Child process: spin up the worker threads.
        # Criando as threads
        process_thread_1 = threading.Thread(target=calculo_altura_integral_tanque_1)
        process_thread_2 = threading.Thread(target=calculo_altura_integral_tanque_2)
        softPLC = threading.Thread(target=controlador)
        logger_thread = threading.Thread(target=log_informacao)
        interface_thread = threading.Thread(target=selecionar_altura_tanque)
        # Começando novas Threads
        process_thread_1.start()
        process_thread_2.start()
        softPLC.start()
        logger_thread.start()
        interface_thread.start()
        threads = []
        threads.append(process_thread_1)
        threads.append(process_thread_2)
        threads.append(softPLC)
        threads.append(logger_thread)
        threads.append(interface_thread)
        # NOTE(review): every worker loops forever, so these joins never
        # return and "Fim...." is never printed in normal operation.
        for t in threads:
            t.join()
        print("Fim....")
    else:
        # Parent process: serve the historian endpoint.
        servidor()
# ---- Shared state, protected by `mutex` across all threads ----
mutex = threading.Lock()
altura_integral_1 = 0  # integrated level of tank 1
altura_integral_2 = 0  # integrated level of tank 2
q_input_tanque_1 = 2   # inflow of tank 1 (updated by the PID controller)
q_input_tanque_2 = 2   # inflow of tank 2 (updated by the PID controller)
tempo_2 = 0            # simulation time of tank 2
tempo_1 = 0            # simulation time of tank 1
altura_desejada_tanque_1 = 2  # user setpoint for tank 1 level
altura_desejada_tanque_2 = 1  # user setpoint for tank 2 level

# Entry point: fork into simulation child + historian parent.
# (The original final line carried trailing dataset-extraction residue that
# made the file syntactically invalid; it has been removed.)
parentchild()
acf2df8861081ac3fb6ee1654f96b95165ce86bb | 4,764 | py | Python | tests/components/omnilogic/test_config_flow.py | tbarbette/core | 8e58c3aa7bc8d2c2b09b6bd329daa1c092d52d3c | [
"Apache-2.0"
] | 6 | 2017-08-02T19:26:39.000Z | 2020-03-14T22:47:41.000Z | tests/components/omnilogic/test_config_flow.py | tbarbette/core | 8e58c3aa7bc8d2c2b09b6bd329daa1c092d52d3c | [
"Apache-2.0"
] | 60 | 2020-08-03T07:32:56.000Z | 2022-03-31T06:02:07.000Z | tests/components/omnilogic/test_config_flow.py | tbarbette/core | 8e58c3aa7bc8d2c2b09b6bd329daa1c092d52d3c | [
"Apache-2.0"
] | 14 | 2018-08-19T16:28:26.000Z | 2021-09-02T18:26:53.000Z | """Test the Omnilogic config flow."""
from unittest.mock import patch
from omnilogic import LoginException, OmniLogicException
from homeassistant import config_entries, data_entry_flow, setup
from homeassistant.components.omnilogic.const import DOMAIN
from tests.common import MockConfigEntry
DATA = {"username": "test-username", "password": "test-password"}
async def test_form(hass):
    """Test we get the form."""
    await setup.async_setup_component(hass, "persistent_notification", {})
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    # The user step must render an empty form first.
    assert result["type"] == "form"
    assert result["errors"] == {}

    # Happy path: OmniLogic.connect is mocked to succeed, and setup hooks
    # are mocked so submitting the form creates an entry without real I/O.
    with patch(
        "homeassistant.components.omnilogic.config_flow.OmniLogic.connect",
        return_value=True,
    ), patch(
        "homeassistant.components.omnilogic.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.omnilogic.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            DATA,
        )
        await hass.async_block_till_done()

    assert result2["type"] == "create_entry"
    assert result2["title"] == "Omnilogic"
    assert result2["data"] == DATA
    # Setup must have been triggered exactly once each.
    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1
async def test_already_configured(hass):
    """Test config flow when Omnilogic component is already setup."""
    # Pre-seed an existing entry so the flow hits the single-instance guard.
    MockConfigEntry(domain="omnilogic", data=DATA).add_to_hass(hass)

    await setup.async_setup_component(hass, "persistent_notification", {})
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert result["type"] == "abort"
    assert result["reason"] == "single_instance_allowed"
async def test_with_invalid_credentials(hass):
    """Test with invalid credentials."""
    await setup.async_setup_component(hass, "persistent_notification", {})
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )

    # A LoginException from the API must surface as invalid_auth and keep
    # the user on the form.
    with patch(
        "homeassistant.components.omnilogic.OmniLogic.connect",
        side_effect=LoginException,
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            DATA,
        )

    assert result["type"] == "form"
    assert result["step_id"] == "user"
    assert result["errors"] == {"base": "invalid_auth"}
async def test_form_cannot_connect(hass):
    """Test if invalid response or no connection returned from Hayward."""
    await setup.async_setup_component(hass, "persistent_notification", {})
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )

    # An OmniLogicException must surface as cannot_connect on the user step.
    with patch(
        "homeassistant.components.omnilogic.OmniLogic.connect",
        side_effect=OmniLogicException,
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            DATA,
        )

    assert result["type"] == "form"
    assert result["step_id"] == "user"
    assert result["errors"] == {"base": "cannot_connect"}
async def test_with_unknown_error(hass):
    """Test with unknown error response from Hayward."""
    await setup.async_setup_component(hass, "persistent_notification", {})
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )

    # Any unexpected exception type must map to the generic "unknown" error.
    with patch(
        "homeassistant.components.omnilogic.OmniLogic.connect",
        side_effect=Exception,
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            DATA,
        )

    assert result["type"] == "form"
    assert result["step_id"] == "user"
    assert result["errors"] == {"base": "unknown"}
async def test_option_flow(hass):
    """Test option flow."""
    entry = MockConfigEntry(domain=DOMAIN, data=DATA)
    entry.add_to_hass(hass)

    assert not entry.options

    # async_setup_entry is mocked so opening the options flow does not try
    # to really set up the integration.
    with patch(
        "homeassistant.components.omnilogic.async_setup_entry", return_value=True
    ):
        result = await hass.config_entries.options.async_init(
            entry.entry_id,
            data=None,
        )

        assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
        assert result["step_id"] == "init"

        result = await hass.config_entries.options.async_configure(
            result["flow_id"],
            user_input={"polling_interval": 9},
        )

        assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
        assert result["title"] == ""
        # The submitted option must be persisted on the created entry.
        assert result["data"]["polling_interval"] == 9
| 31.973154 | 81 | 0.673804 |
acf2df9f040150c32e1ac8e0eef035cb2840fa93 | 4,045 | py | Python | pydhsfw/messages.py | tetrahedron-technologies/pydhsfw | 79180afce1ce7c8804fec2276c41e7247040022b | [
"MIT"
] | 1 | 2020-10-22T22:20:48.000Z | 2020-10-22T22:20:48.000Z | pydhsfw/messages.py | tetrahedron-technologies/pydhsfw | 79180afce1ce7c8804fec2276c41e7247040022b | [
"MIT"
] | 27 | 2020-10-15T00:53:47.000Z | 2020-12-02T22:51:28.000Z | pydhsfw/messages.py | tetrahedron-technologies/pydhsfw | 79180afce1ce7c8804fec2276c41e7247040022b | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from threading import Event
from collections import deque
from typing import Any, TypeVar, Generic
class MessageIn:
    """Base class for inbound messages.

    Concrete subclasses are stamped with a type id by the
    @register_message decorator and override parse().
    """

    # Message type id; set on subclasses by @register_message.
    _type_id = None

    @classmethod
    def get_type_id(cls):
        """Return the type id registered for this message class."""
        return cls._type_id

    @classmethod
    def parse(cls, buffer: Any):
        """Parse *buffer* into a message instance (no-op in the base class)."""
        return None

    @staticmethod
    def parse_type_id(cls, buffer: Any) -> Any:
        """Extract a type id from a raw buffer (no-op in the base class)."""
        return None

    def __str__(self):
        return self.get_type_id()
class MessageOut:
    """Base class for outbound messages; subclasses override write()."""

    # Message type id; set on subclasses by @register_message.
    _type_id = None

    @classmethod
    def get_type_id(cls):
        """Return the type id registered for this message class."""
        return cls._type_id

    def write(self) -> Any:
        """Serialize this message to a raw buffer (no-op in the base class)."""
        return None

    def __str__(self):
        return self.get_type_id()
class MessageRegistry:
    """Process-wide map of factory name -> set of registered message classes.

    Populated at import time by the @register_message decorator and read by
    MessageFactory when it builds its type-id dispatch table.
    """

    # Shared across the process: factory name -> set of classes.
    _registry = {}

    @classmethod
    def _register_message(cls, factory_name: str, msg_cls: MessageIn):
        """Add *msg_cls* to the set registered under *factory_name*."""
        cls._registry.setdefault(factory_name, set()).add(msg_cls)

    @classmethod
    def _get_factory_messages(cls, factory_name: str):
        """Return the classes registered for *factory_name* (empty set if none)."""
        return cls._registry.get(factory_name, set())
def register_message(msg_type_id: str, factory_name: str = None):
    """Class decorator that assigns a message type id and optionally
    registers the class with a message factory.

    msg_type_id - unique type id stamped onto the class as _type_id.
    factory_name - when given, MessageIn subclasses are added to that
    factory's entry in the MessageRegistry so the factory can turn raw
    messages of this type id into instances.
    """

    def _decorate(cls):
        # Stamp the id consumed by get_type_id().
        cls._type_id = msg_type_id
        # Only inbound (MessageIn) classes participate in factory dispatch.
        if factory_name and issubclass(cls, MessageIn):
            MessageRegistry._register_message(factory_name, cls)
        return cls

    return _decorate
class MessageFactory:
    """Builds MessageIn instances from raw buffers.

    On construction, pulls every message class registered for this factory
    name from the MessageRegistry and indexes it by type id.  Subclasses
    override _parse_type_id() to extract the type id from the wire format.
    """

    def __init__(self, name: str = None):
        self._name = name
        # type id -> MessageIn subclass
        self._msg_map = {}
        self._register_messages()

    @property
    def name(self):
        """Factory name used as the registry lookup key."""
        return self._name

    def _register_message(self, msg_cls: MessageIn):
        """Index *msg_cls* under its registered type id."""
        self._msg_map[msg_cls.get_type_id()] = msg_cls

    def _register_messages(self):
        """Load all MessageIn subclasses registered for this factory name."""
        for candidate in MessageRegistry._get_factory_messages(self.name):
            if issubclass(candidate, MessageIn):
                self._register_message(candidate)

    def _get_msg_cls(self, type_id):
        return self._msg_map.get(type_id)

    def _parse_type_id(self, raw_msg: Any) -> Any:
        # Subclasses must override to decode the type id from raw bytes.
        return NotImplemented

    def _create_message(self, type_id, raw_msg: Any):
        msg_cls = self._get_msg_cls(type_id)
        # Unknown type ids fall through and yield None.
        return msg_cls.parse(raw_msg) if msg_cls else None

    def create_message(self, raw_msg: bytes) -> MessageIn:
        """Convert a raw message to a MessageIn subclass"""
        return self._create_message(self._parse_type_id(raw_msg), raw_msg)
T = TypeVar('T')


class Queue(Generic[T]):
    """Abstract FIFO interface; concrete queues override all three methods."""

    def __init__(self):
        pass

    def queue(self, item: T):
        """Add *item* to the queue (no-op in the base class)."""
        return None

    def fetch(self, timeout=None) -> T:
        """Remove and return the next item (no-op in the base class)."""
        return None

    def clear(self):
        """Discard all queued items (no-op in the base class)."""
        return None
class BlockingQueue(Queue[T]):
    """FIFO queue whose fetch() blocks on an Event until an item arrives."""

    def __init__(self):
        super().__init__()
        self._deque = deque()
        # Set while the deque holds items; cleared when it drains.
        self._deque_event = Event()

    def queue(self, item: T):
        # Append message and unblock
        self._deque.append(item)
        self._deque_event.set()

    def fetch(self, timeout=None) -> T:
        """Block until an item is available (or *timeout* seconds elapse,
        raising TimeoutError), then pop and return the oldest item."""
        item = None
        # Block until items are available
        if not self._deque_event.wait(timeout):
            raise TimeoutError
        elif self._deque:
            item = self._deque.popleft()
        # If there are no more items, start blocking again
        if not self._deque:
            self._deque_event.clear()
        # NOTE(review): with multiple consumers, the deque can drain between
        # wait() and popleft(), in which case None is returned instead of
        # retrying — confirm single-consumer usage.
        return item

    def clear(self):
        self._deque_event.clear()
        self._deque.clear()
class IncomingMessageQueue(BlockingQueue[MessageIn]):
    """Blocking queue specialized for inbound (MessageIn) messages."""
    def __init__(self):
        super().__init__()
class OutgoingMessageQueue(BlockingQueue[MessageOut]):
    """Blocking queue specialized for outbound (MessageOut) messages."""
    def __init__(self):
        super().__init__()
| 23.517442 | 133 | 0.647219 |
acf2e024d1ccfdd1b7d1a7eb1508468db46ed88f | 170 | py | Python | BOJ/14000~14999/14468.py | shinkeonkim/today-ps | f3e5e38c5215f19579bb0422f303a9c18c626afa | [
"Apache-2.0"
] | 2 | 2020-01-29T06:54:41.000Z | 2021-11-07T13:23:27.000Z | BOJ/14000~14999/14468.py | shinkeonkim/Today_PS | bb0cda0ee1b9c57e1cfa38355e29d0f1c6167a44 | [
"Apache-2.0"
] | null | null | null | BOJ/14000~14999/14468.py | shinkeonkim/Today_PS | bb0cda0ee1b9c57e1cfa38355e29d0f1c6167a44 | [
"Apache-2.0"
] | null | null | null | answer = 0
# BOJ 14468: each character of the input appears twice; count the number of
# crossing pairs.  `answer` is initialized to 0 above.
l = []  # characters seen exactly once so far (still-open pairs, in order)
a = input()
for i, j in enumerate(a):
    if j in l:
        # Closing pair j: every pair opened after j and still open crosses it.
        answer += len(l) - 1 - l.index(j)
        l.remove(j)
    else:
        l.append(j)
print(answer)
| 15.454545 | 35 | 0.505882 |
acf2e0615a480b785683023c457d20e508b73228 | 156 | py | Python | slackbot/bot_info.py | seung-lab/seuron | 81e462f8ef71f2e28b4c2ad4c835b27b251ae25a | [
"MIT"
] | null | null | null | slackbot/bot_info.py | seung-lab/seuron | 81e462f8ef71f2e28b4c2ad4c835b27b251ae25a | [
"MIT"
] | null | null | null | slackbot/bot_info.py | seung-lab/seuron | 81e462f8ef71f2e28b4c2ad4c835b27b251ae25a | [
"MIT"
] | 3 | 2018-12-20T16:46:38.000Z | 2022-03-02T18:49:39.000Z | from os import environ
# Slack API token used to authenticate the bot.
slack_token = environ["SLACK_TOKEN"]
# Mention string for the bot user, e.g. "<@U123ABC>".
botid = "<@{}>".format(environ["BOTUSERID"])
# Worker identifier derived from the deployment name.
workerid = "seuron-worker-"+environ["DEPLOYMENT"]
| 26 | 49 | 0.717949 |
acf2e0868555da0eb1c1cee7fb30b1e80783f1e1 | 1,517 | py | Python | src/python/cli_new/tests/main.py | em-mcg/cse223b-mesos | e7d0867d5bf67dbc28117f52babc73dd43d0bb4c | [
"Apache-2.0"
] | null | null | null | src/python/cli_new/tests/main.py | em-mcg/cse223b-mesos | e7d0867d5bf67dbc28117f52babc73dd43d0bb4c | [
"Apache-2.0"
] | 1 | 2022-01-17T12:25:46.000Z | 2022-01-17T12:25:46.000Z | src/python/cli_new/tests/main.py | em-mcg/cse223b-mesos | e7d0867d5bf67dbc28117f52babc73dd43d0bb4c | [
"Apache-2.0"
] | 1 | 2021-08-18T09:26:06.000Z | 2021-08-18T09:26:06.000Z | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This is the main executable of the mesos-cli unit tests.
"""
import os
import unittest
from termcolor import colored
from cli.tests import CLITestCase
# pylint: disable=unused-import
# We import the tests that we want to run.
from cli.tests import TestAgentPlugin
from cli.tests import TestInfrastructure
from cli.tests import TestTaskPlugin
if __name__ == '__main__':
    # Point the test cases at the local Mesos build directory.
    CLITestCase.MESOS_BUILD_DIR = CLITestCase.default_mesos_build_dir()
    # Use the config file that sits next to this test module.
    os.environ["MESOS_CLI_CONFIG"] = os.path.join(os.path.dirname(__file__),
                                                  "default_config.toml")
    # Fix: call print() as a function. The original bare-statement form
    # `print colored(...)` is a SyntaxError on Python 3, while the
    # parenthesized single-argument call behaves identically on Python 2.
    print(colored("Running the Mesos CLI unit tests", "yellow"))
    unittest.main(verbosity=2, testRunner=unittest.TextTestRunner)
| 37 | 76 | 0.749506 |
acf2e08db31be2325173592a4e8dbf0824633aaf | 1,326 | py | Python | tests/benchmarks/constructs/LoopSmallXrange.py | sthagen/Nuitka-Nuitka | 023dc76eeafd9c53ee2a51931474ddd98a3ba083 | [
"Apache-2.0"
] | null | null | null | tests/benchmarks/constructs/LoopSmallXrange.py | sthagen/Nuitka-Nuitka | 023dc76eeafd9c53ee2a51931474ddd98a3ba083 | [
"Apache-2.0"
] | null | null | null | tests/benchmarks/constructs/LoopSmallXrange.py | sthagen/Nuitka-Nuitka | 023dc76eeafd9c53ee2a51931474ddd98a3ba083 | [
"Apache-2.0"
] | null | null | null | # Copyright 2022, Kay Hayen, mailto:kay.hayen@gmail.com
#
# Python test originally created or extracted from other peoples work. The
# parts from me are licensed as below. It is at least Free Software where
# it's copied from other people. In these cases, that will normally be
# indicated.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module_value1 = 5
module_value2 = 3
def calledRepeatedly():
    """Benchmark body: iterate a small 3-element xrange (Python 2 only)."""
    # Force frame and eliminate forward propagation (currently).
    module_value1
    # Make sure we have a local variable x anyway
    x = 2
    local_value = module_value1
    # construct_begin
    # The loop between the construct markers is the code under measurement;
    # the harness compares runs with and without this span.
    for x in xrange(local_value, local_value+3):
        pass
    # construct_end
import itertools
# Drive the benchmark 50000 times; itertools.repeat avoids materializing
# a 50000-element list just to count iterations.
for x in itertools.repeat(None, 50000):
    calledRepeatedly()
print("OK.")
| 30.837209 | 78 | 0.71267 |
acf2e1169d9e8c1ad571a18bffdc62c84ac2a845 | 7,196 | py | Python | mmcv/runner/epoch_based_runner.py | RyanXLi/mmcv | 993da2bbd7e03fd8cd304f0c5ca2faa691134732 | [
"Apache-2.0"
] | 1 | 2020-07-03T02:16:36.000Z | 2020-07-03T02:16:36.000Z | mmcv/runner/epoch_based_runner.py | RyanXLi/mmcv | 993da2bbd7e03fd8cd304f0c5ca2faa691134732 | [
"Apache-2.0"
] | null | null | null | mmcv/runner/epoch_based_runner.py | RyanXLi/mmcv | 993da2bbd7e03fd8cd304f0c5ca2faa691134732 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) Open-MMLab. All rights reserved.
import os.path as osp
import platform
import shutil
import time
import warnings
import torch
import mmcv
from .base_runner import BaseRunner
from .builder import RUNNERS
from .checkpoint import save_checkpoint
from .utils import get_host_info
@RUNNERS.register_module()
class EpochBasedRunner(BaseRunner):
    """Epoch-based Runner.

    This runner train models epoch by epoch.
    """

    def run_iter(self, data_batch, train_mode, **kwargs):
        """Run one iteration and stash its outputs on ``self.outputs``.

        Dispatches to ``self.batch_processor`` when one was supplied,
        otherwise to ``model.train_step``/``model.val_step`` depending on
        ``train_mode``.  ``log_vars`` from the result, if present, are
        folded into ``self.log_buffer``.
        """
        if self.batch_processor is not None:
            outputs = self.batch_processor(
                self.model, data_batch, train_mode=train_mode, **kwargs)
        elif train_mode:
            outputs = self.model.train_step(data_batch, self.optimizer,
                                            **kwargs)
        else:
            outputs = self.model.val_step(data_batch, self.optimizer, **kwargs)
        if not isinstance(outputs, dict):
            # Fix: added the missing space between the two adjacent string
            # literals; the message previously rendered as
            # '..."model.train_step()"and "model.val_step()"...'.
            raise TypeError('"batch_processor()" or "model.train_step()" '
                            'and "model.val_step()" must return a dict')
        if 'log_vars' in outputs:
            self.log_buffer.update(outputs['log_vars'], outputs['num_samples'])
        self.outputs = outputs

    def train(self, data_loader, **kwargs):
        """Run one full training epoch over ``data_loader``."""
        self.model.train()
        self.mode = 'train'
        self.data_loader = data_loader
        self._max_iters = self._max_epochs * len(self.data_loader)
        self.call_hook('before_train_epoch')
        time.sleep(2)  # Prevent possible deadlock during epoch transition
        for i, data_batch in enumerate(self.data_loader):
            self._inner_iter = i
            self.call_hook('before_train_iter')
            # Bug fix: forward **kwargs so options passed to train(...) reach
            # train_step/batch_processor (they were silently dropped before).
            self.run_iter(data_batch, train_mode=True, **kwargs)
            self.call_hook('after_train_iter')
            self._iter += 1
        self.call_hook('after_train_epoch')
        self._epoch += 1

    def val(self, data_loader, **kwargs):
        """Run one full validation epoch over ``data_loader`` (no grads)."""
        self.model.eval()
        self.mode = 'val'
        self.data_loader = data_loader
        self.call_hook('before_val_epoch')
        time.sleep(2)  # Prevent possible deadlock during epoch transition
        for i, data_batch in enumerate(self.data_loader):
            self._inner_iter = i
            self.call_hook('before_val_iter')
            with torch.no_grad():
                # Bug fix: forward **kwargs (previously dropped).
                self.run_iter(data_batch, train_mode=False, **kwargs)
            self.call_hook('after_val_iter')
        self.call_hook('after_val_epoch')

    def run(self, data_loaders, workflow, max_epochs=None, **kwargs):
        """Start running.

        Args:
            data_loaders (list[:obj:`DataLoader`]): Dataloaders for training
                and validation.
            workflow (list[tuple]): A list of (phase, epochs) to specify the
                running order and epochs. E.g, [('train', 2), ('val', 1)] means
                running 2 epochs for training and 1 epoch for validation,
                iteratively.
        """
        assert isinstance(data_loaders, list)
        assert mmcv.is_list_of(workflow, tuple)
        assert len(data_loaders) == len(workflow)
        if max_epochs is not None:
            warnings.warn(
                'setting max_epochs in run is deprecated, '
                'please set max_epochs in runner_config', DeprecationWarning)
            self._max_epochs = max_epochs
        assert self._max_epochs is not None, (
            'max_epochs must be specified during instantiation')
        # _max_iters is derived from the first 'train' phase's loader length.
        for i, flow in enumerate(workflow):
            mode, epochs = flow
            if mode == 'train':
                self._max_iters = self._max_epochs * len(data_loaders[i])
                break
        work_dir = self.work_dir if self.work_dir is not None else 'NONE'
        self.logger.info('Start running, host: %s, work_dir: %s',
                         get_host_info(), work_dir)
        self.logger.info('workflow: %s, max: %d epochs', workflow,
                         self._max_epochs)
        self.call_hook('before_run')
        while self.epoch < self._max_epochs:
            for i, flow in enumerate(workflow):
                mode, epochs = flow
                if isinstance(mode, str):  # self.train()
                    if not hasattr(self, mode):
                        raise ValueError(
                            f'runner has no method named "{mode}" to run an '
                            'epoch')
                    epoch_runner = getattr(self, mode)
                else:
                    raise TypeError(
                        'mode in workflow must be a str, but got {}'.format(
                            type(mode)))
                for _ in range(epochs):
                    if mode == 'train' and self.epoch >= self._max_epochs:
                        break
                    epoch_runner(data_loaders[i], **kwargs)
        time.sleep(1)  # wait for some hooks like loggers to finish
        self.call_hook('after_run')

    def save_checkpoint(self,
                        out_dir,
                        filename_tmpl='epoch_{}.pth',
                        save_optimizer=True,
                        meta=None,
                        create_symlink=True):
        """Save the checkpoint.

        Args:
            out_dir (str): The directory that checkpoints are saved.
            filename_tmpl (str, optional): The checkpoint filename template,
                which contains a placeholder for the epoch number.
                Defaults to 'epoch_{}.pth'.
            save_optimizer (bool, optional): Whether to save the optimizer to
                the checkpoint. Defaults to True.
            meta (dict, optional): The meta information to be saved in the
                checkpoint. Defaults to None.
            create_symlink (bool, optional): Whether to create a symlink
                "latest.pth" to point to the latest checkpoint.
                Defaults to True.
        """
        if meta is None:
            meta = dict(epoch=self.epoch + 1, iter=self.iter)
        elif isinstance(meta, dict):
            meta.update(epoch=self.epoch + 1, iter=self.iter)
        else:
            raise TypeError(
                f'meta should be a dict or None, but got {type(meta)}')
        if self.meta is not None:
            # NOTE(review): self.meta can overwrite the epoch/iter keys set
            # above if it happens to contain them -- confirm this is intended.
            meta.update(self.meta)
        filename = filename_tmpl.format(self.epoch + 1)
        filepath = osp.join(out_dir, filename)
        optimizer = self.optimizer if save_optimizer else None
        save_checkpoint(self.model, filepath, optimizer=optimizer, meta=meta)
        # in some environments, `os.symlink` is not supported, you may need to
        # set `create_symlink` to False
        if create_symlink:
            dst_file = osp.join(out_dir, 'latest.pth')
            if platform.system() != 'Windows':
                mmcv.symlink(filename, dst_file)
            else:
                shutil.copy(filename, dst_file)
@RUNNERS.register_module()
class Runner(EpochBasedRunner):
    """Deprecated name of EpochBasedRunner."""

    def __init__(self, *args, **kwargs):
        # Emit a (default-category) warning on every instantiation; all
        # arguments are forwarded unchanged to EpochBasedRunner.
        warnings.warn(
            'Runner was deprecated, please use EpochBasedRunner instead')
        super().__init__(*args, **kwargs)
| 39.322404 | 79 | 0.578933 |
acf2e1df28c6c7b3a2dd1d9aa9773e0cd6cc812f | 11,135 | py | Python | code/datasets.py | Amritds/AttnGAN | 806ae70142a699bfe384c4964be2f7fce2b83d29 | [
"MIT"
] | null | null | null | code/datasets.py | Amritds/AttnGAN | 806ae70142a699bfe384c4964be2f7fce2b83d29 | [
"MIT"
] | null | null | null | code/datasets.py | Amritds/AttnGAN | 806ae70142a699bfe384c4964be2f7fce2b83d29 | [
"MIT"
] | null | null | null | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from nltk.tokenize import RegexpTokenizer
from collections import defaultdict
from AttnGAN.code.miscc.config import cfg
import torch
import torch.utils.data as data
from torch.autograd import Variable
import torchvision.transforms as transforms
import os
import sys
import numpy as np
import pandas as pd
from PIL import Image
import numpy.random as random
# Use the C-accelerated cPickle on Python 2; Python 3's pickle already
# uses the C implementation under the hood.
if sys.version_info[0] == 2:
    import cPickle as pickle
else:
    import pickle
def prepare_data(data):
    """Reorder a batch by decreasing caption length and wrap it for the GPU.

    Sorting by caption length is required by downstream packed-sequence
    RNN processing.  Images (one tensor per branch/resolution), captions,
    class ids and keys are all reindexed consistently, and tensors are
    wrapped in (legacy) ``Variable``s, moved to CUDA when ``cfg.CUDA``.
    """
    imgs, captions, captions_lens, class_ids, keys = data
    # sort data by the length in a decreasing order
    sorted_cap_lens, sorted_cap_indices = \
        torch.sort(captions_lens, 0, True)
    real_imgs = []
    for i in range(len(imgs)):
        # Reindex each resolution's image tensor with the same permutation.
        imgs[i] = imgs[i][sorted_cap_indices]
        if cfg.CUDA:
            real_imgs.append(Variable(imgs[i]).cuda())
        else:
            real_imgs.append(Variable(imgs[i]))
    # NOTE(review): .squeeze() drops ALL singleton dims, which would also
    # remove the batch dim for batch size 1 -- confirm batches are > 1.
    captions = captions[sorted_cap_indices].squeeze()
    class_ids = class_ids[sorted_cap_indices].numpy()
    # sent_indices = sent_indices[sorted_cap_indices]
    keys = [keys[i] for i in sorted_cap_indices.numpy()]
    # print('keys', type(keys), keys[-1]) # list
    if cfg.CUDA:
        captions = Variable(captions).cuda()
        sorted_cap_lens = Variable(sorted_cap_lens).cuda()
    else:
        captions = Variable(captions)
        sorted_cap_lens = Variable(sorted_cap_lens)
    return [real_imgs, captions, sorted_cap_lens,
            class_ids, keys]
def get_imgs(img_path, imsize, bbox=None,
             transform=None, normalize=None):
    """Load an RGB image, optionally crop around a bounding box, and return
    the normalized image at each branch resolution.

    ``bbox`` is ``[x, y, width, height]``; the crop is a square with
    half-size ``0.75 * max(w, h)`` centered on the box, clamped to the
    image bounds.  When ``cfg.GAN.B_DCGAN`` is set, only the full-size
    image is returned (as a one-element list).
    """
    img = Image.open(img_path).convert('RGB')
    width, height = img.size
    if bbox is not None:
        half = int(np.maximum(bbox[2], bbox[3]) * 0.75)
        cx = int((2 * bbox[0] + bbox[2]) / 2)
        cy = int((2 * bbox[1] + bbox[3]) / 2)
        top = np.maximum(0, cy - half)
        bottom = np.minimum(height, cy + half)
        left = np.maximum(0, cx - half)
        right = np.minimum(width, cx + half)
        img = img.crop([left, top, right, bottom])
    if transform is not None:
        img = transform(img)
    if cfg.GAN.B_DCGAN:
        return [normalize(img)]
    ret = []
    last = cfg.TREE.BRANCH_NUM - 1
    for i in range(cfg.TREE.BRANCH_NUM):
        # Every branch except the last is a downscaled copy of the image.
        scaled = img if i == last else transforms.Scale(imsize[i])(img)
        ret.append(normalize(scaled))
    return ret
class TextDataset(data.Dataset):
    """Image/caption dataset for AttnGAN.

    Each item is (multi-scale images, padded caption indices, caption
    length, class id, filename key).  A word-index vocabulary is built
    from the train+test captions on first use and cached as
    ``captions.pickle`` in ``data_dir``.  Bird data (CUB) additionally
    uses per-image bounding boxes for cropping.
    """

    def __init__(self, data_dir, split='train',
                 base_size=64,
                 transform=None, target_transform=None):
        self.transform = transform
        # Map PIL image to a tensor in [-1, 1].
        self.norm = transforms.Compose([
            transforms.ToTensor(),
            transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))])
        self.target_transform = target_transform
        self.embeddings_num = cfg.TEXT.CAPTIONS_PER_IMAGE
        # Branch resolutions: base_size, 2*base_size, 4*base_size, ...
        self.imsize = []
        for i in range(cfg.TREE.BRANCH_NUM):
            self.imsize.append(base_size)
            base_size = base_size * 2
        self.data = []
        self.data_dir = data_dir
        # Bounding boxes only exist for the CUB birds dataset.
        if data_dir.find('birds') != -1:
            self.bbox = self.load_bbox()
        else:
            self.bbox = None
        split_dir = os.path.join(data_dir, split)
        self.filenames, self.captions, self.ixtoword, \
            self.wordtoix, self.n_words = self.load_text_data(data_dir, split)
        self.class_id = self.load_class_id(split_dir, len(self.filenames))
        self.number_example = len(self.filenames)

    def load_bbox(self):
        """Return {filename (no extension): [x, y, w, h]} for CUB images.

        NOTE: uses ``xrange`` and therefore only runs on Python 2.
        """
        data_dir = self.data_dir
        bbox_path = os.path.join(data_dir, 'CUB_200_2011/bounding_boxes.txt')
        df_bounding_boxes = pd.read_csv(bbox_path,
                                        delim_whitespace=True,
                                        header=None).astype(int)
        #
        filepath = os.path.join(data_dir, 'CUB_200_2011/images.txt')
        df_filenames = \
            pd.read_csv(filepath, delim_whitespace=True, header=None)
        filenames = df_filenames[1].tolist()
        print('Total filenames: ', len(filenames), filenames[0])
        #
        filename_bbox = {img_file[:-4]: [] for img_file in filenames}
        numImgs = len(filenames)
        for i in xrange(0, numImgs):
            # bbox = [x-left, y-top, width, height]
            bbox = df_bounding_boxes.iloc[i][1:].tolist()
            key = filenames[i][:-4]
            filename_bbox[key] = bbox
        #
        return filename_bbox

    def load_captions(self, data_dir, filenames):
        """Read and tokenize up to ``embeddings_num`` captions per image.

        Returns a flat list of token lists (ASCII-filtered, lowercase).
        NOTE: ``f.read().decode('utf8')`` assumes Python 2 byte strings.
        """
        all_captions = []
        for i in range(len(filenames)):
            cap_path = '%s/text/%s.txt' % (data_dir, filenames[i])
            with open(cap_path, "r") as f:
                captions = f.read().decode('utf8').split('\n')
                cnt = 0
                for cap in captions:
                    if len(cap) == 0:
                        continue
                    cap = cap.replace("\ufffd\ufffd", " ")
                    # picks out sequences of alphanumeric characters as tokens
                    # and drops everything else
                    tokenizer = RegexpTokenizer(r'\w+')
                    tokens = tokenizer.tokenize(cap.lower())
                    # print('tokens', tokens)
                    if len(tokens) == 0:
                        print('cap', cap)
                        continue
                    tokens_new = []
                    for t in tokens:
                        # Drop non-ASCII characters from each token.
                        t = t.encode('ascii', 'ignore').decode('ascii')
                        if len(t) > 0:
                            tokens_new.append(t)
                    all_captions.append(tokens_new)
                    cnt += 1
                    if cnt == self.embeddings_num:
                        break
                if cnt < self.embeddings_num:
                    print('ERROR: the captions for %s less than %d'
                          % (filenames[i], cnt))
        return all_captions

    def build_dictionary(self, train_captions, test_captions):
        """Build word<->index maps (0 = '<end>') and index both splits.

        Note: the ``>= 0`` count threshold keeps every observed word.
        """
        word_counts = defaultdict(float)
        captions = train_captions + test_captions
        for sent in captions:
            for word in sent:
                word_counts[word] += 1
        vocab = [w for w in word_counts if word_counts[w] >= 0]
        ixtoword = {}
        ixtoword[0] = '<end>'
        wordtoix = {}
        wordtoix['<end>'] = 0
        ix = 1
        for w in vocab:
            wordtoix[w] = ix
            ixtoword[ix] = w
            ix += 1
        train_captions_new = []
        for t in train_captions:
            rev = []
            for w in t:
                if w in wordtoix:
                    rev.append(wordtoix[w])
            # rev.append(0) # do not need '<end>' token
            train_captions_new.append(rev)
        test_captions_new = []
        for t in test_captions:
            rev = []
            for w in t:
                if w in wordtoix:
                    rev.append(wordtoix[w])
            # rev.append(0) # do not need '<end>' token
            test_captions_new.append(rev)
        return [train_captions_new, test_captions_new,
                ixtoword, wordtoix, len(ixtoword)]

    def load_text_data(self, data_dir, split):
        """Load (or build and cache) indexed captions and the vocabulary.

        Returns (filenames, captions, ixtoword, wordtoix, n_words) for the
        requested split; the pickle cache covers both splits at once.
        """
        filepath = os.path.join(data_dir, 'captions.pickle')
        train_names = self.load_filenames(data_dir, 'train')
        test_names = self.load_filenames(data_dir, 'test')
        if not os.path.isfile(filepath):
            # First run: tokenize everything and cache the result.
            train_captions = self.load_captions(data_dir, train_names)
            test_captions = self.load_captions(data_dir, test_names)
            train_captions, test_captions, ixtoword, wordtoix, n_words = \
                self.build_dictionary(train_captions, test_captions)
            with open(filepath, 'wb') as f:
                pickle.dump([train_captions, test_captions,
                             ixtoword, wordtoix], f, protocol=2)
                print('Save to: ', filepath)
        else:
            with open(filepath, 'rb') as f:
                x = pickle.load(f)
                train_captions, test_captions = x[0], x[1]
                ixtoword, wordtoix = x[2], x[3]
                del x
                n_words = len(ixtoword)
                print('Load from: ', filepath)
        if split == 'train':
            # a list of list: each list contains
            # the indices of words in a sentence
            captions = train_captions
            filenames = train_names
        else: # split=='test'
            captions = test_captions
            filenames = test_names
        return filenames, captions, ixtoword, wordtoix, n_words

    def load_class_id(self, data_dir, total_num):
        """Return per-image class ids, or 0..total_num-1 if none are stored."""
        if os.path.isfile(data_dir + '/class_info.pickle'):
            with open(data_dir + '/class_info.pickle', 'rb') as f:
                class_id = pickle.load(f)
        else:
            class_id = np.arange(total_num)
        return class_id

    def load_filenames(self, data_dir, split):
        """Return the split's image keys from filenames.pickle ([] if absent)."""
        filepath = '%s/%s/filenames.pickle' % (data_dir, split)
        if os.path.isfile(filepath):
            with open(filepath, 'rb') as f:
                filenames = pickle.load(f)
            print('Load filenames from: %s (%d)' % (filepath, len(filenames)))
        else:
            filenames = []
        return filenames

    def get_caption(self, sent_ix):
        """Return (caption as (WORDS_NUM, 1) int64 array, effective length).

        Short captions are zero-padded; long captions are subsampled by
        picking WORDS_NUM random positions kept in original order.
        """
        # a list of indices for a sentence
        sent_caption = np.asarray(self.captions[sent_ix]).astype('int64')
        if (sent_caption == 0).sum() > 0:
            print('ERROR: do not need END (0) token', sent_caption)
        num_words = len(sent_caption)
        # pad with 0s (i.e., '<end>')
        x = np.zeros((cfg.TEXT.WORDS_NUM, 1), dtype='int64')
        x_len = num_words
        if num_words <= cfg.TEXT.WORDS_NUM:
            x[:num_words, 0] = sent_caption
        else:
            ix = list(np.arange(num_words)) # 1, 2, 3,..., maxNum
            np.random.shuffle(ix)
            ix = ix[:cfg.TEXT.WORDS_NUM]
            ix = np.sort(ix)
            x[:, 0] = sent_caption[ix]
            x_len = cfg.TEXT.WORDS_NUM
        return x, x_len

    def __getitem__(self, index):
        """Return (imgs, caption, caption length, class id, key) for one image,
        choosing one of its captions at random."""
        key = self.filenames[index]
        cls_id = self.class_id[index]
        # Birds (CUB) images are cropped to their bounding box first.
        if self.bbox is not None:
            bbox = self.bbox[key]
            data_dir = '%s/CUB_200_2011' % self.data_dir
        else:
            bbox = None
            data_dir = self.data_dir
        #
        img_name = '%s/images/%s.jpg' % (data_dir, key)
        imgs = get_imgs(img_name, self.imsize,
                        bbox, self.transform, normalize=self.norm)
        # random select a sentence
        sent_ix = random.randint(0, self.embeddings_num)
        new_sent_ix = index * self.embeddings_num + sent_ix
        caps, cap_len = self.get_caption(new_sent_ix)
        return imgs, caps, cap_len, cls_id, key

    def __len__(self):
        return len(self.filenames)
| 35.57508 | 78 | 0.555366 |
acf2e1e8628daf82b2eb11c05f55c7ccff151273 | 2,088 | py | Python | nuitka/utils/Jinja2.py | mikehaben69/Nuitka | 4c5161620ea8f0f1c93a1d6be79e7e6eda7161d4 | [
"Apache-2.0"
] | 5,421 | 2018-09-24T08:04:06.000Z | 2022-03-31T20:02:37.000Z | nuitka/utils/Jinja2.py | mikehaben69/Nuitka | 4c5161620ea8f0f1c93a1d6be79e7e6eda7161d4 | [
"Apache-2.0"
] | 1,348 | 2018-09-22T13:41:00.000Z | 2022-03-31T22:33:40.000Z | nuitka/utils/Jinja2.py | mikehaben69/Nuitka | 4c5161620ea8f0f1c93a1d6be79e7e6eda7161d4 | [
"Apache-2.0"
] | 396 | 2018-09-28T15:37:03.000Z | 2022-03-29T10:52:09.000Z | # Copyright 2021, Kay Hayen, mailto:kay.hayen@gmail.com
#
# Part of "Nuitka", an optimizing Python compiler that is compatible and
# integrates with CPython, but also works on its own.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Jinja folklore wrappers and handling of inline copy usage.
"""
from .Importing import importFromInlineCopy
environments = {}
def unlikely_if(value):
    """Return the branch hint "unlikely" for a truthy condition, else ""."""
    return "unlikely" if value else ""
def unlikely_or_likely_from(value):
    """Map a truthy condition to "unlikely" and a falsy one to "likely"."""
    return "unlikely" if value else "likely"
def getEnvironment(module_name):
    """Return the cached Jinja2 environment for a package's templates.

    The environment is created on first use: the inline copies of
    markupsafe and jinja2 are made importable, an Environment is built
    over the package's ``templates`` directory, and the shared branch-hint
    helpers are registered as globals.  StrictUndefined makes any
    undefined template variable an error.
    """
    if module_name not in environments:
        # Import dependencies, sadly we get to manage this ourselves.
        importFromInlineCopy("markupsafe", must_exist=True)
        # Fix: the return value was previously bound to a name that the
        # very next ``import jinja2`` shadowed; the call is kept only for
        # its side effect of making the inline copy importable.
        importFromInlineCopy("jinja2", must_exist=True)
        import jinja2

        env = jinja2.Environment(
            loader=jinja2.PackageLoader(module_name, "templates"),
            # extensions=["jinja2.ext.do"],
            trim_blocks=True,
            lstrip_blocks=True,
        )

        # For shared global functions.
        env.globals.update(
            {
                "unlikely_if": unlikely_if,
                "unlikely_or_likely_from": unlikely_or_likely_from,
            }
        )

        env.undefined = jinja2.StrictUndefined

        environments[module_name] = env

    return environments[module_name]
def getTemplate(module_name, template_name):
    """Load a named template from the given package's environment."""
    env = getEnvironment(module_name)
    return env.get_template(template_name)
| 28.60274 | 78 | 0.666667 |
acf2e3c4728779a11eeec4ffb342f10e39fd8be0 | 9,035 | py | Python | lists/tests/test_views.py | Tawakalt/todo_list | 184293acf62771f60c6fdc46271634ae89684775 | [
"MIT"
] | null | null | null | lists/tests/test_views.py | Tawakalt/todo_list | 184293acf62771f60c6fdc46271634ae89684775 | [
"MIT"
] | 5 | 2020-06-06T01:03:12.000Z | 2022-02-10T10:01:49.000Z | lists/tests/test_views.py | Tawakalt/todo_list | 184293acf62771f60c6fdc46271634ae89684775 | [
"MIT"
] | 1 | 2020-01-20T12:44:56.000Z | 2020-01-20T12:44:56.000Z | from django.conf import settings
from django.contrib.auth import get_user_model
from django.http import HttpRequest
from django.test import TestCase
from django.utils.html import escape
from importlib import import_module
from lists.forms import (
DUPLICATE_ITEM_ERROR,
EMPTY_ITEM_ERROR,
ExistingListItemForm,
ItemForm
)
from lists.models import Item, List
from lists.views import new_list
from unittest.mock import patch
import unittest
User = get_user_model()
# Create your tests here.
class HomePageTest(TestCase):
    """Tests for the site root ('/')."""

    def test_home_page_returns_correct_html(self):
        resp = self.client.get('/')
        self.assertTemplateUsed(resp, 'home.html')

    def test_homepage_uses_item_form(self):
        resp = self.client.get('/')
        form = resp.context['form']
        self.assertIsInstance(form, ItemForm)
class ListViewTest(TestCase):
    """Tests for the list detail page at /lists/<id>/ (GET and POST)."""

    def test_uses_list_template(self):
        list_ = List.objects.create()
        response = self.client.get(f'/lists/{list_.id}/')
        self.assertTemplateUsed(response, 'list.html')

    def test_passes_correct_list_to_template(self):
        # other_list exists only to prove the view picks the right one.
        other_list = List.objects.create()
        correct_list = List.objects.create()
        response = self.client.get(f'/lists/{correct_list.id}/')
        self.assertEqual(response.context['list'], correct_list)

    def test_displays_only_items_for_that_list(self):
        correct_list = List.objects.create()
        Item.objects.create(text='Item 1', list=correct_list)
        Item.objects.create(text='Item 2', list=correct_list)
        other_list = List.objects.create()
        Item.objects.create(text='Other List Item 1', list=other_list)
        Item.objects.create(text='Other List Item 2', list=other_list)
        response = self.client.get(f'/lists/{correct_list.id}/')
        self.assertContains(response, 'Item 1')
        self.assertContains(response, 'Item 2')
        self.assertNotContains(response, 'Other List Item 1')
        self.assertNotContains(response, 'Other List Item 2')

    def test_can_save_a_POST_request_to_an_existing_list(self):
        other_list = List.objects.create()
        correct_list = List.objects.create()
        self.client.post(
            f'/lists/{correct_list.id}/',
            data={'text': 'A new item for an existing list'}
        )
        self.assertEqual(Item.objects.count(), 1)
        new_item = Item.objects.first()
        self.assertEqual(new_item.text, 'A new item for an existing list')
        self.assertEqual(new_item.list, correct_list)

    def test_POST_redirects_to_list_view(self):
        other_list = List.objects.create()
        correct_list = List.objects.create()
        response = self.client.post(
            f'/lists/{correct_list.id}/',
            data={'text': 'A new item for an existing list'}
        )
        self.assertRedirects(response, f'/lists/{correct_list.id}/')

    def post_invalid_input(self):
        # Helper (not a test): POST an empty item to a fresh list.
        list_ = List.objects.create()
        return self.client.post(
            f'/lists/{list_.id}/',
            data={'text': ''}
        )

    def test_for_invalid_input_nothing_saved_to_db(self):
        self.post_invalid_input()
        self.assertEqual(Item.objects.count(), 0)

    def test_for_invalid_input_renders_list_template(self):
        response = self.post_invalid_input()
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'list.html')

    def test_for_invalid_input_passes_form_to_template(self):
        response = self.post_invalid_input()
        self.assertIsInstance(response.context['form'], ExistingListItemForm)

    def test_for_invalid_input_shows_error_on_page(self):
        response = self.post_invalid_input()
        self.assertContains(response, escape(EMPTY_ITEM_ERROR))

    def test_displays_item_form(self):
        list_ = List.objects.create()
        response = self.client.get(f'/lists/{list_.id}/')
        self.assertIsInstance(response.context['form'], ExistingListItemForm)
        self.assertContains(response, 'name="text"')

    def test_duplicate_item_validation_errors_end_up_on_lists_page(self):
        list1 = List.objects.create()
        item1 = Item.objects.create(list=list1, text='one')
        response = self.client.post(
            f'/lists/{list1.id}/',
            data={'text': 'one'}
        )
        expected_error = escape(DUPLICATE_ITEM_ERROR)
        self.assertContains(response, expected_error)
        self.assertTemplateUsed(response, 'list.html')
        self.assertEqual(Item.objects.all().count(), 1)
class NewListViewIntegratedTest(TestCase):
    """Integration tests for POST /lists/new using real forms and models."""

    def test_can_save_a_POST_request(self):
        self.client.post('/lists/new', data={'text':'A new list item'})
        self.assertEqual(Item.objects.count(), 1)
        new_item = Item.objects.first()
        self.assertEqual(new_item.text, 'A new list item')

    def test_redirects_after_POST(self):
        response = self.client.post('/lists/new', data={'text':'A new list item'})
        new_list = List.objects.first()
        self.assertRedirects(response, f'/lists/{new_list.id}/')

    def test_for_invalid_input_renders_home_template(self):
        response = self.client.post('/lists/new', data={'text':''})
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'home.html')

    def test_validation_errors_are_shown_on_home_page(self):
        # escape() because Django HTML-escapes the error in the template.
        response = self.client.post('/lists/new', data={'text':''})
        self.assertContains(response, escape(EMPTY_ITEM_ERROR))

    def test_for_invalid_input_passes_form_to_template(self):
        response = self.client.post('/lists/new', data={'text':''})
        self.assertIsInstance(response.context['form'], ItemForm)

    def test_invalid_list_items_are_not_saved(self):
        self.client.post('/lists/new', data={'text':''})
        self.assertEqual(List.objects.count(), 0)
        self.assertEqual(Item.objects.count(), 0)

    def test_list_owner_is_saved_if_user_is_authenticated(self):
        user = User.objects.create(email='ab@c.com')
        self.client.force_login(user)
        self.client.post('/lists/new', data={'text':'new item'})
        list_ = List.objects.first()
        self.assertEqual(list_.owner, user)
class MyListsTest(TestCase):
    """Tests for the per-user "my lists" page."""

    def test_my_lists_url_renders_my_lists_template(self):
        User.objects.create(email='ab@c.com')
        resp = self.client.get(f'/lists/users/ab@c.com/')
        self.assertTemplateUsed(resp, 'my_lists.html')

    def test_passes_correct_owner_to_template(self):
        # A second, wrong user proves the lookup matches on email.
        User.objects.create(email='wrongowner@a.com')
        owner = User.objects.create(email='ab@c.com')
        resp = self.client.get(f'/lists/users/ab@c.com/')
        self.assertEqual(resp.context['owner'], owner)
@patch('lists.views.NewListForm')
class NewListViewUnitTest(unittest.TestCase):
    """Unit tests for lists.views.new_list with the form class mocked out.

    The class-level @patch injects ``mockNewListForm`` as the last
    positional argument of every test method.
    """

    def setUp(self):
        self.request = HttpRequest()
        # Attach a session-store object as request.user so the view can
        # read it without a full auth middleware stack.
        self.engine = import_module(settings.SESSION_ENGINE)
        self.session_key = None
        self.request.user = self.engine.SessionStore(self.session_key)
        self.request.POST['text'] = 'new list item'

    @patch('lists.views.redirect')
    def test_passes_POST_data_to_NewListForm(
        self, mock_redirect, mockNewListForm
    ):
        response = new_list(self.request)
        self.assertEqual(response, mock_redirect.return_value)
        mockNewListForm.assert_called_once_with(data=self.request.POST)

    @patch('lists.views.redirect')
    def test_saves_form_with_owner_if_form_valid(
        self, mock_redirect, mockNewListForm
    ):
        mock_form = mockNewListForm.return_value
        mock_form.is_valid.return_value = True
        new_list(self.request)
        # Bug fix: the original body only duplicated the redirect
        # assertions from the test above and never checked the save.
        # NOTE(review): assumes the view calls form.save(owner=request.user)
        # -- confirm against lists/views.py.
        mock_form.save.assert_called_once_with(owner=self.request.user)

    @patch('lists.views.redirect')
    def test_redirects_to_form_returned_object_if_form_valid(
        self, mock_redirect, mockNewListForm
    ):
        mock_form = mockNewListForm.return_value
        mock_form.is_valid.return_value = True
        response = new_list(self.request)
        self.assertEqual(response, mock_redirect.return_value)
        mock_redirect.assert_called_once_with(mock_form.save.return_value)

    @patch('lists.views.render')
    def test_renders_home_template_with_form_if_form_invalid(
        self, mock_render, mockNewListForm
    ):
        mock_form = mockNewListForm.return_value
        mock_form.is_valid.return_value = False
        response = new_list(self.request)
        self.assertEqual(response, mock_render.return_value)
        mock_render.assert_called_once_with(
            self.request, 'home.html', {'form': mock_form, 'error': EMPTY_ITEM_ERROR}
        )

    def test_does_not_save_if_form_invalid(
        self, mockNewListForm
    ):
        mock_form = mockNewListForm.return_value
        mock_form.is_valid.return_value = False
        # Bug fix: the view was never invoked here, so the assertion below
        # passed vacuously on a mock that had simply not been touched.
        new_list(self.request)
        self.assertFalse(mock_form.save.called)
| 36.431452 | 85 | 0.687659 |
acf2e411686b0b980ad3f1376869c9b7f3db1727 | 15,162 | py | Python | plugins/modules/oci_object_storage_retention_rule.py | sagar2938/oci-ansible-collection | 5b8ce583a0d5d0aabf14494d61aea4649e18d1e6 | [
"Apache-2.0"
] | null | null | null | plugins/modules/oci_object_storage_retention_rule.py | sagar2938/oci-ansible-collection | 5b8ce583a0d5d0aabf14494d61aea4649e18d1e6 | [
"Apache-2.0"
] | null | null | null | plugins/modules/oci_object_storage_retention_rule.py | sagar2938/oci-ansible-collection | 5b8ce583a0d5d0aabf14494d61aea4649e18d1e6 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# Copyright (c) 2020, 2021 Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
# GENERATED FILE - DO NOT EDIT - MANUAL CHANGES WILL BE OVERWRITTEN
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["preview"],
"supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_object_storage_retention_rule
short_description: Manage a RetentionRule resource in Oracle Cloud Infrastructure
description:
- This module allows the user to create, update and delete a RetentionRule resource in Oracle Cloud Infrastructure
- For I(state=present), creates a new retention rule in the specified bucket. The new rule will take effect typically within 30 seconds.
Note that a maximum of 100 rules are supported on a bucket.
version_added: "2.9.0"
author: Oracle (@oracle)
options:
namespace_name:
description:
- The Object Storage namespace used for the request.
type: str
required: true
bucket_name:
description:
- "The name of the bucket. Avoid entering confidential information.
Example: `my-new-bucket1`"
type: str
required: true
display_name:
description:
- A user-specified name for the retention rule. Names can be helpful in identifying retention rules.
Avoid entering confidential information.
- Required for create, update, delete when environment variable C(OCI_USE_NAME_AS_IDENTIFIER) is set.
- This parameter is updatable when C(OCI_USE_NAME_AS_IDENTIFIER) is not set.
type: str
aliases: ["name"]
duration:
description:
- ""
- This parameter is updatable.
type: dict
suboptions:
time_amount:
description:
- The timeAmount is interpreted in units defined by the timeUnit parameter, and is calculated in relation
to each object's Last-Modified timestamp.
type: int
required: true
time_unit:
description:
- The unit that should be used to interpret timeAmount.
type: str
choices:
- "YEARS"
- "DAYS"
required: true
time_rule_locked:
description:
- The date and time as per L(RFC 3339,https://tools.ietf.org/html/rfc3339) after which this rule is locked
and can only be deleted by deleting the bucket. Once a rule is locked, only increases in the duration are
allowed and no other properties can be changed. This property cannot be updated for rules that are in a
locked state. Specifying it when a duration is not specified is considered an error.
- This parameter is updatable.
type: str
retention_rule_id:
description:
- The ID of the retention rule.
- Required for update using I(state=present) when environment variable C(OCI_USE_NAME_AS_IDENTIFIER) is not set.
- Required for delete using I(state=absent) when environment variable C(OCI_USE_NAME_AS_IDENTIFIER) is not set.
type: str
aliases: ["id"]
state:
description:
- The state of the RetentionRule.
- Use I(state=present) to create or update a RetentionRule.
- Use I(state=absent) to delete a RetentionRule.
type: str
required: false
default: 'present'
choices: ["present", "absent"]
extends_documentation_fragment: [ oracle.oci.oracle, oracle.oci.oracle_creatable_resource ]
"""
EXAMPLES = """
- name: Create retention_rule
oci_object_storage_retention_rule:
# required
namespace_name: namespace_name_example
bucket_name: my-new-bucket1
# optional
display_name: sampleRetentionRule
duration:
# required
time_amount: 30
time_unit: DAYS
time_rule_locked: 2019-12-13T17:23:46.000Z
- name: Update retention_rule
oci_object_storage_retention_rule:
# required
namespace_name: namespace_name_example
bucket_name: my-new-bucket1
retention_rule_id: "ocid1.retentionrule.oc1..xxxxxxEXAMPLExxxxxx"
# optional
display_name: sampleRetentionRule
duration:
# required
time_amount: 30
time_unit: DAYS
time_rule_locked: 2019-12-13T17:23:46.000Z
- name: Update retention_rule using name (when environment variable OCI_USE_NAME_AS_IDENTIFIER is set)
oci_object_storage_retention_rule:
# required
namespace_name: namespace_name_example
bucket_name: my-new-bucket1
display_name: sampleRetentionRule
# optional
duration:
# required
time_amount: 30
time_unit: DAYS
time_rule_locked: 2019-12-13T17:23:46.000Z
- name: Delete retention_rule
oci_object_storage_retention_rule:
# required
namespace_name: namespace_name_example
bucket_name: my-new-bucket1
retention_rule_id: "ocid1.retentionrule.oc1..xxxxxxEXAMPLExxxxxx"
state: absent
- name: Delete retention_rule using name (when environment variable OCI_USE_NAME_AS_IDENTIFIER is set)
oci_object_storage_retention_rule:
# required
namespace_name: namespace_name_example
bucket_name: my-new-bucket1
display_name: sampleRetentionRule
state: absent
"""
RETURN = """
retention_rule:
description:
- Details of the RetentionRule resource acted upon by the current operation
returned: on success
type: complex
contains:
id:
description:
- Unique identifier for the retention rule.
returned: on success
type: str
sample: "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx"
display_name:
description:
- User specified name for the retention rule.
returned: on success
type: str
sample: display_name_example
duration:
description:
- ""
returned: on success
type: complex
contains:
time_amount:
description:
- The timeAmount is interpreted in units defined by the timeUnit parameter, and is calculated in relation
to each object's Last-Modified timestamp.
returned: on success
type: int
sample: 56
time_unit:
description:
- The unit that should be used to interpret timeAmount.
returned: on success
type: str
sample: YEARS
etag:
description:
- The entity tag (ETag) for the retention rule.
returned: on success
type: str
sample: etag_example
time_rule_locked:
description:
- The date and time as per L(RFC 3339,https://tools.ietf.org/html/rfc3339) after which this rule becomes locked.
and can only be deleted by deleting the bucket.
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
time_created:
description:
- The date and time that the retention rule was created as per L(RFC3339,https://tools.ietf.org/html/rfc3339).
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
time_modified:
description:
- The date and time that the retention rule was modified as per L(RFC3339,https://tools.ietf.org/html/rfc3339).
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
sample: {
"id": "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx",
"display_name": "display_name_example",
"duration": {
"time_amount": 56,
"time_unit": "YEARS"
},
"etag": "etag_example",
"time_rule_locked": "2013-10-20T19:20:30+01:00",
"time_created": "2013-10-20T19:20:30+01:00",
"time_modified": "2013-10-20T19:20:30+01:00"
}
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.oracle.oci.plugins.module_utils import (
oci_common_utils,
oci_wait_utils,
)
from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import (
OCIResourceHelperBase,
get_custom_class,
)
try:
from oci.object_storage import ObjectStorageClient
from oci.object_storage.models import CreateRetentionRuleDetails
from oci.object_storage.models import UpdateRetentionRuleDetails
HAS_OCI_PY_SDK = True
except ImportError:
HAS_OCI_PY_SDK = False
class RetentionRuleHelperGen(OCIResourceHelperBase):
    """Supported operations: create, update, get, list and delete"""

    # NOTE: generated code (see file header) — the base class drives the
    # create/update/delete flow and calls these hooks for resource-specific
    # details.

    def get_module_resource_id_param(self):
        # Name of the module parameter that identifies the resource.
        return "retention_rule_id"

    def get_module_resource_id(self):
        # OCID of the retention rule supplied to the module, if any.
        return self.module.params.get("retention_rule_id")

    def get_get_fn(self):
        # Client function the base class uses to fetch a single resource.
        return self.client.get_retention_rule

    def get_resource(self):
        # Fetch the retention rule, retrying transient errors with backoff.
        return oci_common_utils.call_with_backoff(
            self.client.get_retention_rule,
            namespace_name=self.module.params.get("namespace_name"),
            bucket_name=self.module.params.get("bucket_name"),
            retention_rule_id=self.module.params.get("retention_rule_id"),
        )

    def get_required_kwargs_for_list(self):
        # Mandatory kwargs for list_retention_rules, taken from module params.
        required_list_method_params = [
            "namespace_name",
            "bucket_name",
        ]

        return dict(
            (param, self.module.params[param]) for param in required_list_method_params
        )

    def get_optional_kwargs_for_list(self):
        # list_retention_rules takes no optional filters.
        return dict()

    def list_resources(self):
        # List every retention rule in the bucket, following pagination.
        required_kwargs = self.get_required_kwargs_for_list()
        optional_kwargs = self.get_optional_kwargs_for_list()
        kwargs = oci_common_utils.merge_dicts(required_kwargs, optional_kwargs)
        return oci_common_utils.list_all_resources(
            self.client.list_retention_rules, **kwargs
        )

    def get_create_model_class(self):
        # SDK model built from module params for the create call.
        return CreateRetentionRuleDetails

    def create_resource(self):
        # NONE_WAITER: the create call completes synchronously, no work request.
        create_details = self.get_create_model()
        return oci_wait_utils.call_and_wait(
            call_fn=self.client.create_retention_rule,
            call_fn_args=(),
            call_fn_kwargs=dict(
                namespace_name=self.module.params.get("namespace_name"),
                bucket_name=self.module.params.get("bucket_name"),
                create_retention_rule_details=create_details,
            ),
            waiter_type=oci_wait_utils.NONE_WAITER_KEY,
            operation=oci_common_utils.CREATE_OPERATION_KEY,
            waiter_client=self.get_waiter_client(),
            resource_helper=self,
            wait_for_states=self.get_wait_for_states_for_operation(
                oci_common_utils.CREATE_OPERATION_KEY,
            ),
        )

    def get_update_model_class(self):
        # SDK model built from module params for the update call.
        return UpdateRetentionRuleDetails

    def update_resource(self):
        update_details = self.get_update_model()
        return oci_wait_utils.call_and_wait(
            call_fn=self.client.update_retention_rule,
            call_fn_args=(),
            call_fn_kwargs=dict(
                namespace_name=self.module.params.get("namespace_name"),
                bucket_name=self.module.params.get("bucket_name"),
                retention_rule_id=self.module.params.get("retention_rule_id"),
                update_retention_rule_details=update_details,
            ),
            waiter_type=oci_wait_utils.NONE_WAITER_KEY,
            operation=oci_common_utils.UPDATE_OPERATION_KEY,
            waiter_client=self.get_waiter_client(),
            resource_helper=self,
            wait_for_states=self.get_wait_for_states_for_operation(
                oci_common_utils.UPDATE_OPERATION_KEY,
            ),
        )

    def delete_resource(self):
        return oci_wait_utils.call_and_wait(
            call_fn=self.client.delete_retention_rule,
            call_fn_args=(),
            call_fn_kwargs=dict(
                namespace_name=self.module.params.get("namespace_name"),
                bucket_name=self.module.params.get("bucket_name"),
                retention_rule_id=self.module.params.get("retention_rule_id"),
            ),
            waiter_type=oci_wait_utils.NONE_WAITER_KEY,
            operation=oci_common_utils.DELETE_OPERATION_KEY,
            waiter_client=self.get_waiter_client(),
            resource_helper=self,
            wait_for_states=self.get_wait_for_states_for_operation(
                oci_common_utils.DELETE_OPERATION_KEY,
            ),
        )
# Pluggable customization hook: returns a user-supplied override class if one
# is registered under this name, otherwise a no-op placeholder.
RetentionRuleHelperCustom = get_custom_class("RetentionRuleHelperCustom")


class ResourceHelper(RetentionRuleHelperCustom, RetentionRuleHelperGen):
    # MRO: custom behaviour (if any) takes precedence over the generated helper.
    pass
def main():
    """Ansible module entry point: build the arg spec, dispatch on state."""
    # Common OCI args (auth, region, ...) plus create support; no waiters.
    module_args = oci_common_utils.get_common_arg_spec(
        supports_create=True, supports_wait=False
    )
    module_args.update(
        dict(
            namespace_name=dict(type="str", required=True),
            bucket_name=dict(type="str", required=True),
            display_name=dict(aliases=["name"], type="str"),
            duration=dict(
                type="dict",
                options=dict(
                    time_amount=dict(type="int", required=True),
                    time_unit=dict(
                        type="str", required=True, choices=["YEARS", "DAYS"]
                    ),
                ),
            ),
            time_rule_locked=dict(type="str"),
            retention_rule_id=dict(aliases=["id"], type="str"),
            state=dict(type="str", default="present", choices=["present", "absent"]),
        )
    )

    module = AnsibleModule(argument_spec=module_args, supports_check_mode=True)

    if not HAS_OCI_PY_SDK:
        module.fail_json(msg="oci python sdk required for this module.")

    resource_helper = ResourceHelper(
        module=module,
        resource_type="retention_rule",
        service_client_class=ObjectStorageClient,
        namespace="object_storage",
    )

    result = dict(changed=False)

    # Dispatch on the requested state. The *_using_name variants apply when
    # OCI_USE_NAME_AS_IDENTIFIER selects display_name as the identifier
    # instead of retention_rule_id.
    if resource_helper.is_delete_using_name():
        result = resource_helper.delete_using_name()
    elif resource_helper.is_delete():
        result = resource_helper.delete()
    elif resource_helper.is_update_using_name():
        result = resource_helper.update_using_name()
    elif resource_helper.is_update():
        result = resource_helper.update()
    elif resource_helper.is_create():
        result = resource_helper.create()

    module.exit_json(**result)
if __name__ == "__main__":
main()
| 35.843972 | 140 | 0.640153 |
acf2e4c6641a4e0e7d1194bdf3578866036c9082 | 10,279 | py | Python | src/models/model_builder.py | dudeperf3ct/PreSumm | fbb2659888c88d833a22dd05c97420c4a4da105f | [
"MIT"
] | null | null | null | src/models/model_builder.py | dudeperf3ct/PreSumm | fbb2659888c88d833a22dd05c97420c4a4da105f | [
"MIT"
] | null | null | null | src/models/model_builder.py | dudeperf3ct/PreSumm | fbb2659888c88d833a22dd05c97420c4a4da105f | [
"MIT"
] | null | null | null | import copy
import torch
import torch.nn as nn
from pytorch_transformers import BertModel, BertConfig
from torch.nn.init import xavier_uniform_
from models.decoder import TransformerDecoder
from models.encoder import Classifier, ExtTransformerEncoder
from models.optimizers import Optimizer
def build_optim(args, model, checkpoint):
    """Build the single optimizer used for non-two-optimizer training.

    If *checkpoint* is given, the optimizer saved in it is resumed (state
    tensors moved to GPU when visible_gpus is set); otherwise a fresh
    Optimizer with a Noam warmup schedule is created. All model parameters
    are registered with the returned optimizer.
    """

    if checkpoint is not None:
        # Resume: reuse the optimizer object stored in the checkpoint.
        # NOTE(review): this reads checkpoint['optim'] while build_optim_bert /
        # build_optim_dec read checkpoint['optims'] — confirm checkpoints
        # consumed here really use the singular key.
        optim = checkpoint['optim'][0]
        saved_optimizer_state_dict = optim.optimizer.state_dict()
        optim.optimizer.load_state_dict(saved_optimizer_state_dict)
        if args.visible_gpus != '-1':
            # Move any optimizer state tensors (e.g. Adam moments) to the GPU.
            for state in optim.optimizer.state.values():
                for k, v in state.items():
                    if torch.is_tensor(v):
                        state[k] = v.cuda()

        # Adam without its moment buffers cannot be resumed meaningfully.
        if (optim.method == 'adam') and (len(optim.optimizer.state) < 1):
            raise RuntimeError(
                "Error: loaded Adam optimizer from existing model" +
                " but optimizer state is empty")

    else:
        optim = Optimizer(
            args.optim, args.lr, args.max_grad_norm,
            beta1=args.beta1, beta2=args.beta2,
            decay_method='noam',
            warmup_steps=args.warmup_steps)

    optim.set_parameters(list(model.named_parameters()))

    return optim
def build_optim_bert(args, model, checkpoint):
    """Build the optimizer that updates only the BERT encoder parameters.

    Resumes the first optimizer stored in *checkpoint* when one is given
    (moving its state tensors to the GPU if visible_gpus is set); otherwise
    creates a fresh Optimizer with lr_bert and a Noam warmup schedule.
    """

    if checkpoint is None:
        optimizer = Optimizer(
            args.optim, args.lr_bert, args.max_grad_norm,
            beta1=args.beta1, beta2=args.beta2,
            decay_method='noam',
            warmup_steps=args.warmup_steps_bert)
    else:
        optimizer = checkpoint['optims'][0]
        # Round-trip the inner state dict (keeps resumed state consistent).
        optimizer.optimizer.load_state_dict(optimizer.optimizer.state_dict())
        if args.visible_gpus != '-1':
            # Move optimizer state tensors (e.g. Adam moments) onto the GPU.
            for state in optimizer.optimizer.state.values():
                for key, value in state.items():
                    if torch.is_tensor(value):
                        state[key] = value.cuda()
        # Adam without its moment buffers cannot be resumed meaningfully.
        if optimizer.method == 'adam' and len(optimizer.optimizer.state) < 1:
            raise RuntimeError(
                "Error: loaded Adam optimizer from existing model"
                " but optimizer state is empty")

    # Register only the encoder ('bert.model.*') parameters.
    bert_params = [
        (name, param)
        for name, param in model.named_parameters()
        if name.startswith('bert.model')
    ]
    optimizer.set_parameters(bert_params)

    return optimizer
def build_optim_dec(args, model, checkpoint):
    """Build the optimizer for everything *except* the BERT encoder
    (decoder, generator, etc.).

    Resumes the second optimizer stored in *checkpoint* when one is given;
    otherwise creates a fresh Optimizer with lr_dec and a Noam warmup
    schedule. Complements build_optim_bert, which handles 'bert.model.*'.
    """

    if checkpoint is not None:
        # Resume: second slot of the saved optimizer pair is the decoder's.
        optim = checkpoint['optims'][1]
        saved_optimizer_state_dict = optim.optimizer.state_dict()
        optim.optimizer.load_state_dict(saved_optimizer_state_dict)
        if args.visible_gpus != '-1':
            # Move optimizer state tensors (e.g. Adam moments) onto the GPU.
            for state in optim.optimizer.state.values():
                for k, v in state.items():
                    if torch.is_tensor(v):
                        state[k] = v.cuda()

        # Adam without its moment buffers cannot be resumed meaningfully.
        if (optim.method == 'adam') and (len(optim.optimizer.state) < 1):
            raise RuntimeError(
                "Error: loaded Adam optimizer from existing model" +
                " but optimizer state is empty")

    else:
        optim = Optimizer(
            args.optim, args.lr_dec, args.max_grad_norm,
            beta1=args.beta1, beta2=args.beta2,
            decay_method='noam',
            warmup_steps=args.warmup_steps_dec)

    # Everything that is NOT part of the BERT encoder.
    params = [(n, p) for n, p in list(model.named_parameters()) if not n.startswith('bert.model')]
    optim.set_parameters(params)

    return optim
def get_generator(vocab_size, dec_hidden_size, device):
    """Build the output head of the decoder.

    A linear projection from the decoder hidden size to the vocabulary,
    followed by a log-softmax over the last dimension, moved to *device*.
    """
    projection = nn.Linear(dec_hidden_size, vocab_size)
    log_softmax = nn.LogSoftmax(dim=-1)
    head = nn.Sequential(projection, log_softmax)
    head.to(device)
    return head
class Bert(nn.Module):
    """Thin wrapper around a pretrained BERT encoder.

    When ``finetune`` is False the encoder is treated as frozen: forward
    passes run under eval mode and without gradient tracking.
    """

    def __init__(self, large, temp_dir, finetune=False):
        super(Bert, self).__init__()
        model_name = 'bert-large-uncased' if large else 'bert-base-uncased'
        self.model = BertModel.from_pretrained(model_name, cache_dir=temp_dir)
        self.finetune = finetune

    def forward(self, x, segs, mask):
        """Return the top-layer hidden states for the input token ids."""
        if not self.finetune:
            # Frozen encoder: eval mode, no gradients.
            # NOTE(review): eval mode is not restored afterwards — presumably
            # fine since the encoder stays frozen for the whole run; confirm.
            self.eval()
            with torch.no_grad():
                top_vec, _ = self.model(x, segs, attention_mask=mask)
        else:
            top_vec, _ = self.model(x, segs, attention_mask=mask)
        return top_vec
class ExtSummarizer(nn.Module):
    """Extractive summarizer: BERT sentence encoder + sentence scoring layer."""

    def __init__(self, args, device, checkpoint):
        super(ExtSummarizer, self).__init__()
        self.args = args
        self.device = device
        self.bert = Bert(args.large, args.temp_dir, args.finetune_bert)

        # Inter-sentence transformer that scores each sentence vector.
        self.ext_layer = ExtTransformerEncoder(self.bert.model.config.hidden_size, args.ext_ff_size, args.ext_heads,
                                               args.ext_dropout, args.ext_layers)
        if (args.encoder == 'baseline'):
            # Baseline ablation: randomly initialised (non-pretrained) BERT
            # plus a simple linear classifier instead of the transformer.
            bert_config = BertConfig(self.bert.model.config.vocab_size, hidden_size=args.ext_hidden_size,
                                     num_hidden_layers=args.ext_layers, num_attention_heads=args.ext_heads, intermediate_size=args.ext_ff_size)
            self.bert.model = BertModel(bert_config)
            self.ext_layer = Classifier(self.bert.model.config.hidden_size)

        if(args.max_pos>512):
            # Extend position embeddings past BERT's 512-token limit; extra
            # positions are copied from the last pretrained embedding row.
            my_pos_embeddings = nn.Embedding(args.max_pos, self.bert.model.config.hidden_size)
            my_pos_embeddings.weight.data[:512] = self.bert.model.embeddings.position_embeddings.weight.data
            my_pos_embeddings.weight.data[512:] = self.bert.model.embeddings.position_embeddings.weight.data[-1][None,:].repeat(args.max_pos-512,1)
            self.bert.model.embeddings.position_embeddings = my_pos_embeddings

        if checkpoint is not None:
            self.load_state_dict(checkpoint['model'], strict=True)
        else:
            # Fresh run: initialise only the scoring layer (BERT is pretrained).
            if args.param_init != 0.0:
                for p in self.ext_layer.parameters():
                    p.data.uniform_(-args.param_init, args.param_init)
            if args.param_init_glorot:
                for p in self.ext_layer.parameters():
                    if p.dim() > 1:
                        xavier_uniform_(p)

        self.to(device)

    def forward(self, src, segs, clss, mask_src, mask_cls):
        """Score each sentence in the batch.

        Assumes src are token ids, segs segment ids, and clss the positions
        of each sentence's [CLS] token — TODO confirm against the data loader.
        Returns (sent_scores, mask_cls).
        """
        top_vec = self.bert(src, segs, mask_src)
        # Gather the [CLS] vector of every sentence, zero out padding sentences.
        sents_vec = top_vec[torch.arange(top_vec.size(0)).unsqueeze(1), clss]
        sents_vec = sents_vec * mask_cls[:, :, None].float()
        sent_scores = self.ext_layer(sents_vec, mask_cls).squeeze(-1)
        return sent_scores, mask_cls
class AbsSummarizer(nn.Module):
    """Abstractive summarizer: BERT encoder + transformer decoder + generator."""

    def __init__(self, args, device, checkpoint=None, bert_from_extractive=None):
        super(AbsSummarizer, self).__init__()
        self.args = args
        self.device = device
        self.bert = Bert(args.large, args.temp_dir, args.finetune_bert)

        if bert_from_extractive is not None:
            # Warm-start the encoder from an extractive model's state dict;
            # n[11:] strips the 'bert.model.' prefix from parameter names.
            self.bert.model.load_state_dict(
                dict([(n[11:], p) for n, p in bert_from_extractive.items() if n.startswith('bert.model')]), strict=True)

        if (args.encoder == 'baseline'):
            # Baseline ablation: randomly initialised (non-pretrained) encoder.
            bert_config = BertConfig(self.bert.model.config.vocab_size, hidden_size=args.enc_hidden_size,
                                     num_hidden_layers=args.enc_layers, num_attention_heads=8,
                                     intermediate_size=args.enc_ff_size,
                                     hidden_dropout_prob=args.enc_dropout,
                                     attention_probs_dropout_prob=args.enc_dropout)
            self.bert.model = BertModel(bert_config)

        if(args.max_pos>512):
            # Extend position embeddings past BERT's 512-token limit; extra
            # positions are copied from the last pretrained embedding row.
            my_pos_embeddings = nn.Embedding(args.max_pos, self.bert.model.config.hidden_size)
            my_pos_embeddings.weight.data[:512] = self.bert.model.embeddings.position_embeddings.weight.data
            my_pos_embeddings.weight.data[512:] = self.bert.model.embeddings.position_embeddings.weight.data[-1][None,:].repeat(args.max_pos-512,1)
            self.bert.model.embeddings.position_embeddings = my_pos_embeddings
        self.vocab_size = self.bert.model.config.vocab_size
        tgt_embeddings = nn.Embedding(self.vocab_size, self.bert.model.config.hidden_size, padding_idx=0)
        if (self.args.share_emb):
            # Share target embeddings with the encoder's word embeddings.
            tgt_embeddings = self.bert.model.embeddings.word_embeddings

        self.decoder = TransformerDecoder(
            self.args.dec_layers,
            self.args.dec_hidden_size, heads=self.args.dec_heads,
            d_ff=self.args.dec_ff_size, dropout=self.args.dec_dropout, embeddings=tgt_embeddings)

        # Output head; tie its projection weights to the decoder embeddings.
        self.generator = get_generator(self.vocab_size, self.args.dec_hidden_size, device)
        self.generator[0].weight = self.decoder.embeddings.weight

        if checkpoint is not None:
            self.load_state_dict(checkpoint['model'], strict=True)
        else:
            # Fresh run: BERT-style init for the decoder, Xavier for the head.
            for module in self.decoder.modules():
                if isinstance(module, (nn.Linear, nn.Embedding)):
                    module.weight.data.normal_(mean=0.0, std=0.02)
                elif isinstance(module, nn.LayerNorm):
                    module.bias.data.zero_()
                    module.weight.data.fill_(1.0)
                if isinstance(module, nn.Linear) and module.bias is not None:
                    module.bias.data.zero_()
            for p in self.generator.parameters():
                if p.dim() > 1:
                    xavier_uniform_(p)
                else:
                    p.data.zero_()
            if(args.use_bert_emb):
                # Initialise target embeddings from (a copy of) BERT's word
                # embeddings and re-tie the generator weights to them.
                tgt_embeddings = nn.Embedding(self.vocab_size, self.bert.model.config.hidden_size, padding_idx=0)
                tgt_embeddings.weight = copy.deepcopy(self.bert.model.embeddings.word_embeddings.weight)
                self.decoder.embeddings = tgt_embeddings
                self.generator[0].weight = self.decoder.embeddings.weight

        self.to(device)

    def forward(self, src, tgt, segs, clss, mask_src, mask_tgt, mask_cls):
        """Teacher-forced decode: feed tgt[:, :-1] and return decoder outputs.

        The generator is applied later (by the loss / translator), not here.
        """
        top_vec = self.bert(src, segs, mask_src)
        dec_state = self.decoder.init_decoder_state(src, top_vec)
        decoder_outputs, state = self.decoder(tgt[:, :-1], top_vec, dec_state)
        return decoder_outputs, None
| 41.955102 | 147 | 0.623504 |
acf2e7c37ead9adcf528e04dfd645504aa2439c0 | 890 | py | Python | VnfPackageSubscription/urls.py | p76081158/kube5gnfvo | 3db5db51603f52b73fce7d9740980552836299c6 | [
"Apache-2.0"
] | 51 | 2020-02-26T09:42:02.000Z | 2021-11-18T04:16:48.000Z | VnfPackageSubscription/urls.py | p76081158/kube5gnfvo | 3db5db51603f52b73fce7d9740980552836299c6 | [
"Apache-2.0"
] | 6 | 2020-05-22T02:54:24.000Z | 2022-03-26T06:14:55.000Z | VnfPackageSubscription/urls.py | p76081158/kube5gnfvo | 3db5db51603f52b73fce7d9740980552836299c6 | [
"Apache-2.0"
] | 55 | 2020-01-08T09:47:52.000Z | 2022-02-04T12:55:05.000Z | # All Rights Reserved.
#
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.urls import path, include
from rest_framework.routers import DefaultRouter
from VnfPackageSubscription import views
# Register the VNF package subscription endpoints with DRF's default router
# (generates the standard list/detail routes for the viewset).
router = DefaultRouter()
router.register(r'subscriptions', views.VNFPackageSubscriptionViewSet)

# ETSI NFV-SOL VNF Package Management API base path (version 1).
urlpatterns = [
    path('vnfpkgm/v1/', include(router.urls)),
]
| 35.6 | 78 | 0.749438 |
acf2e9406bdbbffa1a7d63daa8503c956d729897 | 40,038 | py | Python | app/navigation.py | McCloudA/notifications-admin | 99ab99a2324cff02f317796c6c67ca49a830552e | [
"MIT"
] | null | null | null | app/navigation.py | McCloudA/notifications-admin | 99ab99a2324cff02f317796c6c67ca49a830552e | [
"MIT"
] | null | null | null | app/navigation.py | McCloudA/notifications-admin | 99ab99a2324cff02f317796c6c67ca49a830552e | [
"MIT"
] | null | null | null | from itertools import chain
from flask import request
class Navigation:
mapping = {}
exclude = {}
selected_class = "selected"
def __init__(self):
self.mapping = {
navigation: {
# if not specified, assume endpoints are all in the `main` blueprint.
self._get_endpoint_with_blueprint(endpoint) for endpoint in endpoints
} for navigation, endpoints in self.mapping.items()
}
@property
def endpoints_with_navigation(self):
return tuple(chain.from_iterable((
endpoints
for navigation_item, endpoints in self.mapping.items()
)))
@property
def endpoints_without_navigation(self):
return tuple(
self._get_endpoint_with_blueprint(endpoint) for endpoint in self.exclude
) + ('static', 'status.show_status')
def is_selected(self, navigation_item):
if request.endpoint in self.mapping[navigation_item]:
return " " + self.selected_class
return ''
@staticmethod
def _get_endpoint_with_blueprint(endpoint):
return endpoint if '.' in endpoint else 'main.{}'.format(endpoint)
class HeaderNavigation(Navigation):
mapping = {
'support': {
'bat_phone',
'feedback',
'support',
'support_public',
'thanks',
'triage',
},
'features': {
'features',
'features_email',
'features_letters',
'features_sms',
'message_status',
'roadmap',
'security',
'terms',
'trial_mode_new',
'using_notify',
},
'pricing': {
'pricing',
'how_to_pay',
},
'documentation': {
'documentation',
'integration_testing',
},
'user-profile': {
'user_profile',
'user_profile_email',
'user_profile_email_authenticate',
'user_profile_email_confirm',
'user_profile_mobile_number',
'user_profile_mobile_number_authenticate',
'user_profile_mobile_number_confirm',
'user_profile_name',
'user_profile_password',
'user_profile_disable_platform_admin_view',
},
'platform-admin': {
'archive_user',
'clear_cache',
'create_email_branding',
'create_letter_branding',
'edit_sms_provider_ratio',
'email_branding',
'find_services_by_name',
'find_users_by_email',
'letter_branding',
'live_services',
'live_services_csv',
'notifications_sent_by_service',
'performance_platform_xlsx',
'usage_for_all_services',
'organisations',
'platform_admin',
'platform_admin_list_complaints',
'platform_admin_reports',
'platform_admin_returned_letters',
'platform_admin_splash_page',
'suspend_service',
'trial_services',
'update_email_branding',
'update_letter_branding',
'user_information',
'view_provider',
'view_providers',
},
'sign-in': {
'revalidate_email_sent',
'sign_in',
'two_factor',
'two_factor_email',
'two_factor_email_sent',
'two_factor_email_interstitial',
'verify',
'verify_email',
},
}
exclude = {
'accept_invite',
'accept_org_invite',
'action_blocked',
'add_data_retention',
'add_organisation',
'add_organisation_from_gp_service',
'add_organisation_from_nhs_local_service',
'add_service',
'add_service_template',
'api_callbacks',
'api_documentation',
'api_integration',
'api_keys',
'archive_service',
'branding_request',
'callbacks',
'cancel_invited_org_user',
'cancel_invited_user',
'cancel_job',
'cancel_letter',
'cancel_letter_job',
'check_and_resend_text_code',
'check_and_resend_verification_code',
'check_messages',
'no_cookie.check_messages_preview',
'check_notification',
'no_cookie.check_notification_preview',
'choose_account',
'choose_from_contact_list',
'choose_service',
'choose_template',
'choose_template_to_copy',
'confirm_edit_organisation_name',
'confirm_edit_user_email',
'confirm_edit_user_mobile_number',
'confirm_redact_template',
'conversation',
'conversation_reply',
'conversation_reply_with_template',
'conversation_updates',
'cookies',
'copy_template',
'create_api_key',
'data_retention',
'delete_service_template',
'delete_template_folder',
'delivery_and_failure',
'delivery_status_callback',
'design_content',
'download_contact_list',
'download_notifications_csv',
'edit_data_retention',
'edit_organisation_agreement',
'edit_organisation_crown_status',
'edit_organisation_domains',
'edit_organisation_email_branding',
'edit_organisation_letter_branding',
'edit_organisation_go_live_notes',
'edit_organisation_name',
'edit_organisation_type',
'edit_provider',
'edit_service_template',
'edit_template_postage',
'edit_user_org_permissions',
'edit_user_email',
'edit_user_mobile_number',
'edit_user_permissions',
'email_not_received',
'email_template',
'error',
'estimate_usage',
'forgot_password',
'get_example_csv',
'get_notifications_as_json',
'get_started',
'get_started_old',
'go_to_dashboard_after_tour',
'guidance_index',
'branding_and_customisation',
'create_and_send_messages',
'edit_and_format_messages',
'send_files_by_email',
'upload_a_letter',
'history',
'inbound_sms_admin',
'inbox',
'inbox_download',
'inbox_updates',
'index',
'information_risk_management',
'information_security',
'invite_org_user',
'invite_user',
'no_cookie.letter_branding_preview_image',
'letter_spec',
'letter_template',
'link_service_to_organisation',
'manage_org_users',
'manage_template_folder',
'manage_users',
'monthly',
'new_password',
'old_integration_testing',
'old_roadmap',
'old_service_dashboard',
'old_terms',
'old_using_notify',
'organisation_dashboard',
'organisation_trial_mode_services',
'organisation_settings',
'organisation_preview_email_branding',
'organisation_preview_letter_branding',
'privacy',
'public_agreement',
'public_download_agreement',
'received_text_messages_callback',
'redact_template',
'register',
'register_from_invite',
'register_from_org_invite',
'registration_continue',
'remove_user_from_organisation',
'remove_user_from_service',
'request_to_go_live',
'resend_email_link',
'resend_email_verification',
'resume_service',
'returned_letter_summary',
'returned_letters',
'returned_letters_report',
'revoke_api_key',
'robots',
'send_messages',
'send_notification',
'send_one_off',
'send_one_off_letter_address',
'send_one_off_step',
'send_test',
'no_cookie.send_test_preview',
'send_test_step',
'send_from_contact_list',
'send_uploaded_letter',
'service_add_email_reply_to',
'service_add_letter_contact',
'service_add_sms_sender',
'service_agreement',
'service_accept_agreement',
'service_confirm_agreement',
'service_confirm_delete_email_reply_to',
'service_confirm_delete_letter_contact',
'service_confirm_delete_sms_sender',
'service_dashboard',
'service_dashboard_updates',
'service_delete_email_reply_to',
'service_delete_letter_contact',
'service_delete_sms_sender',
'service_download_agreement',
'service_edit_email_reply_to',
'service_edit_letter_contact',
'service_edit_sms_sender',
'service_email_reply_to',
'service_letter_contact_details',
'service_make_blank_default_letter_contact',
'service_name_change',
'service_name_change_confirm',
'service_preview_email_branding',
'service_preview_letter_branding',
'service_set_auth_type',
'service_set_channel',
'send_files_by_email_contact_details',
'service_set_email_branding',
'service_set_inbound_number',
'service_set_inbound_sms',
'service_set_international_sms',
'service_set_letter_branding',
'service_set_letter_contact_block',
'service_set_letters',
'service_set_reply_to_email',
'service_set_sms_prefix',
'service_settings',
'service_sms_senders',
'service_switch_count_as_live',
'service_switch_live',
'service_set_permission',
'service_verify_reply_to_address',
'service_verify_reply_to_address_updates',
'services_or_dashboard',
'set_free_sms_allowance',
'set_sender',
'set_template_sender',
'show_accounts_or_dashboard',
'sign_out',
'start_job',
'start_tour',
'styleguide',
'submit_request_to_go_live',
'template_history',
'template_usage',
'trial_mode',
'upload_contact_list',
'check_contact_list',
'save_contact_list',
'contact_list',
'delete_contact_list',
'upload_letter',
'uploaded_letter_preview',
'uploads',
'usage',
'view_job',
'view_job_csv',
'view_job_updates',
'view_jobs',
'view_letter_notification_as_preview',
'no_cookie.view_letter_template_preview',
'view_letter_upload_as_preview',
'view_notification',
'view_notification_updates',
'view_notifications',
'view_notifications_csv',
'view_template',
'view_template_version',
'no_cookie.view_template_version_preview',
'view_template_versions',
'whitelist',
'who_its_for',
}
# header HTML now comes from GOVUK Frontend so requires a boolean, not an attribute
def is_selected(self, navigation_item):
return request.endpoint in self.mapping[navigation_item]
class MainNavigation(Navigation):
mapping = {
'dashboard': {
'conversation',
'inbox',
'monthly',
'returned_letter_summary',
'returned_letters',
'service_dashboard',
'template_usage',
'view_notification',
'view_notifications',
},
'templates': {
'action_blocked',
'add_service_template',
'check_messages',
'check_notification',
'choose_from_contact_list',
'choose_template',
'choose_template_to_copy',
'confirm_redact_template',
'conversation_reply',
'copy_template',
'delete_service_template',
'edit_service_template',
'edit_template_postage',
'manage_template_folder',
'send_messages',
'send_one_off',
'send_one_off_letter_address',
'send_one_off_step',
'send_test',
'no_cookie.send_test_preview',
'send_test_step',
'set_sender',
'set_template_sender',
'view_template',
'view_template_version',
'view_template_versions',
},
'uploads': {
'upload_contact_list',
'check_contact_list',
'save_contact_list',
'contact_list',
'delete_contact_list',
'upload_letter',
'uploaded_letter_preview',
'uploads',
'view_job',
'view_jobs',
},
'team-members': {
'confirm_edit_user_email',
'confirm_edit_user_mobile_number',
'edit_user_email',
'edit_user_mobile_number',
'edit_user_permissions',
'invite_user',
'manage_users',
'remove_user_from_service',
},
'usage': {
'usage',
},
'settings': {
'add_organisation_from_gp_service',
'add_organisation_from_nhs_local_service',
'branding_request',
'estimate_usage',
'link_service_to_organisation',
'request_to_go_live',
'service_add_email_reply_to',
'service_add_letter_contact',
'service_add_sms_sender',
'service_agreement',
'service_accept_agreement',
'service_confirm_agreement',
'service_confirm_delete_email_reply_to',
'service_confirm_delete_letter_contact',
'service_confirm_delete_sms_sender',
'service_edit_email_reply_to',
'service_edit_letter_contact',
'service_edit_sms_sender',
'service_email_reply_to',
'service_letter_contact_details',
'service_make_blank_default_letter_contact',
'service_name_change',
'service_name_change_confirm',
'service_preview_email_branding',
'service_preview_letter_branding',
'service_set_auth_type',
'service_set_channel',
'send_files_by_email_contact_details',
'service_set_email_branding',
'service_set_inbound_number',
'service_set_inbound_sms',
'service_set_international_sms',
'service_set_letter_contact_block',
'service_set_letters',
'service_set_reply_to_email',
'service_set_sms_prefix',
'service_verify_reply_to_address',
'service_verify_reply_to_address_updates',
'service_settings',
'service_sms_senders',
'set_free_sms_allowance',
'service_set_letter_branding',
'submit_request_to_go_live',
},
'api-integration': {
'api_callbacks',
'api_documentation',
'api_integration',
'api_keys',
'create_api_key',
'delivery_status_callback',
'received_text_messages_callback',
'revoke_api_key',
'whitelist',
},
}
exclude = {
'accept_invite',
'accept_org_invite',
'add_data_retention',
'add_organisation',
'add_service',
'archive_service',
'archive_user',
'bat_phone',
'callbacks',
'cancel_invited_org_user',
'cancel_invited_user',
'cancel_job',
'cancel_letter',
'cancel_letter_job',
'check_and_resend_text_code',
'check_and_resend_verification_code',
'no_cookie.check_messages_preview',
'no_cookie.check_notification_preview',
'choose_account',
'choose_service',
'clear_cache',
'confirm_edit_organisation_name',
'conversation_reply_with_template',
'conversation_updates',
'cookies',
'create_email_branding',
'create_letter_branding',
'data_retention',
'delete_template_folder',
'delivery_and_failure',
'design_content',
'documentation',
'download_contact_list',
'download_notifications_csv',
'edit_data_retention',
'edit_organisation_agreement',
'edit_organisation_crown_status',
'edit_organisation_email_branding',
'edit_organisation_domains',
'edit_organisation_go_live_notes',
'edit_organisation_letter_branding',
'edit_organisation_name',
'edit_organisation_type',
'edit_provider',
'edit_sms_provider_ratio',
'edit_user_org_permissions',
'email_branding',
'email_not_received',
'email_template',
'error',
'features',
'features_email',
'features_letters',
'features_sms',
'feedback',
'find_services_by_name',
'find_users_by_email',
'forgot_password',
'get_example_csv',
'get_notifications_as_json',
'get_started',
'get_started_old',
'go_to_dashboard_after_tour',
'guidance_index',
'branding_and_customisation',
'create_and_send_messages',
'edit_and_format_messages',
'send_files_by_email',
'upload_a_letter',
'history',
'how_to_pay',
'inbound_sms_admin',
'inbox_download',
'inbox_updates',
'index',
'information_risk_management',
'information_security',
'integration_testing',
'invite_org_user',
'letter_branding',
'no_cookie.letter_branding_preview_image',
'live_services',
'live_services_csv',
'letter_spec',
'letter_template',
'message_status',
'manage_org_users',
'new_password',
'notifications_sent_by_service',
'old_integration_testing',
'old_roadmap',
'old_service_dashboard',
'old_terms',
'old_using_notify',
'organisation_dashboard',
'organisation_trial_mode_services',
'organisation_preview_email_branding',
'organisation_preview_letter_branding',
'organisation_settings',
'organisations',
'performance_platform_xlsx',
'platform_admin',
'platform_admin_list_complaints',
'platform_admin_reports',
'platform_admin_returned_letters',
'platform_admin_splash_page',
'pricing',
'privacy',
'public_agreement',
'public_download_agreement',
'redact_template',
'register',
'register_from_invite',
'register_from_org_invite',
'registration_continue',
'remove_user_from_organisation',
'resend_email_link',
'resend_email_verification',
'resume_service',
'returned_letters_report',
'revalidate_email_sent',
'roadmap',
'robots',
'security',
'send_notification',
'send_from_contact_list',
'send_uploaded_letter',
'service_dashboard_updates',
'service_delete_email_reply_to',
'service_delete_letter_contact',
'service_delete_sms_sender',
'service_download_agreement',
'service_switch_count_as_live',
'service_switch_live',
'service_set_permission',
'services_or_dashboard',
'show_accounts_or_dashboard',
'sign_in',
'sign_out',
'start_job',
'start_tour',
'styleguide',
'support',
'support_public',
'suspend_service',
'template_history',
'terms',
'thanks',
'triage',
'trial_mode',
'trial_mode_new',
'trial_services',
'two_factor',
'two_factor_email',
'two_factor_email_sent',
'two_factor_email_interstitial',
'update_email_branding',
'update_letter_branding',
'usage_for_all_services',
'user_information',
'user_profile',
'user_profile_email',
'user_profile_email_authenticate',
'user_profile_email_confirm',
'user_profile_mobile_number',
'user_profile_mobile_number_authenticate',
'user_profile_mobile_number_confirm',
'user_profile_name',
'user_profile_password',
'user_profile_disable_platform_admin_view',
'using_notify',
'verify',
'verify_email',
'view_job_csv',
'view_job_updates',
'view_letter_notification_as_preview',
'no_cookie.view_letter_template_preview',
'view_letter_upload_as_preview',
'view_notification_updates',
'view_notifications_csv',
'view_provider',
'view_providers',
'no_cookie.view_template_version_preview',
'who_its_for',
}
class CaseworkNavigation(Navigation):
mapping = {
'send-one-off': {
'choose_from_contact_list',
'choose_template',
'send_one_off',
'send_one_off_letter_address',
'send_one_off_step',
'send_test',
'send_test_step',
},
'sent-messages': {
'view_notifications',
'view_notification',
},
'uploads': {
'view_jobs',
'view_job',
'upload_contact_list',
'check_contact_list',
'save_contact_list',
'contact_list',
'delete_contact_list',
'upload_letter',
'uploaded_letter_preview',
'uploads',
},
}
exclude = {
'accept_invite',
'accept_org_invite',
'action_blocked',
'add_data_retention',
'add_organisation',
'add_organisation_from_gp_service',
'add_organisation_from_nhs_local_service',
'add_service',
'add_service_template',
'api_callbacks',
'api_documentation',
'api_integration',
'api_keys',
'archive_service',
'archive_user',
'bat_phone',
'branding_request',
'callbacks',
'cancel_invited_org_user',
'cancel_invited_user',
'cancel_job',
'cancel_letter',
'cancel_letter_job',
'check_and_resend_text_code',
'check_and_resend_verification_code',
'check_messages',
'no_cookie.check_messages_preview',
'check_notification',
'no_cookie.check_notification_preview',
'choose_account',
'choose_service',
'choose_template_to_copy',
'clear_cache',
'edit_organisation_agreement',
'edit_organisation_crown_status',
'edit_organisation_domains',
'edit_organisation_email_branding',
'edit_organisation_go_live_notes',
'edit_organisation_letter_branding',
'confirm_edit_organisation_name',
'confirm_edit_user_email',
'confirm_edit_user_mobile_number',
'confirm_redact_template',
'conversation',
'conversation_reply',
'conversation_reply_with_template',
'conversation_updates',
'cookies',
'copy_template',
'create_api_key',
'create_email_branding',
'create_letter_branding',
'data_retention',
'delete_service_template',
'delete_template_folder',
'delivery_and_failure',
'delivery_status_callback',
'design_content',
'documentation',
'download_contact_list',
'download_notifications_csv',
'edit_data_retention',
'edit_organisation_agreement',
'edit_organisation_crown_status',
'edit_organisation_domains',
'edit_organisation_name',
'edit_organisation_type',
'edit_provider',
'edit_sms_provider_ratio',
'edit_service_template',
'edit_template_postage',
'edit_user_email',
'edit_user_mobile_number',
'edit_user_org_permissions',
'edit_user_permissions',
'email_branding',
'email_not_received',
'email_template',
'error',
'estimate_usage',
'features',
'features_email',
'features_letters',
'features_sms',
'feedback',
'find_services_by_name',
'find_users_by_email',
'forgot_password',
'get_example_csv',
'get_notifications_as_json',
'get_started',
'get_started_old',
'go_to_dashboard_after_tour',
'guidance_index',
'branding_and_customisation',
'create_and_send_messages',
'edit_and_format_messages',
'send_files_by_email',
'upload_a_letter',
'history',
'how_to_pay',
'inbound_sms_admin',
'inbox_download',
'inbox_updates',
'inbox',
'index',
'information_risk_management',
'information_security',
'integration_testing',
'invite_org_user',
'invite_user',
'no_cookie.letter_branding_preview_image',
'letter_branding',
'letter_spec',
'letter_template',
'link_service_to_organisation',
'live_services',
'live_services_csv',
'manage_org_users',
'manage_template_folder',
'manage_users',
'message_status',
'monthly',
'new_password',
'notifications_sent_by_service',
'old_integration_testing',
'old_roadmap',
'old_service_dashboard',
'old_terms',
'old_using_notify',
'organisation_dashboard',
'organisation_trial_mode_services',
'organisation_preview_email_branding',
'organisation_preview_letter_branding',
'organisation_settings',
'organisations',
'performance_platform_xlsx',
'platform_admin_list_complaints',
'platform_admin_reports',
'platform_admin_returned_letters',
'platform_admin',
'platform_admin_splash_page',
'pricing',
'privacy',
'public_agreement',
'public_download_agreement',
'received_text_messages_callback',
'redact_template',
'register_from_invite',
'register_from_org_invite',
'register',
'registration_continue',
'remove_user_from_organisation',
'remove_user_from_service',
'request_to_go_live',
'resend_email_link',
'resend_email_verification',
'resume_service',
'returned_letter_summary',
'returned_letters',
'returned_letters_report',
'revalidate_email_sent',
'revoke_api_key',
'roadmap',
'robots',
'security',
'send_messages',
'send_notification',
'no_cookie.send_test_preview',
'send_from_contact_list',
'send_uploaded_letter',
'service_add_email_reply_to',
'service_add_letter_contact',
'service_add_sms_sender',
'service_agreement',
'service_accept_agreement',
'service_confirm_agreement',
'service_confirm_delete_email_reply_to',
'service_confirm_delete_letter_contact',
'service_confirm_delete_sms_sender',
'service_dashboard',
'service_dashboard_updates',
'service_delete_email_reply_to',
'service_delete_letter_contact',
'service_delete_sms_sender',
'service_download_agreement',
'service_edit_email_reply_to',
'service_edit_letter_contact',
'service_edit_sms_sender',
'service_email_reply_to',
'service_letter_contact_details',
'service_make_blank_default_letter_contact',
'service_name_change',
'service_name_change_confirm',
'service_preview_email_branding',
'service_preview_letter_branding',
'service_set_auth_type',
'service_set_channel',
'send_files_by_email_contact_details',
'service_set_email_branding',
'service_set_inbound_number',
'service_set_inbound_sms',
'service_set_international_sms',
'service_set_letter_contact_block',
'service_set_letters',
'service_set_reply_to_email',
'service_set_sms_prefix',
'service_settings',
'service_sms_senders',
'service_switch_count_as_live',
'service_switch_live',
'service_set_permission',
'service_verify_reply_to_address',
'service_verify_reply_to_address_updates',
'services_or_dashboard',
'set_free_sms_allowance',
'service_set_letter_branding',
'set_sender',
'set_template_sender',
'show_accounts_or_dashboard',
'sign_in',
'sign_out',
'start_job',
'start_tour',
'styleguide',
'submit_request_to_go_live',
'support',
'support_public',
'suspend_service',
'template_history',
'template_usage',
'terms',
'thanks',
'triage',
'trial_mode',
'trial_mode_new',
'trial_services',
'two_factor',
'two_factor_email',
'two_factor_email_sent',
'two_factor_email_interstitial',
'update_email_branding',
'update_letter_branding',
'usage',
'usage_for_all_services',
'user_information',
'user_profile',
'user_profile_email',
'user_profile_email_authenticate',
'user_profile_email_confirm',
'user_profile_mobile_number',
'user_profile_mobile_number_authenticate',
'user_profile_mobile_number_confirm',
'user_profile_name',
'user_profile_password',
'user_profile_disable_platform_admin_view',
'using_notify',
'verify',
'verify_email',
'view_job_csv',
'view_job_updates',
'view_letter_notification_as_preview',
'no_cookie.view_letter_template_preview',
'view_letter_upload_as_preview',
'view_notification_updates',
'view_notifications_csv',
'view_provider',
'view_providers',
'view_template',
'view_template_version',
'no_cookie.view_template_version_preview',
'view_template_versions',
'whitelist',
'who_its_for',
}
class OrgNavigation(Navigation):
mapping = {
'dashboard': {
'organisation_dashboard',
},
'settings': {
'confirm_edit_organisation_name',
'edit_organisation_agreement',
'edit_organisation_crown_status',
'edit_organisation_domains',
'edit_organisation_email_branding',
'edit_organisation_letter_branding',
'edit_organisation_domains',
'edit_organisation_go_live_notes',
'edit_organisation_name',
'edit_organisation_type',
'organisation_preview_email_branding',
'organisation_preview_letter_branding',
'organisation_settings',
},
'team-members': {
'edit_user_org_permissions',
'invite_org_user',
'manage_org_users',
'remove_user_from_organisation',
},
'trial-services': {
'organisation_trial_mode_services',
}
}
exclude = {
'accept_invite',
'accept_org_invite',
'action_blocked',
'add_data_retention',
'add_organisation',
'add_organisation_from_gp_service',
'add_organisation_from_nhs_local_service',
'add_service',
'add_service_template',
'api_callbacks',
'api_documentation',
'api_integration',
'api_keys',
'archive_service',
'archive_user',
'bat_phone',
'branding_request',
'callbacks',
'cancel_invited_org_user',
'cancel_invited_user',
'cancel_job',
'cancel_letter',
'cancel_letter_job',
'check_and_resend_text_code',
'check_and_resend_verification_code',
'check_messages',
'no_cookie.check_messages_preview',
'check_notification',
'no_cookie.check_notification_preview',
'choose_account',
'choose_from_contact_list',
'choose_service',
'choose_template',
'choose_template_to_copy',
'clear_cache',
'confirm_edit_user_email',
'confirm_edit_user_mobile_number',
'confirm_redact_template',
'conversation',
'conversation_reply',
'conversation_reply_with_template',
'conversation_updates',
'cookies',
'copy_template',
'create_api_key',
'create_email_branding',
'create_letter_branding',
'data_retention',
'delete_service_template',
'delete_template_folder',
'delivery_and_failure',
'delivery_status_callback',
'design_content',
'documentation',
'download_contact_list',
'download_notifications_csv',
'edit_data_retention',
'edit_provider',
'edit_service_template',
'edit_sms_provider_ratio',
'edit_template_postage',
'edit_user_email',
'edit_user_mobile_number',
'edit_user_permissions',
'email_branding',
'email_not_received',
'email_template',
'error',
'estimate_usage',
'features',
'features_email',
'features_letters',
'features_sms',
'feedback',
'find_services_by_name',
'find_users_by_email',
'forgot_password',
'get_example_csv',
'get_notifications_as_json',
'get_started',
'get_started_old',
'go_to_dashboard_after_tour',
'guidance_index',
'branding_and_customisation',
'create_and_send_messages',
'edit_and_format_messages',
'send_files_by_email',
'upload_a_letter',
'history',
'how_to_pay',
'inbound_sms_admin',
'inbox',
'inbox_download',
'inbox_updates',
'index',
'information_risk_management',
'information_security',
'integration_testing',
'invite_user',
'letter_branding',
'no_cookie.letter_branding_preview_image',
'letter_spec',
'letter_template',
'link_service_to_organisation',
'live_services',
'live_services_csv',
'manage_template_folder',
'manage_users',
'message_status',
'monthly',
'new_password',
'notifications_sent_by_service',
'old_integration_testing',
'old_roadmap',
'old_service_dashboard',
'old_terms',
'old_using_notify',
'organisations',
'performance_platform_xlsx',
'platform_admin',
'platform_admin_list_complaints',
'platform_admin_reports',
'platform_admin_returned_letters',
'platform_admin_splash_page',
'pricing',
'privacy',
'public_agreement',
'public_download_agreement',
'received_text_messages_callback',
'redact_template',
'register',
'register_from_invite',
'register_from_org_invite',
'registration_continue',
'remove_user_from_service',
'request_to_go_live',
'resend_email_link',
'resend_email_verification',
'resume_service',
'returned_letter_summary',
'returned_letters',
'returned_letters_report',
'revalidate_email_sent',
'revoke_api_key',
'roadmap',
'robots',
'security',
'send_messages',
'send_notification',
'send_one_off',
'send_one_off_letter_address',
'send_one_off_step',
'send_test',
'no_cookie.send_test_preview',
'send_test_step',
'send_from_contact_list',
'send_uploaded_letter',
'service_add_email_reply_to',
'service_add_letter_contact',
'service_add_sms_sender',
'service_agreement',
'service_accept_agreement',
'service_confirm_agreement',
'service_confirm_delete_email_reply_to',
'service_confirm_delete_letter_contact',
'service_confirm_delete_sms_sender',
'service_dashboard',
'service_dashboard_updates',
'service_delete_email_reply_to',
'service_delete_letter_contact',
'service_delete_sms_sender',
'service_download_agreement',
'service_edit_email_reply_to',
'service_edit_letter_contact',
'service_edit_sms_sender',
'service_email_reply_to',
'service_letter_contact_details',
'service_make_blank_default_letter_contact',
'service_name_change',
'service_name_change_confirm',
'service_preview_email_branding',
'service_preview_letter_branding',
'service_set_auth_type',
'service_set_channel',
'send_files_by_email_contact_details',
'service_set_email_branding',
'service_set_inbound_number',
'service_set_inbound_sms',
'service_set_international_sms',
'service_set_letter_contact_block',
'service_set_letters',
'service_set_reply_to_email',
'service_set_sms_prefix',
'service_settings',
'service_sms_senders',
'service_switch_count_as_live',
'service_switch_live',
'service_set_permission',
'service_verify_reply_to_address',
'service_verify_reply_to_address_updates',
'services_or_dashboard',
'set_free_sms_allowance',
'service_set_letter_branding',
'set_sender',
'set_template_sender',
'show_accounts_or_dashboard',
'sign_in',
'sign_out',
'start_job',
'start_tour',
'styleguide',
'submit_request_to_go_live',
'support',
'support_public',
'suspend_service',
'template_history',
'template_usage',
'terms',
'thanks',
'triage',
'trial_mode',
'trial_mode_new',
'trial_services',
'two_factor',
'two_factor_email',
'two_factor_email_sent',
'two_factor_email_interstitial',
'update_email_branding',
'update_letter_branding',
'upload_contact_list',
'check_contact_list',
'save_contact_list',
'contact_list',
'delete_contact_list',
'upload_letter',
'uploaded_letter_preview',
'uploads',
'usage',
'usage_for_all_services',
'user_information',
'user_profile',
'user_profile_email',
'user_profile_email_authenticate',
'user_profile_email_confirm',
'user_profile_mobile_number',
'user_profile_mobile_number_authenticate',
'user_profile_mobile_number_confirm',
'user_profile_name',
'user_profile_password',
'user_profile_disable_platform_admin_view',
'using_notify',
'verify',
'verify_email',
'view_job',
'view_job_csv',
'view_job_updates',
'view_jobs',
'view_letter_notification_as_preview',
'no_cookie.view_letter_template_preview',
'view_letter_upload_as_preview',
'view_notification',
'view_notification_updates',
'view_notifications',
'view_notifications_csv',
'view_provider',
'view_providers',
'view_template',
'view_template_version',
'no_cookie.view_template_version_preview',
'view_template_versions',
'whitelist',
'who_its_for',
}
| 31.085404 | 87 | 0.60048 |
acf2ea1f3ef03681ac3e1fa3a6909c450b51ac0d | 1,824 | py | Python | aliyun-python-sdk-linkwan/aliyunsdklinkwan/request/v20190301/SubmitNodeTupleOrderRequest.py | yndu13/aliyun-openapi-python-sdk | 12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5 | [
"Apache-2.0"
] | 1,001 | 2015-07-24T01:32:41.000Z | 2022-03-25T01:28:18.000Z | aliyun-python-sdk-linkwan/aliyunsdklinkwan/request/v20190301/SubmitNodeTupleOrderRequest.py | yndu13/aliyun-openapi-python-sdk | 12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5 | [
"Apache-2.0"
] | 363 | 2015-10-20T03:15:00.000Z | 2022-03-08T12:26:19.000Z | aliyun-python-sdk-linkwan/aliyunsdklinkwan/request/v20190301/SubmitNodeTupleOrderRequest.py | yndu13/aliyun-openapi-python-sdk | 12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5 | [
"Apache-2.0"
] | 682 | 2015-09-22T07:19:02.000Z | 2022-03-22T09:51:46.000Z | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdklinkwan.endpoint import endpoint_data
class SubmitNodeTupleOrderRequest(RpcRequest):
    """RPC request wrapper for the Aliyun LinkWAN ``SubmitNodeTupleOrder``
    operation (API version 2019-03-01).

    Query parameters: LoraVersion, TupleType, RequiredCount -- each
    exposed through a get_*/set_* accessor pair.
    """

    def __init__(self):
        RpcRequest.__init__(self, 'LinkWAN', '2019-03-01', 'SubmitNodeTupleOrder', 'linkwan')
        self.set_method('POST')
        # Older releases of the core SDK lack these attributes, hence the
        # hasattr guards before wiring up endpoint resolution data.
        if hasattr(self, "endpoint_map"):
            setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
        if hasattr(self, "endpoint_regional"):
            setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

    def get_LoraVersion(self):
        # None when the parameter has not been set yet.
        params = self.get_query_params()
        return params.get('LoraVersion')

    def set_LoraVersion(self, LoraVersion):
        self.add_query_param('LoraVersion', LoraVersion)

    def get_TupleType(self):
        params = self.get_query_params()
        return params.get('TupleType')

    def set_TupleType(self, TupleType):
        self.add_query_param('TupleType', TupleType)

    def get_RequiredCount(self):
        params = self.get_query_params()
        return params.get('RequiredCount')

    def set_RequiredCount(self, RequiredCount):
        self.add_query_param('RequiredCount', RequiredCount)
acf2eb809ca11c4472eef48ef46fd73696f6b286 | 25,407 | py | Python | recipes/views/__init__.py | marici/recipebook | b46e06bf955788462f659d923ef47e329c807f92 | [
"MIT"
] | 2 | 2017-06-04T11:30:04.000Z | 2017-06-21T20:17:34.000Z | recipes/views/__init__.py | marici/recipebook | b46e06bf955788462f659d923ef47e329c807f92 | [
"MIT"
] | null | null | null | recipes/views/__init__.py | marici/recipebook | b46e06bf955788462f659d923ef47e329c807f92 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
'''
The MIT License
Copyright (c) 2009 Marici, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
from django.conf import settings
from django.core import serializers
from django.core.urlresolvers import reverse
from django.http import (HttpResponse, Http404, HttpResponseRedirect,
HttpResponseForbidden,)
from django.template import loader, Context, RequestContext
from django.shortcuts import get_object_or_404, render_to_response
from django.contrib.auth.decorators import login_required
from django.utils import simplejson
from django.contrib import messages
from django.contrib.sites.models import Site
from maricilib.django.core.paginator import Paginator
from maricilib.django.shortcuts import render_to_response_of_class
from maricilib.django.decorators import getmethod, postmethod
from maricilib.django.apps.taskqueue.queue import get_taskqueue
from maricilib.django.apps.taskqueue.tasks import SendEmailTask
from recipes.models import (Recipe, Contest, User, UserProfile, Direction,
Comment, FavoriteRecipe)
from recipes import forms
def user_is_active_or_404(user):
    """Raise Http404 unless the given user account is active."""
    if user.is_active:
        return
    raise Http404
def show_recipe(request, recipe_id=None):
    '''
    Render the detail page for the recipe with the given ID.

    A draft recipe (is_draft == True) is visible only to the user who
    created it.

    @param recipe_id: primary key of a Recipe instance
    @context recipe: Recipe instance (recipe.id == recipe_id)
    @return: 403 response (recipe.is_draft == True and
             request.user != recipe.user)
    @return: 404 response (no Recipe with the given ID exists)
    @return: 404 response (the recipe's author is inactive -- except when
             the recipe has been awarded, i.e. chosen for commercialization)
    @return: 200 response (success; the detail page is shown)
    '''
    recipe = get_object_or_404(Recipe, pk=recipe_id)
    recipe_user = recipe.user
    # Awarded recipes stay visible even if the author's account has been
    # deactivated.
    if not recipe.is_awarded:
        user_is_active_or_404(recipe_user)
    if recipe.is_draft and request.user != recipe_user:
        return render_to_response_of_class(HttpResponseForbidden, '403.html')
    recipe.set_request_user(request.user)
    comment_form = None
    profile = recipe_user.get_profile()
    # Authors can opt out of comments; in that case no comment form is
    # rendered on the page.
    if not profile.deny_comment:
        comment_form = forms.CommentForm()
    submit_form = forms.SubmitToContestForm()
    d = {'recipe': recipe, 'comment_form': comment_form,
         'submit_form': submit_form}
    return render_to_response('recipes/recipe.html',
                              d, RequestContext(request))
@getmethod
def show_recipe_for_print(request, recipe_id=None):
    '''
    Render the print-oriented detail page for the recipe with the given
    ID.  Access conditions are the same as for show_recipe.
    '''
    recipe = get_object_or_404(Recipe, pk=recipe_id)
    # Awarded recipes stay visible even if the author was deactivated.
    if not recipe.is_awarded:
        user_is_active_or_404(recipe.user)
    if recipe.is_draft and request.user != recipe.user:
        return render_to_response_of_class(HttpResponseForbidden, '403.html')
    recipe.set_request_user(request.user)
    d = {'recipe': recipe}
    return render_to_response('recipes/recipe_print.html',
                              d, RequestContext(request))
@getmethod
@login_required
def register_recipe(request, contest_id=None):
    '''
    Render the form page for creating a new recipe.

    @param contest_id: contest ID (optional)
    @context form: a NewRecipeForm instance
    @return: 302 response (to the login page when not logged in)
    @return: 200 response (success; the form is shown)
    '''
    form = forms.NewRecipeForm()
    d = {'form': form}
    return render_to_response('recipes/new_recipe_form.html',
                              d, RequestContext(request))
@postmethod
@login_required
def register_recipe(request, contest_id=None):
    '''
    Create a new recipe.

    Accepts the values defined by forms.NewRecipeForm; see that form for
    the required fields.  On success the client is redirected to the
    recipe edit page.

    The created Recipe instance has:
        name, introduction, feeling, photo: values from the form
        user: the logged-in user
        is_draft: True
        contest: the contest with the given ID (when contest_id is given)

    @param contest_id: contest ID (optional)
    @context form: a NewRecipeForm instance (when the form is invalid)
    @return: 404 response (when the given contest does not exist)
    @return: 302 response (to the login page when not logged in)
    @return: 302 response (to the recipe edit page on success)
    '''
    form = forms.NewRecipeForm(request.POST, request.FILES)
    if not form.is_valid():
        return render_to_response('recipes/new_recipe_form.html',
                                  {'form': form}, RequestContext(request))
    recipe = form.save(commit=False)
    recipe.user = request.user
    if contest_id:
        # Bug fix: Contest.objects.get() raised DoesNotExist (HTTP 500)
        # for an unknown contest; get_object_or_404 returns 404 instead,
        # matching the rest of this module.
        recipe.contest = get_object_or_404(Contest, pk=contest_id)
    recipe.save()
    return HttpResponseRedirect(reverse('recipes-edit',
                                        kwargs={'recipe_id': recipe.id}))
@postmethod
@login_required
def toggle_recipe_open_state(request, recipe_id=None):
    '''
    Toggle the publication state of the recipe with the given ID.

    A draft recipe (is_draft == True) becomes public (is_draft == False)
    and a public one becomes a draft.  Only the author may change the
    state (recipe.user == request.user).

    @param recipe_id: recipe ID
    @return: 302 response (to the login page when not logged in)
    @return: 403 response (when the requester is not the author, or the
             recipe has already been awarded for commercialization)
    @return: 404 response (no Recipe with the given ID exists)
    @return: 200 response (JSON is returned on success)
    '''
    recipe = get_object_or_404(Recipe, pk=recipe_id)
    if request.user != recipe.user or recipe.is_awarded:
        return render_to_response_of_class(HttpResponseForbidden, '403.html')
    recipe.toggle_open_state()
    recipe.save()
    data = serializers.serialize('json', [recipe])  # TODO: trim the serialized fields
    return HttpResponse(data, mimetype='application/json')
def render_edit_recipe_page(request, recipe, form):
    """Render the recipe edit screen.  (Helper -- not a view function.)"""
    context = {
        'form': form,
        'direction_form': forms.DirectionForm(),
        'recipe': recipe,
        'directions': recipe.ordered_directions(),
    }
    return render_to_response('recipes/edit_recipe_form.html',
                              context, RequestContext(request))
@getmethod
@login_required
def edit_recipe(request, recipe_id=None):
    '''
    Render the edit screen for the recipe with the given ID.

    Only the author may see it (recipe.user == request.user).

    @param recipe_id: recipe ID
    @context recipe: Recipe instance
    @context directions: the recipe's directions
    @context form: RecipeForm instance
    @context direction_form: DirectionForm instance
    @return: 302 response (to the login page when not logged in)
    @return: 403 response (when the requester is not the author, the
             recipe has been awarded, or it has been published at least
             once while attached to a contest)
    @return: 404 response (no Recipe with the given ID exists)
    @return: 200 response (success; the form is shown)
    '''
    recipe = get_object_or_404(Recipe, pk=recipe_id)
    if (request.user != recipe.user or \
        recipe.is_awarded or \
        (recipe.contest and recipe.published_at)):
        return render_to_response_of_class(HttpResponseForbidden, '403.html')
    form = forms.RecipeForm(instance=recipe)
    return render_edit_recipe_page(request, recipe, form)
@postmethod
@login_required
def edit_recipe(request, recipe_id=None):
    '''
    Update the recipe with the given ID.

    Only the author may edit it (recipe.user == request.user).  Accepts
    the values defined by forms.RecipeForm.  The ingredient rows (the
    ``food`` and ``quantity`` POST parameter lists) are encoded into a
    JSON string; recipe.decode_ingredients() restores them as a list of
    (food name, quantity) tuples.

    @param recipe_id: recipe ID
    @return: 302 response (to the login page when not logged in)
    @return: 403 response (when the requester is not the author, the
             recipe has been awarded, or it has been published at least
             once while attached to a contest)
    @return: 404 response (no Recipe with the given ID exists)
    @return: 302 response (to the recipe edit page on success)
    @return: 200 response (the edit form with error messages when the
             submitted data is invalid)
    '''
    recipe = get_object_or_404(Recipe, pk=recipe_id)
    if (request.user != recipe.user or
            recipe.is_awarded or
            (recipe.contest and recipe.published_at)):
        return render_to_response_of_class(HttpResponseForbidden, '403.html')
    recipe.encode_ingredients(request.POST.getlist('food'),
                              request.POST.getlist('quantity'))
    form = forms.RecipeForm(request.POST, request.FILES, instance=recipe)
    if form.is_valid():
        recipe = form.save()
        messages.add_message(request, messages.INFO, u'レシピを保存しました。')
        return HttpResponseRedirect(reverse('recipes-edit',
                                            kwargs={'recipe_id': recipe.id}))
    # Bug fix: the original added the success message and redirected even
    # when validation failed, silently discarding the user's changes and
    # all form errors.  Redisplay the edit page with the bound form.
    return render_edit_recipe_page(request, recipe, form)
@postmethod
@login_required
def delete_recipe(request, recipe_id=None):
    '''
    Delete the recipe with the given ID.
    Only the user who created the recipe may do this
    (recipe.user == request.user), and never once the recipe has been
    selected for commercialization (is_awarded).
    @param recipe_id: recipe ID
    @return: 302 response (to the login page when not authenticated)
    @return: 403 response (requester is not the author, or the recipe is
             awarded)
    @return: 404 response (no recipe with the given ID)
    @return: 302 response (success: to POST's redirect_path, or to
             settings.LOGIN_REDIRECT_URL)
    '''
    redirect_path = request.POST.get('redirect_path',
                                     settings.LOGIN_REDIRECT_URL)
    recipe = get_object_or_404(Recipe, pk=recipe_id)
    if recipe.user != request.user or recipe.is_awarded:
        return render_to_response_of_class(HttpResponseForbidden, '403.html')
    # Build the flash message before the instance is deleted.
    message = u'%s を削除しました。' % recipe.name
    recipe.delete()
    messages.add_message(request, messages.INFO, message)
    return HttpResponseRedirect(redirect_path)
@postmethod
@login_required
def mail_recipe(request, recipe_id=None):
    '''
    Send the recipe with the given ID by e-mail to the address POSTed as
    "alter_email".  Only authenticated users may do this.
    When the submitted address differs from profile.alter_email, the
    profile is updated to remember it.
    @param recipe_id: recipe ID
    @return: 302 response (to the login page when not authenticated)
    @return: 404 response (no recipe with the given ID)
    @return: 200 response (success; the recipe serialized as JSON)
    '''
    site = Site.objects.get_current()
    recipe = get_object_or_404(Recipe, pk=recipe_id)
    profile = request.user.get_profile()
    email = request.POST.get('alter_email', profile.alter_email)
    if email != profile.alter_email:
        profile.alter_email = email
        profile.save()
    c = Context({'user': request.user, 'recipe': recipe})
    t = loader.get_template('recipes/email/mail_recipe.txt')
    subject = u'[%s] %s' % (site.name, recipe.name)
    body = t.render(c)
    # Delivery is queued on a task queue rather than sent inline.
    task = SendEmailTask(dict(subject=subject, body=body,
                              from_address=settings.EMAIL_FROM,
                              to_list=[email]))
    get_taskqueue().send_task(task, queue_name=settings.QUEUENAME_EMAIL)
    json = serializers.serialize('json', [recipe])  # TODO: trim the payload
    return HttpResponse(json, mimetype='application/json')
@postmethod
@login_required
def submit_recipe_to_contest(request, recipe_id):
    '''
    Submit the recipe with the given ID to the contest named by the
    POSTed "contest" parameter.
    Only the recipe's author may do this.  On success the recipe is left
    with recipe.contest set to the chosen Contest instance.
    @param recipe_id: recipe ID
    @return: 302 response (to the login page when not authenticated)
    @return: 404 response (no recipe with the given ID)
    @return: 403 response (requester is not the author)
    @return: 403 response (the specified contest does not exist)
    @return: 200 response (success; the contest serialized as JSON, or an
             empty body when no contest id was posted)
    '''
    recipe = get_object_or_404(Recipe, pk=recipe_id)
    if request.user != recipe.user:
        return render_to_response_of_class(HttpResponseForbidden, '403.html')
    contest_id = request.POST.get('contest', None)
    if contest_id:
        # BUGFIX (resolves the old docstring TODO): an unknown contest id
        # used to raise an unhandled Contest.DoesNotExist (HTTP 500).
        # Answer with 403 instead, as documented.
        try:
            contest = Contest.objects.get(pk=contest_id)
        except Contest.DoesNotExist:
            return render_to_response_of_class(HttpResponseForbidden,
                                               '403.html')
        recipe.contest = contest
        recipe.save()
        json = serializers.serialize('json', [contest])
    else:
        json = ''
    return HttpResponse(json, mimetype='application/json')
def show_voting_users(request, recipe_id):
    '''
    List the users who voted for the recipe with the given ID.
    @param recipe_id: recipe ID
    @context users_and_profiles: list pairing each user with their profile
    @return: 404 response (no recipe with the given ID)
    @return: 404 response (the recipe's author has is_active=False)
    @return: 200 response (success)
    '''
    recipe = get_object_or_404(Recipe, pk=recipe_id)
    user_is_active_or_404(recipe.user)
    # Pass the inner SQL (.query) so the user lookup stays a single
    # subquery instead of materializing the id list in Python.
    user_ids = recipe.vote_set.filter(recipe=recipe).values('user_id').query
    users = User.objects.filter(pk__in=user_ids)
    u_and_p = UserProfile.objects.get_zipped_profiles(users,
                                                      users.values('pk').query)
    links = [{'name': recipe.name,
              'url': reverse('recipes-show', kwargs={'recipe_id': recipe.id})}]
    d = {'users_and_profiles': u_and_p, 'links': links,
         'title': u'%s に投票したメンバー' % recipe.name}
    return render_to_response('recipes/users.html',
                              d, RequestContext(request))
@postmethod
def copy_recipe(request, recipe_id=None):
    '''
    Copy the recipe with the given ID into a new recipe owned by the
    requesting user.  Drafts may only be copied by their own author.
    Responds with a JSON status object.
    @param recipe_id: recipe ID
    @return: 403 response (not authenticated or inactive account)
    @return: 403 response (the recipe is a draft and the requester is not
             its author)
    @return: 404 response (no such recipe, or its author is inactive)
    @return: 200 response (success; JSON with status and new recipe ID)
    '''
    failure_json = simplejson.dumps({'status': 'failure'})
    # Authentication is checked by hand (no @login_required) so failures
    # come back as JSON status codes instead of a login-page redirect.
    if request.user is None or not request.user.is_active:
        return HttpResponse(failure_json, 'application/json', 403)
    try:
        recipe = Recipe.objects.get(pk=recipe_id)
    except Recipe.DoesNotExist:
        return HttpResponse(failure_json, 'application/json', 404)
    if not recipe.user.is_active:
        return HttpResponse(failure_json, 'application/json', 404)
    if recipe.is_draft and recipe.user != request.user:
        return HttpResponse(failure_json, 'application/json', 403)
    new_recipe = recipe.copy(request.user)
    json = simplejson.dumps({'status': 'success', 'recipe_id': new_recipe.pk})
    return HttpResponse(json, 'application/json')
@postmethod
@login_required
def register_direction(request, recipe_id=None):
    '''
    Create a new direction (cooking step) for the recipe with the given
    ID.  Only the recipe's author may do this
    (recipe.user == request.user).
    @param recipe_id: recipe ID
    @return: 302 response (to the login page when not authenticated)
    @return: 403 response (requester is not the recipe's author)
    @return: 404 response (no recipe with the given ID)
    @return: 200 response (success or form error, as a JSON payload)
    '''
    recipe = get_object_or_404(Recipe, pk=recipe_id)
    if recipe.user != request.user:
        return render_to_response_of_class(HttpResponseForbidden, '403.html')
    form = forms.DirectionForm(request.POST, request.FILES)
    if not form.is_valid():
        json = simplejson.dumps({'status': 'error',
                                 'message': 'form is not valid.'})
        direction = None
    else:
        direction = form.save(commit=False)
        recipe.add_direction(direction)
        direction.save()
        d = {'status': 'success',
             'message': '',
             'direction': {'pk': direction.id,
                           'fields': {'text': direction.text,
                                      'photo': direction.photo and direction.photo.url or ''}}}
        json = simplejson.dumps(d)
    # BUGFIX: `direction` is None when the form failed validation, which
    # used to raise AttributeError on `direction.photo` (HTTP 500) instead
    # of returning the error JSON.  The <textarea> wrapper is only needed
    # for the iframe file-upload response path.
    if direction is not None and direction.photo:
        data = '<textarea>%s</textarea>' % json
    else:
        data = json
    return HttpResponse(data, mimetype='text/html')
@postmethod
@login_required
def edit_direction(request, recipe_id=None, direction_id=None):
    '''
    Update the text of the direction (cooking step) with the given ID.
    Only the recipe's author may do this (recipe.user == request.user).
    @param recipe_id: recipe ID
    @param direction_id: direction ID
    @return: 302 response (to the login page when not authenticated)
    @return: 404 response (recipe or direction not found)
    @return: 403 response (requester is not the recipe's author)
    @return: 403 response (the direction does not belong to the recipe)
    @return: 200 response (success; the Direction serialized as JSON)
    '''
    recipe = get_object_or_404(Recipe, pk=recipe_id)
    if recipe.user != request.user:
        return render_to_response_of_class(HttpResponseForbidden, '403.html')
    direction = get_object_or_404(Direction, pk=direction_id)
    if recipe != direction.recipe:
        return render_to_response_of_class(HttpResponseForbidden, '403.html')
    text = request.POST.get('text')
    direction.text = text
    direction.save()
    json = serializers.serialize('json', [direction])
    return HttpResponse(json, mimetype='application/json')
@postmethod
@login_required
def delete_direction(request, recipe_id=None, direction_id=None):
    '''
    Delete the direction (cooking step) with the given ID.
    Only the recipe's author may do this (recipe.user == request.user).
    @param recipe_id: recipe ID
    @param direction_id: direction ID
    @return: 302 response (to the login page when not authenticated)
    @return: 404 response (recipe or direction not found)
    @return: 403 response (requester is not the recipe's author)
    @return: 403 response (the direction does not belong to the recipe)
    @return: 200 response (success; the deleted Direction as JSON)
    '''
    recipe = get_object_or_404(Recipe, pk=recipe_id)
    if recipe.user != request.user:
        return render_to_response_of_class(HttpResponseForbidden, '403.html')
    direction = get_object_or_404(Direction, pk=direction_id)
    if recipe != direction.recipe:
        return render_to_response_of_class(HttpResponseForbidden, '403.html')
    # Serialize before deleting so the response can still describe it.
    json = serializers.serialize('json', [direction])
    direction.delete()
    return HttpResponse(json, mimetype='application/json')
@postmethod
@login_required
def sort_directions(request, recipe_id=None):
    '''
    Reorder the directions of the recipe with the given ID to match the
    order of the direction IDs POSTed as "direction[]".
    Only the recipe's author may do this (recipe.user == request.user).
    @param recipe_id: recipe ID
    @return: 302 response (to the login page when not authenticated)
    @return: 404 response (no recipe with the given ID)
    @return: 403 response (requester is not the recipe's author)
    @return: 200 response (success; the submitted ID list echoed as JSON)
    '''
    recipe = get_object_or_404(Recipe, pk=recipe_id)
    if recipe.user != request.user:
        return render_to_response_of_class(HttpResponseForbidden, '403.html')
    direction_ids = request.POST.getlist('direction[]')
    # Index this recipe's directions by primary key.
    direction_dict = {}
    for direction in recipe.direction_set.all():
        direction_dict[direction.id] = direction
    # NOTE(review): a posted id that is not one of the recipe's directions
    # raises an uncaught KeyError/ValueError here (HTTP 500) -- confirm
    # the client is trusted to post only valid ids.
    for i, direction_id in enumerate(direction_ids):
        direction = direction_dict[int(direction_id)]
        direction.number = i
        direction.save()
    json = simplejson.dumps(direction_ids)
    return HttpResponse(json, mimetype='application/json')
@postmethod
@login_required
def add_favorite_recipe(request, recipe_id=None):
    '''
    Add the recipe with the given ID to the requesting user's favorites.
    Any authenticated user except the recipe's author may do this
    (recipe.user != request.user).
    The FavoriteRecipe returned as JSON has user=request.user and
    recipe=the addressed Recipe instance.
    @param recipe_id: recipe ID
    @context favorite_recipe: the created FavoriteRecipe instance
    @return: 302 response (to the login page when not authenticated)
    @return: 403 response (requester is the recipe's author)
    @return: 404 response (no such recipe, or its author is inactive)
    @return: 200 response (success; FavoriteRecipe serialized as JSON)
    '''
    recipe = get_object_or_404(Recipe, pk=recipe_id)
    user_is_active_or_404(recipe.user)
    if recipe.user == request.user:
        return render_to_response_of_class(HttpResponseForbidden, '403.html')
    fav = recipe.favorite(request.user)
    data = serializers.serialize('json', [fav])
    return HttpResponse(data, mimetype='application/json')
@postmethod
@login_required
def remove_favorite_recipe(request, recipe_id=None):
    '''
    Remove the recipe with the given ID from the requesting user's
    favorites.  Only the user who created the FavoriteRecipe entry can
    remove it; the deleted instance is returned as JSON.
    @param recipe_id: recipe ID
    @return: 302 response (to the login page when not authenticated)
    @return: 403 response (requester is the recipe's author)
    @return: 404 response (recipe missing, or no matching favorite entry)
    @return: 200 response (success; FavoriteRecipe serialized as JSON)
    '''
    recipe = get_object_or_404(Recipe, pk=recipe_id)
    if recipe.user == request.user:
        return render_to_response_of_class(HttpResponseForbidden, '403.html')
    # BUGFIX: the original bare `except:` converted *any* error (even
    # serialization bugs) into a 404; only a missing favorite entry is a
    # legitimate 404.
    try:
        fav = FavoriteRecipe.objects.get(user=request.user, recipe=recipe)
    except FavoriteRecipe.DoesNotExist:
        raise Http404
    data = serializers.serialize('json', [fav])
    fav.delete()
    return HttpResponse(data, mimetype='application/json')
@postmethod
@login_required
def vote_to_recipe(request, recipe_id=None):
    '''
    Cast a vote for the recipe with the given ID.
    Any authenticated user except the recipe's author may vote.
    @param recipe_id: recipe ID
    @return: 302 response (to the login page when not authenticated)
    @return: 403 response (requester is the recipe's author)
    @return: 404 response (no such recipe, or its author is inactive)
    @return: 200 response (success; the Vote serialized as JSON)
    '''
    recipe = get_object_or_404(Recipe, pk=recipe_id)
    user_is_active_or_404(recipe.user)
    if recipe.user == request.user:
        return render_to_response_of_class(HttpResponseForbidden, '403.html')
    vote = recipe.vote(request.user)
    # recipe.vote() presumably mutates denormalized state on the recipe
    # itself, hence the save -- TODO confirm against the model.
    recipe.save()
    data = serializers.serialize('json', [vote])
    return HttpResponse(data, mimetype='application/json')
@postmethod
@login_required
def comment_to_recipe(request, recipe_id=None):
    '''
    Post a comment on the recipe with the given ID.
    Only authenticated users may comment; fields follow CommentForm.
    A comment written by the recipe's author is approved immediately
    (is_moderated=True); anyone else's stays hidden from the recipe page
    until the author approves it.
    @param recipe_id: recipe ID
    @return: 302 response (to the login page when not authenticated)
    @return: 403 response (the recipe's author set profile.deny_comment)
    @return: 404 response (no such recipe, or its author is inactive)
    @return: 200 response (form invalid: recipe page redisplayed)
    @return: 302 response (success: redirect to the recipe page)
    '''
    recipe = get_object_or_404(Recipe, pk=recipe_id)
    recipe_user = recipe.user
    user_is_active_or_404(recipe_user)
    profile = recipe_user.get_profile()
    if profile.deny_comment:
        return render_to_response_of_class(HttpResponseForbidden, '403.html')
    form = forms.CommentForm(request.POST, request.FILES)
    if not form.is_valid():
        message = u'コメント内容に問題があります。'
        messages.add_message(request, messages.INFO, message)
        d = {'recipe': recipe, 'comment_form': form,
             'submit_form': forms.SubmitToContestForm()}
        return render_to_response('recipes/recipe.html',
                                  d, RequestContext(request))
    comment = form.save(commit=False)
    comment.user = request.user  # who wrote the comment
    comment.recipe = recipe
    comment.owner = recipe_user  # who moderates it (the recipe's author)
    if recipe_user == request.user:
        # The author's own comments need no moderation.
        comment.approve()
        message = u'コメントが登録されました。'
    else:
        message = u'コメントを受け付けました。%s さんが承認すると表示されます。' % recipe_user.first_name
    comment.save()
    messages.add_message(request, messages.INFO, message)
    return HttpResponseRedirect(reverse('recipes-show',
                                        kwargs={'recipe_id': recipe.id}))
@postmethod
@login_required
def delete_comment(request, comment_id):
    '''
    Delete a comment.
    Allowed only for the comment's author (comment.user) or the author of
    the commented recipe (comment.owner).
    @param comment_id: comment ID
    @return: 302 response (to the login page when not authenticated)
    @return: 404 response (no comment with the given ID)
    @return: 403 response (requester is neither commenter nor recipe author)
    @return: 200 response (success; the deleted comment as JSON)
    '''
    comment = get_object_or_404(Comment, pk=comment_id)
    if request.user not in (comment.owner, comment.user):
        return render_to_response_of_class(HttpResponseForbidden, '403.html')
    # Serialize before deleting so the response can still describe it.
    json = serializers.serialize('json', [comment])
    if comment.is_moderated:
        # Approved comments are also reflected on the recipe itself; keep
        # that state in sync before removing the comment row.
        recipe = comment.recipe
        recipe.delete_comment(comment)
        recipe.save()
    comment.delete()
    return HttpResponse(json, mimetype='application/json')
@postmethod
@login_required
def approve_comment(request, comment_id):
    '''
    Approve a comment, making it visible to other users.
    Allowed only for the author of the commented recipe (comment.owner).
    @param comment_id: comment ID
    @return: 302 response (to the login page when not authenticated)
    @return: 403 response (requester is not the recipe's author)
    @return: 404 response (no comment with the given ID)
    @return: 200 response (success; the comment serialized as JSON)
    '''
    comment = get_object_or_404(Comment, pk=comment_id)
    if request.user != comment.owner:
        return render_to_response_of_class(HttpResponseForbidden, '403.html')
    comment.approve()
    comment.save()
    json = serializers.serialize('json', [comment])
    return HttpResponse(json, mimetype='application/json')
def search_recipes(request, query=None, page=1):
    '''
    Search recipes.
    @param query: search string (falls back to GET's "query" parameter)
    @param page: page number to show, defaults to 1
    @context page_obj: paginator page whose object_list holds the hits
    @return: 200 response (success)
    '''
    per_page = 20
    # NOTE(review): raises KeyError (HTTP 500) when neither the URL nor
    # the query string provides "query" -- confirm routing guarantees it.
    query = query or request.GET['query']
    title = u'%s のレシピ検索結果' % query
    # Whitespace-separated terms are searched together.
    queries = query.split()
    recipes = Recipe.objects.search(queries, page=page, per_page=per_page)
    page_obj = Paginator(recipes.get('object_list'), per_page).page(page)
    links = [{'name': u'全体から検索',
              'url': reverse('gp-search', kwargs={'query': query})}]
    return render_to_response('recipes/recipes.html',
                              {'page_obj': page_obj,
                               'title': title,
                               'links': links},
                              RequestContext(request))
| 36.089489 | 79 | 0.70886 |
acf2ebc9f8bb2326a4340c13aebd56623544fcf4 | 20,599 | py | Python | dvc/repo/__init__.py | bgheneti/dvc | 387867fbbe5895cf0505aa1115ad875aa084f237 | [
"Apache-2.0"
] | null | null | null | dvc/repo/__init__.py | bgheneti/dvc | 387867fbbe5895cf0505aa1115ad875aa084f237 | [
"Apache-2.0"
] | null | null | null | dvc/repo/__init__.py | bgheneti/dvc | 387867fbbe5895cf0505aa1115ad875aa084f237 | [
"Apache-2.0"
] | null | null | null | import logging
import os
from contextlib import contextmanager
from functools import wraps
from funcy import cached_property, cat, first
from dvc.config import Config
from dvc.dvcfile import PIPELINE_FILE, Dvcfile, is_valid_filename
from dvc.exceptions import FileMissingError
from dvc.exceptions import IsADirectoryError as DvcIsADirectoryError
from dvc.exceptions import (
NoOutputOrStageError,
NotDvcRepoError,
OutputNotFoundError,
)
from dvc.ignore import CleanTree
from dvc.path_info import PathInfo
from dvc.repo.tree import RepoTree
from dvc.scm.tree import is_working_tree
from dvc.utils.fs import path_isin
from ..stage.exceptions import StageFileDoesNotExistError, StageNotFound
from ..utils import parse_target
from .graph import check_acyclic, get_pipeline, get_pipelines
logger = logging.getLogger(__name__)
def locked(f):
    """Decorator: run a Repo method while holding the repo lock and the
    open state database.

    The stage/graph caches are dropped both before and after the call,
    because anything cached while locked is stale once the lock is gone.
    """

    @wraps(f)
    def inner(repo, *args, **kwargs):
        with repo.lock, repo.state:
            repo._reset()
            result = f(repo, *args, **kwargs)
            # Whatever was cached under the lock must not leak past it.
            repo._reset()
            return result

    return inner
class Repo:
    """A DVC project: ties together configuration, cache, SCM, lock,
    state database and the stage/dependency graph rooted at ``root_dir``.

    Most user-facing operations (add, push, pull, ...) are plain functions
    bound onto the class via the ``from dvc.repo.X import X`` imports below.
    """

    DVC_DIR = ".dvc"

    from dvc.repo.destroy import destroy
    from dvc.repo.install import install
    from dvc.repo.add import add
    from dvc.repo.remove import remove
    from dvc.repo.ls import ls
    from dvc.repo.freeze import freeze, unfreeze
    from dvc.repo.move import move
    from dvc.repo.run import run
    from dvc.repo.imp import imp
    from dvc.repo.imp_url import imp_url
    from dvc.repo.reproduce import reproduce
    from dvc.repo.checkout import _checkout
    from dvc.repo.push import push
    from dvc.repo.fetch import _fetch
    from dvc.repo.pull import pull
    from dvc.repo.status import status
    from dvc.repo.gc import gc
    from dvc.repo.commit import commit
    from dvc.repo.diff import diff
    from dvc.repo.brancher import brancher
    from dvc.repo.get import get
    from dvc.repo.get_url import get_url
    from dvc.repo.update import update

    def __init__(self, root_dir=None, scm=None, rev=None):
        """Open the repo at (or above) ``root_dir``.

        When ``scm``/``rev`` are given, the repo is browsed read-only at
        that revision through the SCM tree; otherwise the working tree is
        used and the local lock/state machinery is set up.
        """
        from dvc.state import State, StateNoop
        from dvc.lock import make_lock
        from dvc.scm import SCM
        from dvc.cache import Cache
        from dvc.data_cloud import DataCloud
        from dvc.repo.metrics import Metrics
        from dvc.repo.plots import Plots
        from dvc.repo.params import Params
        from dvc.scm.tree import WorkingTree
        from dvc.utils.fs import makedirs
        from dvc.stage.cache import StageCache

        if scm:
            # use GitTree instead of WorkingTree as default repo tree instance
            tree = scm.get_tree(rev)
            self.root_dir = self.find_root(root_dir, tree)
            self.scm = scm
            self.tree = tree
            self.state = StateNoop()
        else:
            root_dir = self.find_root(root_dir)
            self.root_dir = os.path.abspath(os.path.realpath(root_dir))

        self.dvc_dir = os.path.join(self.root_dir, self.DVC_DIR)
        self.config = Config(self.dvc_dir)

        if not scm:
            no_scm = self.config["core"].get("no_scm", False)
            self.scm = SCM(self.root_dir, no_scm=no_scm)
            self.tree = WorkingTree(self.root_dir)

        self.tmp_dir = os.path.join(self.dvc_dir, "tmp")
        self.index_dir = os.path.join(self.tmp_dir, "index")
        makedirs(self.index_dir, exist_ok=True)

        hardlink_lock = self.config["core"].get("hardlink_lock", False)
        self.lock = make_lock(
            os.path.join(self.tmp_dir, "lock"),
            tmp_dir=self.tmp_dir,
            hardlink_lock=hardlink_lock,
            friendly=True,
        )

        self.cache = Cache(self)
        self.cloud = DataCloud(self)

        if not scm:
            # NOTE: storing state and link_state in the repository itself to
            # avoid any possible state corruption in 'shared cache dir'
            # scenario.
            self.state = State(self.cache.local)

        self.stage_cache = StageCache(self)

        self.metrics = Metrics(self)
        self.plots = Plots(self)
        self.params = Params(self)

        self._ignore()

    @property
    def tree(self):
        # The tree is always wrapped in a CleanTree by the setter below.
        return self._tree

    @tree.setter
    def tree(self, tree):
        """Install ``tree`` (wrapped in CleanTree, which applies .dvcignore)
        and invalidate cached stage/graph data built on the previous tree."""
        if is_working_tree(tree) or tree.tree_root == self.root_dir:
            root = None
        else:
            root = self.root_dir
        self._tree = (
            tree if isinstance(tree, CleanTree) else CleanTree(tree, root)
        )
        # Our graph cache is no longer valid, as it was based on the previous
        # tree.
        self._reset()

    def __repr__(self):
        return f"{self.__class__.__name__}: '{self.root_dir}'"

    @classmethod
    def find_root(cls, root=None, tree=None):
        """Walk up from ``root`` (or CWD) to the directory holding
        ``.dvc``; raises NotDvcRepoError when none is found up to the
        mount point."""
        root_dir = os.path.realpath(root or os.curdir)

        if tree:
            # Revision trees cannot be walked upwards; check in place only.
            if tree.isdir(os.path.join(root_dir, cls.DVC_DIR)):
                return root_dir
            raise NotDvcRepoError(f"'{root}' does not contain DVC directory")

        if not os.path.isdir(root_dir):
            raise NotDvcRepoError(f"directory '{root}' does not exist")

        while True:
            dvc_dir = os.path.join(root_dir, cls.DVC_DIR)
            if os.path.isdir(dvc_dir):
                return root_dir
            if os.path.ismount(root_dir):
                break
            root_dir = os.path.dirname(root_dir)

        message = (
            "you are not inside of a DVC repository "
            "(checked up to mount point '{}')"
        ).format(root_dir)
        raise NotDvcRepoError(message)

    @classmethod
    def find_dvc_dir(cls, root=None):
        """Return the absolute path of the repo's ``.dvc`` directory."""
        root_dir = cls.find_root(root)
        return os.path.join(root_dir, cls.DVC_DIR)

    @staticmethod
    def init(root_dir=os.curdir, no_scm=False, force=False, subdir=False):
        """Initialize a fresh DVC repo at ``root_dir`` and return it."""
        from dvc.repo.init import init

        init(root_dir=root_dir, no_scm=no_scm, force=force, subdir=subdir)
        return Repo(root_dir)

    def unprotect(self, target):
        """Make a cached/linked ``target`` writable again."""
        return self.cache.local.tree.unprotect(PathInfo(target))

    def _ignore(self):
        """Register DVC-internal paths with the SCM's ignore mechanism."""
        flist = [self.config.files["local"], self.tmp_dir]

        # Only ignore the cache dir if it actually lives inside the repo.
        if path_isin(self.cache.local.cache_dir, self.root_dir):
            flist += [self.cache.local.cache_dir]

        self.scm.ignore_list(flist)

    def get_stage(self, path=None, name=None):
        """Load the single stage ``name`` from dvcfile ``path``
        (defaults to dvc.yaml)."""
        if not path:
            path = PIPELINE_FILE
            logger.debug("Assuming '%s' to be a stage inside '%s'", name, path)

        dvcfile = Dvcfile(self, path)
        return dvcfile.stages[name]

    def get_stages(self, path=None, name=None):
        """Load all stages from dvcfile ``path`` (or just ``name`` when
        given); defaults to dvc.yaml."""
        if not path:
            path = PIPELINE_FILE
            logger.debug("Assuming '%s' to be a stage inside '%s'", name, path)

        if name:
            return [self.get_stage(path, name)]

        dvcfile = Dvcfile(self, path)
        return list(dvcfile.stages.values())

    def check_modified_graph(self, new_stages):
        """Generate graph including the new stage to check for errors"""
        # Building graph might be costly for the ones with many DVC-files,
        # so we provide this undocumented hack to skip it. See [1] for
        # more details. The hack can be used as:
        #
        #     repo = Repo(...)
        #     repo._skip_graph_checks = True
        #     repo.add(...)
        #
        # A user should care about not duplicating outs and not adding cycles,
        # otherwise DVC might have an undefined behaviour.
        #
        # [1] https://github.com/iterative/dvc/issues/2671
        if not getattr(self, "_skip_graph_checks", False):
            self._collect_graph(self.stages + new_stages)

    def _collect_inside(self, path, graph):
        """Return graph stages (post-order) whose dvcfile lives under
        ``path``."""
        import networkx as nx

        stages = nx.dfs_postorder_nodes(graph)
        return [stage for stage in stages if path_isin(stage.path, path)]

    def collect(
        self, target=None, with_deps=False, recursive=False, graph=None
    ):
        """Resolve ``target`` (dvcfile path, 'file:stage' or directory)
        into a list/set of stages, optionally including upstream deps."""
        if not target:
            return list(graph) if graph else self.stages

        if recursive and os.path.isdir(target):
            return self._collect_inside(
                os.path.abspath(target), graph or self.graph
            )

        path, name = parse_target(target)
        stages = self.get_stages(path, name)
        if not with_deps:
            return stages

        res = set()
        for stage in stages:
            res.update(self._collect_pipeline(stage, graph=graph))
        return res

    def _collect_pipeline(self, stage, graph=None):
        """Return ``stage`` plus everything it depends on, in post-order."""
        import networkx as nx

        pipeline = get_pipeline(get_pipelines(graph or self.graph), stage)
        return nx.dfs_postorder_nodes(pipeline, stage)

    def _collect_from_default_dvcfile(self, target):
        """Return stage ``target`` from dvc.yaml if that file exists,
        else None."""
        dvcfile = Dvcfile(self, PIPELINE_FILE)
        if dvcfile.exists():
            return dvcfile.stages.get(target)

    def collect_granular(
        self, target=None, with_deps=False, recursive=False, graph=None
    ):
        """
        Priority is in the order of following in case of ambiguity:
            - .dvc file or .yaml file
            - dir if recursive and directory exists
            - stage_name
            - output file
        """
        if not target:
            return [(stage, None) for stage in self.stages]

        file, name = parse_target(target)
        stages = []

        # Optimization: do not collect the graph for a specific target
        if not file:
            # parsing is ambiguous when it does not have a colon
            # or if it's not a dvcfile, as it can be a stage name
            # in `dvc.yaml` or, an output in a stage.
            logger.debug(
                "Checking if stage '%s' is in '%s'", target, PIPELINE_FILE
            )
            if not (recursive and os.path.isdir(target)):
                stage = self._collect_from_default_dvcfile(target)
                if stage:
                    stages = (
                        self._collect_pipeline(stage) if with_deps else [stage]
                    )
        elif not with_deps and is_valid_filename(file):
            stages = self.get_stages(file, name)

        if not stages:
            if not (recursive and os.path.isdir(target)):
                try:
                    # Maybe the target is an output path rather than a stage.
                    (out,) = self.find_outs_by_path(target, strict=False)
                    filter_info = PathInfo(os.path.abspath(target))
                    return [(out.stage, filter_info)]
                except OutputNotFoundError:
                    pass

            try:
                stages = self.collect(target, with_deps, recursive, graph)
            except StageFileDoesNotExistError as exc:
                # collect() might try to use `target` as a stage name
                # and throw error that dvc.yaml does not exist, whereas it
                # should say that both stage name and file does not exist.
                if file and is_valid_filename(file):
                    raise
                raise NoOutputOrStageError(target, exc.file) from exc
            except StageNotFound as exc:
                raise NoOutputOrStageError(target, exc.file) from exc

        return [(stage, None) for stage in stages]

    def used_cache(
        self,
        targets=None,
        all_branches=False,
        with_deps=False,
        all_tags=False,
        all_commits=False,
        remote=None,
        force=False,
        jobs=None,
        recursive=False,
        used_run_cache=None,
    ):
        """Get the stages related to the given target and collect
        the `info` of its outputs.

        This is useful to know what files from the cache are _in use_
        (namely, a file described as an output on a stage).

        The scope is, by default, the working directory, but you can use
        `all_branches`/`all_tags`/`all_commits` to expand the scope.

        Returns:
            A dictionary with Schemes (representing output's location) mapped
            to items containing the output's `dumpd` names and the output's
            children (if the given output is a directory).
        """
        from dvc.cache import NamedCache

        cache = NamedCache()

        for branch in self.brancher(
            all_branches=all_branches,
            all_tags=all_tags,
            all_commits=all_commits,
        ):
            targets = targets or [None]

            pairs = cat(
                self.collect_granular(
                    target, recursive=recursive, with_deps=with_deps
                )
                for target in targets
            )

            # Tag names with the branch they came from, e.g. "out (master)".
            suffix = f"({branch})" if branch else ""
            for stage, filter_info in pairs:
                used_cache = stage.get_used_cache(
                    remote=remote,
                    force=force,
                    jobs=jobs,
                    filter_info=filter_info,
                )
                cache.update(used_cache, suffix=suffix)

        if used_run_cache:
            used_cache = self.stage_cache.get_used_cache(
                used_run_cache, remote=remote, force=force, jobs=jobs,
            )
            cache.update(used_cache)

        return cache

    def _collect_graph(self, stages):
        """Generate a graph by using the given stages on the given directory

        The nodes of the graph are the stage's path relative to the root.

        Edges are created when the output of one stage is used as a
        dependency in other stage.

        The direction of the edges goes from the stage to its dependency:

        For example, running the following:

            $ dvc run -o A "echo A > A"
            $ dvc run -d A -o B "echo B > B"
            $ dvc run -d B -o C "echo C > C"

        Will create the following graph:

               ancestors <--
                           |
                C.dvc -> B.dvc -> A.dvc
                |          |
                |          --> descendants
                |
                ------- pipeline ------>
                           |
                           v
              (weakly connected components)

        Args:
            stages (list): used to build a graph, if None given, collect stages
                in the repository.

        Raises:
            OutputDuplicationError: two outputs with the same path
            StagePathAsOutputError: stage inside an output directory
            OverlappingOutputPathsError: output inside output directory
            CyclicGraphError: resulting graph has cycles
        """
        import networkx as nx
        from pygtrie import Trie

        from dvc.exceptions import (
            OutputDuplicationError,
            StagePathAsOutputError,
            OverlappingOutputPathsError,
        )

        G = nx.DiGraph()
        stages = stages or self.stages
        outs = Trie()  # Use trie to efficiently find overlapping outs and deps

        for stage in filter(bool, stages):
            for out in stage.outs:
                out_key = out.path_info.parts

                # Check for dup outs
                if out_key in outs:
                    dup_stages = [stage, outs[out_key].stage]
                    raise OutputDuplicationError(str(out), dup_stages)

                # Check for overlapping outs
                if outs.has_subtrie(out_key):
                    # The new out is a prefix of an already-registered one.
                    parent = out
                    overlapping = first(outs.values(prefix=out_key))
                else:
                    # Or an already-registered out is a prefix of the new one.
                    parent = outs.shortest_prefix(out_key).value
                    overlapping = out
                if parent and overlapping:
                    msg = (
                        "Paths for outs:\n'{}'('{}')\n'{}'('{}')\n"
                        "overlap. To avoid unpredictable behaviour, "
                        "rerun command with non overlapping outs paths."
                    ).format(
                        str(parent),
                        parent.stage.addressing,
                        str(overlapping),
                        overlapping.stage.addressing,
                    )
                    raise OverlappingOutputPathsError(parent, overlapping, msg)

                outs[out_key] = out

        for stage in stages:
            # A dvcfile must not live inside another stage's output dir.
            out = outs.shortest_prefix(PathInfo(stage.path).parts).value
            if out:
                raise StagePathAsOutputError(stage, str(out))

        # Building graph
        G.add_nodes_from(stages)
        for stage in stages:
            for dep in stage.deps:
                if dep.path_info is None:
                    continue

                dep_key = dep.path_info.parts
                overlapping = list(n.value for n in outs.prefixes(dep_key))
                if outs.has_subtrie(dep_key):
                    overlapping.extend(outs.values(prefix=dep_key))

                G.add_edges_from((stage, out.stage) for out in overlapping)
        check_acyclic(G)

        return G

    @cached_property
    def graph(self):
        # Invalidated via _reset().
        return self._collect_graph(self.stages)

    @cached_property
    def pipelines(self):
        # Weakly-connected components of the stage graph.
        return get_pipelines(self.graph)

    @cached_property
    def stages(self):
        """
        Walks down the root directory looking for Dvcfiles,
        skipping the directories that are related with
        any SCM (e.g. `.git`), DVC itself (`.dvc`), or directories
        tracked by DVC (e.g. `dvc add data` would skip `data/`)

        NOTE: For large repos, this could be an expensive
              operation. Consider using some memoization.
        """
        return self._collect_stages()

    @cached_property
    def plot_templates(self):
        from .plots.template import PlotTemplates

        return PlotTemplates(self.dvc_dir)

    def _collect_stages(self):
        """Walk the repo tree and load every stage from every dvcfile,
        pruning directories that are themselves DVC-tracked outputs."""
        stages = []
        outs = set()

        for root, dirs, files in self.tree.walk(self.root_dir):
            for file_name in filter(is_valid_filename, files):
                new_stages = self.get_stages(os.path.join(root, file_name))
                stages.extend(new_stages)
                outs.update(
                    out.fspath
                    for stage in new_stages
                    for out in stage.outs
                    if out.scheme == "local"
                )
            # Don't descend into directories that are tracked outputs.
            dirs[:] = [d for d in dirs if os.path.join(root, d) not in outs]
        return stages

    def find_outs_by_path(self, path, outs=None, recursive=False, strict=True):
        """Return the outputs matching ``path`` (exact match when
        ``strict``, containment otherwise); raises OutputNotFoundError
        when nothing matches."""
        if not outs:
            outs = [out for stage in self.stages for out in stage.outs]

        abs_path = os.path.abspath(path)
        path_info = PathInfo(abs_path)
        match = path_info.__eq__ if strict else path_info.isin_or_eq

        def func(out):
            if out.scheme == "local" and match(out.path_info):
                return True

            if recursive and out.path_info.isin(path_info):
                return True

            return False

        matched = list(filter(func, outs))
        if not matched:
            raise OutputNotFoundError(path, self)

        return matched

    def find_out_by_relpath(self, relpath):
        """Return the single output at ``relpath`` (relative to the repo
        root)."""
        path = os.path.join(self.root_dir, relpath)
        (out,) = self.find_outs_by_path(path)
        return out

    def is_dvc_internal(self, path):
        """True when ``path`` has a ``.dvc`` directory component."""
        path_parts = os.path.normpath(path).split(os.path.sep)
        return self.DVC_DIR in path_parts

    @contextmanager
    def open_by_relpath(self, path, remote=None, mode="r", encoding=None):
        """Opens a specified resource as a file descriptor"""
        tree = RepoTree(self, stream=True)
        path = os.path.join(self.root_dir, path)
        try:
            with self.state:
                # NOTE(review): `path` is already absolute here, so the
                # second join is a no-op for os.path.join semantics --
                # confirm, looks redundant.
                with tree.open(
                    os.path.join(self.root_dir, path),
                    mode=mode,
                    encoding=encoding,
                    remote=remote,
                ) as fobj:
                    yield fobj
        except FileNotFoundError as exc:
            raise FileMissingError(path) from exc
        except IsADirectoryError as exc:
            raise DvcIsADirectoryError(f"'{path}' is a directory") from exc

    def close(self):
        self.scm.close()

    @locked
    def checkout(self, *args, **kwargs):
        # Locked wrapper around the imported _checkout implementation.
        return self._checkout(*args, **kwargs)

    @locked
    def fetch(self, *args, **kwargs):
        # Locked wrapper around the imported _fetch implementation.
        return self._fetch(*args, **kwargs)

    def _reset(self):
        """Drop all cached_property values derived from the current tree."""
        self.__dict__.pop("graph", None)
        self.__dict__.pop("stages", None)
        self.__dict__.pop("pipelines", None)
        self.__dict__.pop("dvcignore", None)
| 33.824302 | 79 | 0.583232 |
acf2eeb3e69920dae1fae097481aa66f41b37ce2 | 14,100 | py | Python | TEST/BETAsite/pulse_generator/pulse_generator/gate_operation.py | takehuge/PYQUM | bfc9d9b1c2f4246c7aac3a371baaf587c99f8069 | [
"MIT"
] | null | null | null | TEST/BETAsite/pulse_generator/pulse_generator/gate_operation.py | takehuge/PYQUM | bfc9d9b1c2f4246c7aac3a371baaf587c99f8069 | [
"MIT"
] | null | null | null | TEST/BETAsite/pulse_generator/pulse_generator/gate_operation.py | takehuge/PYQUM | bfc9d9b1c2f4246c7aac3a371baaf587c99f8069 | [
"MIT"
] | null | null | null | # Numpy
#
from numpy import linspace, arange
# Numpy array
from numpy import array, append, zeros, ones, where
# Numpy common math function
from numpy import exp, sqrt, arctan2, cos, sin, angle, radians, sign, log, ceil
# Numpy constant
from numpy import pi
from pandas import infer_freq
def gaussianFunc(t, p):
    """Gaussian envelope sampled at times t.

    p = [amplitude, sigma, peak position].
    """
    amp, sigma, peak = p[0], p[1], p[2]
    z = (t - peak) / sigma
    return amp * exp(-(z * z) / 2)
def derivativeGaussianFunc(t, p):
    """Time derivative of a Gaussian envelope sampled at times t.

    p = [amplitude, sigma, peak position]; degenerates to an all-zero
    trace when sigma is zero.
    """
    amp, sigma, peak = p[0], p[1], p[2]
    if sigma == 0.:
        return zeros(len(t))
    offset = t - peak
    gauss = exp(-((offset / sigma) ** 2) / 2)
    return -amp / sigma ** 2 * offset * gauss
def constFunc(t, p):
    """Flat envelope of amplitude p[0], one sample per entry of t."""
    return ones(len(t)) * p[0]
def linearFunc(t, p):
    """Linear ramp: slope p[0], intercept p[1]."""
    slope, intercept = p[0], p[1]
    return slope * t + intercept
# def set_QubitRegister():
class PulseBuilder():
    """Describes and renders one pulse segment on a fixed time grid.

    The segment is parameterised by ``pulseInfo`` (channel mode, envelope
    shape/parameters, phase) and rendered into ``waveform`` — a dict with
    start time ``t0``, sample period ``dt`` and complex samples ``data``.
    """
    def __init__( self, pts, dt ):
        # pts: number of samples; dt: sample period.
        # self.functionName = functionName
        self.time = pts *dt
        self.timeResolution = dt
        self.operationPts = pts
        # Default: unit-amplitude gaussian on the XY channel, zero phase.
        self.pulseInfo = {
            "mode": "XY",
            "envelope": {
                "shape": 'gaussian',
                "paras": [1,0.25],
            },
            "phase": 0,
        }
        self.waveform ={
            "t0": 0.,
            "dt": self.timeResolution,
            "data": array([])
        }
    def arbXYGate( self, p, shape='DRAG' ):
        """Configure an arbitrary XY rotation; p = (theta, phi).

        Returns None (the return value of dict.update).
        """
        theta, phi = p
        # NOTE(review): unlike __init__, "mode" is nested inside
        # "envelope" here, so the top-level "mode" set in __init__ is what
        # convert_XYtoIQ actually reads — confirm this is intended.
        pulseInfo = {
            "envelope": {
                "mode": "XY",
                "shape": shape,
                "paras": [theta/pi,0.25],
            },
            "phase": phi,
        }
        return self.pulseInfo.update(pulseInfo)
    def rotXY( self, p, shape='fDRAG' ):
        """Configure a rotation about an equatorial axis.

        p = (amp, sigmaRatio, dRatio, rotAxis): envelope amplitude, sigma
        as a fraction of the pulse length, DRAG correction ratio, and the
        rotation-axis phase.
        """
        amp, sigmaRatio, dRatio, rotAxis = p
        pulseInfo = {
            "envelope": {
                "mode": "XY",
                "shape": shape,
                "paras": [amp, sigmaRatio, dRatio],
            },
            "phase": rotAxis,
        }
        return self.pulseInfo.update(pulseInfo)
    def idle( self, p, channel="XY" ):
        """Constant (usually zero) envelope on the given channel; p[0] is
        the level."""
        pulseInfo = {
            "mode": channel,
            "envelope": {
                "shape": 'const',
                "paras": p,
            },
            "phase": 0,
        }
        return self.pulseInfo.update(pulseInfo)
    def purePulse( self, p, channel="i", shape='gaussian' ):
        """Raw single-quadrature pulse (channel "i" or "q") with envelope
        shape and parameter list p."""
        pulseInfo = {
            "mode": channel,
            "envelope": {
                "shape": shape,
                "paras": p,
            },
            "phase": 0,
        }
        return self.pulseInfo.update(pulseInfo)
    def generate_envelope( self, startTime=None ):
        """Render pulseInfo into waveform["data"] (complex samples).

        The configured phase is applied as exp(1j*phase); shapes are
        dispatched through the ``pulse`` table below.
        """
        self.waveform["data"] = zeros( self.operationPts )
        if startTime != None: self.waveform["t0"] = startTime
        self.waveform["dt"] = self.timeResolution
        # Time axis local to this pulse, [0, time) with operationPts samples.
        relativeTime = linspace(0,self.time,self.operationPts,endpoint=False)
        amp = self.pulseInfo["envelope"]["paras"][0]
        def get_gaussian():
            # Symmetric gaussian centred in the pulse window.
            centerTime = self.time /2
            sigma = self.time *self.pulseInfo["envelope"]["paras"][1]
            p = [amp, sigma, centerTime]
            wfData = gaussianFunc( relativeTime, p )
            return wfData
        def get_halfGaussian():
            # Half gaussian: rising edge when sigma > 0 (peak at the end),
            # falling edge otherwise (peak at t=0).
            sigma = self.time *self.pulseInfo["envelope"]["paras"][1]
            centerTime = 0
            if sigma > 0:
                centerTime = self.time
            p = [amp, sigma, centerTime]
            wfData = gaussianFunc( relativeTime, p )
            return wfData
        def get_degaussian():
            # Derivative-of-gaussian envelope (DRAG-style quadrature).
            centerTime = self.time /2
            sigma = self.time *self.pulseInfo["envelope"]["paras"][1]
            p = [amp, sigma, centerTime]
            wfData = derivativeGaussianFunc( relativeTime, p )
            return wfData
        def get_halfDeGaussian():
            # Half derivative-of-gaussian, mirrored like get_halfGaussian.
            sigma = self.time *self.pulseInfo["envelope"]["paras"][1]
            centerTime = 0
            if sigma > 0:
                centerTime = self.time
            p = [amp, sigma, centerTime]
            wfData = derivativeGaussianFunc( relativeTime, p )
            return wfData
        def get_DRAG():
            # Gaussian on the real part, -1j * derivative on the imaginary
            # part (fixed unit DRAG weight; the anharmonicity scaling is
            # commented out below).
            centerTime = self.time /2
            amp = self.pulseInfo["envelope"]["paras"][0]
            sigma = self.time *self.pulseInfo["envelope"]["paras"][1]
            pGau = [ amp, sigma, centerTime ]
            ampDGau = amp
            pDGau = [ ampDGau, sigma, centerTime ]
            #wfData = gaussianFunc(relativeTime, pGau )+ -1j/(hardwareInfo.Qubit.anharmonicity/1e3) *derivativeGaussianFunc(relativeTime, pDGau)
            wfData = gaussianFunc(relativeTime, pGau )+ -1j *derivativeGaussianFunc(relativeTime, pDGau)
            return wfData
        def get_flexDRAG():
            # DRAG with an adjustable derivative weight paras[2].
            centerTime = self.time /2
            amp = self.pulseInfo["envelope"]["paras"][0]
            sigma = self.time *self.pulseInfo["envelope"]["paras"][1]
            pGau = [ amp, sigma, centerTime ]
            ampDGau = amp *self.pulseInfo["envelope"]["paras"][2]
            pDGau = [ ampDGau, sigma, centerTime ]
            wfData = gaussianFunc(relativeTime, pGau )+ -1j*derivativeGaussianFunc(relativeTime, pDGau)
            return wfData
        def get_const():
            # Flat envelope at paras[0].
            amp = self.pulseInfo["envelope"]["paras"][0]
            p = [ amp ]
            wfData = constFunc( relativeTime, p )
            return wfData
        def get_linear():
            # Linear ramp: slope paras[0], intercept paras[1].
            slope = self.pulseInfo["envelope"]["paras"][0]
            intercept = self.pulseInfo["envelope"]["paras"][1]
            p = [ slope, intercept ]
            wfData = linearFunc( relativeTime, p )
            return wfData
        def get_ringUp():
            # Flat-top pulse with gaussian edges plus an initial
            # "ring-up" overshoot gaussian of height
            # flatHieght * peakMultiplier at the rising edge.
            flatHieght = self.pulseInfo["envelope"]["paras"][0]
            sigmaRatio = self.pulseInfo["envelope"]["paras"][1]
            edgeLength = self.pulseInfo["envelope"]["paras"][2]
            peakLength = edgeLength*2
            flatLength = self.time -peakLength
            peakMultiplier = self.pulseInfo["envelope"]["paras"][3]
            peakSigma = peakLength *sigmaRatio
            startPos = edgeLength
            ringPeak = flatHieght *(peakMultiplier)
            endPos = startPos +flatLength
            ringGauss = [ ringPeak, peakSigma, startPos ]
            highPowerGauss = gaussianFunc(relativeTime, ringGauss)
            startEdge = [ flatHieght, peakSigma, startPos ]
            gaussUp = where( relativeTime<startPos, gaussianFunc(relativeTime, startEdge),0. )
            endEdge = [ flatHieght, peakSigma, endPos ]
            gaussDn = where( relativeTime>endPos, gaussianFunc(relativeTime, endEdge),0. )
            step = where( (relativeTime>=startPos) & (relativeTime<=endPos), constFunc(relativeTime, [flatHieght]),0. )
            wfData = highPowerGauss +gaussUp +step +gaussDn
            return wfData
        # Shape-name -> renderer dispatch table.
        pulse = {
            'gaussian': get_gaussian,
            'gaussian_half': get_halfGaussian,
            'degaussian': get_degaussian,
            'degaussian_half': get_halfDeGaussian,
            'DRAG': get_DRAG,
            'fDRAG': get_flexDRAG,
            'const': get_const,
            'linear': get_linear,
            'ringup': get_ringUp,
        }
        # Phase is applied as a complex rotation of the whole envelope
        # (phase assumed to be in radians — TODO confirm with callers).
        phaseShift = exp(1j*self.pulseInfo["phase"])
        self.waveform["data"]= pulse[self.pulseInfo["envelope"]["shape"]]() *phaseShift
        #print(self.waveform)
        return self.waveform
    def convert_XYtoIQ( self, IQMixerChannel=None ):
        """Up-convert the rendered envelope to IF-modulated I/Q samples.

        When an IQMixerChannel is supplied, compensates single-sideband
        mixer imperfections (amplitude/phase balance, DC offsets);
        otherwise the envelope passes through unchanged.
        """
        envelope = self.waveform["data"]
        absoluteTime = get_timeAxis(self.waveform)
        if IQMixerChannel != None:
            phaseBalance = IQMixerChannel.phaseBalance
            ampBalance = IQMixerChannel.ampBalance
            (offsetI, offsetQ) = IQMixerChannel.offset
            if_freq = IQMixerChannel.ifFreq/1e3 # to GHz
            # Sideband-selection sign taken from the phase-balance sign.
            inverse = sign(sin(radians(phaseBalance)))
            #print(self.pulseInfo["mode"])
            if self.pulseInfo["mode"] == "XY":
                # Split the complex envelope into magnitude and phase and
                # modulate each quadrature with balance corrections.
                envelopeIQ = abs( envelope )
                envelopeI = envelopeIQ /cos(radians(abs(phaseBalance)-90))
                envelopeQ = envelopeI /ampBalance
                phi = arctan2( envelope.imag, envelope.real )
                phiQ = -phi+inverse*pi/2.
                sigI = envelopeI *cos( 2. *pi *if_freq *absoluteTime +phiQ +radians(phaseBalance) +pi) -offsetI
                sigQ = envelopeQ *cos( 2. *pi *if_freq *absoluteTime +phiQ) -offsetQ
                self.waveform["data"] = sigI+ 1j*sigQ
            elif self.pulseInfo["mode"] == "i":
                # Single-quadrature output on I only.
                self.waveform["data"] = envelope*cos( 2. *pi *if_freq *absoluteTime +radians(phaseBalance) +pi) -offsetI
            elif self.pulseInfo["mode"] == "q":
                # Single-quadrature output on Q only.
                self.waveform["data"] = 1j*(envelope/ampBalance*cos( 2. *pi *if_freq *absoluteTime) -offsetQ)
        else:
            self.waveform["data"] = envelope
        return self.waveform
class QubitOperationSequence():
    """Concatenates PulseBuilder operations into one fixed-length sequence
    and renders both the XY (envelope) and IQ (mixer-ready) waveforms."""
    def __init__( self, sequencePts, dt ):
        # sequencePts: total samples of the sequence; dt: sample period.
        self.dt = dt
        self.operation = []
        self.sequenceTime = sequencePts*dt # ns
        self.sequencePts = sequencePts
        # print("sequenceTime",sequenceTime)
        # print("sequencePts",self.sequencePts)
        self.xywaveform = {
            "t0": 0.,
            "dt": dt,
            "data": array([])
        }
        self.iqwaveform = {
            "t0": 0.,
            "dt": dt,
            "data": array([])
        }
    def set_operation( self, operation ):
        """Register the operation list and assign each pulse its start
        time; pad with idle if too short, or replace everything with one
        full-length idle if the total exceeds the sequence length."""
        self.operation = operation
        endPt = int(0)
        for i, op in enumerate(self.operation) :
            operationPts = op.operationPts
            op.waveform["t0"] = endPt*self.dt
            #print("start point",endPt)
            #print("op point",operationPts)
            endPt += operationPts
        if endPt < self.sequencePts:
            # Pad the remainder with a zero-level idle pulse.
            op = PulseBuilder(self.sequencePts-endPt,self.dt)
            op.idle([0])
            self.operation.append(op)
            print("Operation sequence haven't full")
        elif endPt == self.sequencePts:
            print("Total operations match operation sequence")
        else:
            # Overflow: discard everything and keep a single idle.
            op = PulseBuilder(self.sequencePts,self.dt)
            op.idle([0])
            self.operation = [op]
            print("Too much operation, clean all sequense")
    def generate_sequenceWaveform( self, mixerInfo=None, firstOperationIdx=None ):
        """Render all operations; store the concatenated envelope in
        xywaveform and the IF-modulated result in iqwaveform.

        t0 (the time origin subtracted from every pulse) comes from
        firstOperationIdx when given, otherwise from the first sample
        whose rounded magnitude reaches 1.
        """
        allXYPulse = array([])
        allIQPulse = array([])
        t0 = 0
        if len(self.operation) == 0 : # For the case with only one operation
            firstOperationIdx = 0
        # Convert XY to IQ language
        for op in self.operation:
            newPulse = op.generate_envelope()["data"]
            allXYPulse = append(allXYPulse, newPulse)
        if firstOperationIdx != None :
            t0 = self.operation[firstOperationIdx].waveform["t0"]
        elif len(self.operation) == 0 :
            t0 = 0
        else:
            try: # Old method to get t0
                t0 = self.dt * where(ceil(abs(allXYPulse))==1)[0][0]
                # print(Back.WHITE + Fore.BLUE + "Pulse starting from %s ns" %pulse_starting_time)
            except(IndexError):
                t0 = 0
        # Convert XY to IQ language
        for op in self.operation:
            # Shift each pulse so the sequence starts at the chosen t0,
            # then up-convert it with the mixer calibration.
            op.waveform["t0"]-=t0
            newPulse = op.convert_XYtoIQ( mixerInfo )["data"]
            allIQPulse = append(allIQPulse, newPulse)
            #print(len(newPulse))
        self.xywaveform.update({"data":allXYPulse})
        self.iqwaveform.update({"data":allIQPulse})
        return self.iqwaveform
def get_timeAxis( waveform ):
    """Absolute time axis for a waveform dict with keys t0, dt, data."""
    n_samples = len(waveform["data"])
    #print(waveform["t0"], waveform["dt"], dataPts)
    start = waveform["t0"]
    stop = start + waveform["dt"] * n_samples
    # Half-open interval [start, stop): one sample per data point.
    return linspace(start, stop, n_samples, endpoint=False)
class IQMixerChannel():
    """Calibration constants for one IQ mixer channel (SSB operation)."""

    def __init__ ( self ):
        # Intermediate frequency, MHz.
        self.ifFreq = 91.
        # I/Q amp ratio compensation for SSB.
        self.ampBalance = 1.
        # DC offsets applied to (I, Q).
        self.offset = (0., 0.)
        # I/Q Quadrature phase difference compensation for SSB (degrees).
        self.phaseBalance = -90
if __name__ == "__main__":
    # Demo: build a short gate sequence, render XY and IQ waveforms and
    # plot them (requires matplotlib; scipy.fft only for the commented-out
    # spectrum section below).
    import matplotlib.pyplot as plt
    import scipy.fft as spfft
    dt = 1.
    print("register IQMixerChannel")
    mixerInfo = IQMixerChannel()
    OPS = QubitOperationSequence(100, 1.)
    print(f"set new operation")
    op1 = PulseBuilder(20,dt)
    op1.arbXYGate([pi,0])
    op2 = PulseBuilder(50,dt)
    op2.rotXY([1,0.25,5,0])
    op3 = PulseBuilder(20,dt)
    op3.idle([0])
    print("register operation to sequence")
    # Note: op1 is built but not registered; only idle + rotation are used.
    OPS.set_operation([op3, op2])
    print("calculate XY waveform of the sequence")
    OPS.generate_sequenceWaveform(mixerInfo=mixerInfo)
    xyWf = OPS.xywaveform
    print("calculate IQ waveform of the sequence")
    iqWf = OPS.iqwaveform
    # Figure 1: real/imag parts of both waveforms vs time.
    plot1 = plt.figure(1)
    timeAxis = get_timeAxis(xyWf)
    plt.plot(timeAxis, xyWf["data"].real)
    plt.plot(timeAxis, xyWf["data"].imag)
    plt.plot(timeAxis, iqWf["data"].real)
    plt.plot(timeAxis, iqWf["data"].imag)
    # Figure 2: IQ-plane trajectories.
    plot2 = plt.figure(2)
    plt.plot(xyWf["data"].real, xyWf["data"].imag)
    plt.plot(iqWf["data"].real, iqWf["data"].imag)
    #plot3 = plt.figure(3)
    fq = 5e9
    pmixer = mixerInfo.phaseBalance
    fIF = mixerInfo.ifFreq/1e3
    # plt.plot(timeAxis, cos(2*pi*fq*timeAxis) )
    # xymix = xyWf["data"].real*cos(2*pi*fq*timeAxis) +xyWf["data"].imag*cos(2*pi*fq*timeAxis +abs(radians(pmixer)) )
    # plt.plot(timeAxis, xymix)
    # iqmix = iqWf["data"].real*cos(2*pi*(fq+fIF)*timeAxis) +iqWf["data"].imag*cos(2*pi*(fq+fIF)*timeAxis +radians(pmixer) )
    # plt.plot(timeAxis, iqmix)
    # data_points = len(timeAxis)
    # f_points = data_points//2
    # faxis = spfft.fftfreq(data_points,iqWf["dt"])[0:f_points]
    # plot4 = plt.figure(4)
    # xyvector = spfft.fft(xymix)[0:f_points]/len(timeAxis)
    # plt.plot(faxis, abs(xyvector))
    # iqvector = spfft.fft(iqmix)[0:f_points]/len(timeAxis)
    # plt.plot(faxis, 10*log(abs(iqvector)))
    plt.show()
| 33.894231 | 144 | 0.553901 |
acf2f03db3dc7296c02c093348c29292a1219d5b | 280 | py | Python | django/core/urls.py | bear-rsg/mapping-community | c96f10f005b68667dd2c7750b96bd1d7e58a22ba | [
"MIT"
] | null | null | null | django/core/urls.py | bear-rsg/mapping-community | c96f10f005b68667dd2c7750b96bd1d7e58a22ba | [
"MIT"
] | null | null | null | django/core/urls.py | bear-rsg/mapping-community | c96f10f005b68667dd2c7750b96bd1d7e58a22ba | [
"MIT"
] | null | null | null | from django.urls import path, include
from django.conf.urls.static import static
from django.conf import settings
# Route the site root to the app's URLconf, then append the media-serving
# helper (a development-only no-op when DEBUG is False).
urlpatterns = [
    # General app's urls
    # NOTE(review): a dotted module path containing hyphens is not a valid
    # Python identifier — verify this import target actually resolves.
    path('', include('mapping-community-groups.urls')),
]
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| 25.454545 | 65 | 0.757143 |
acf2f04fc019f3a1b79c7a179daa715b5a5eabf6 | 17,602 | py | Python | addons/usps_delivery_carrier/models/usps_delivery_carrier.py | marionumza/vocal_v12 | 480990e919c9410903e06e7813ee92800bd6a569 | [
"Unlicense"
] | null | null | null | addons/usps_delivery_carrier/models/usps_delivery_carrier.py | marionumza/vocal_v12 | 480990e919c9410903e06e7813ee92800bd6a569 | [
"Unlicense"
] | null | null | null | addons/usps_delivery_carrier/models/usps_delivery_carrier.py | marionumza/vocal_v12 | 480990e919c9410903e06e7813ee92800bd6a569 | [
"Unlicense"
] | 1 | 2021-05-05T07:59:08.000Z | 2021-05-05T07:59:08.000Z | # -*- coding: utf-8 -*-
#################################################################################
#
# Copyright (c) 2017-Present Webkul Software Pvt. Ltd. (<https://webkul.com/>)
# You should have received a copy of the License along with this program.
# If not, see <https://store.webkul.com/license.html/>
#################################################################################
import binascii
import logging
import requests
from xml.etree import ElementTree
from xml.etree.ElementTree import Element
from xml.etree.ElementTree import SubElement
from xml.dom import minidom
from urllib.parse import quote_plus
_logger = logging.getLogger(__name__)
try:
from urllib3.exceptions import HTTPError
import urllib3
except Exception as e:
_logger.error("#WKDEBUG-1 python urllib3 library not installed .")
from odoo import api, fields, models
from odoo.exceptions import ValidationError,UserError
from openerp.addons.odoo_shipping_service_apps.tools import ensure_str
headers = {"content-type":"text/xml"}
class USPSAPI:
    """Minimal client for the USPS Web Tools XML API: RateV4 rating, eVS
    label generation, label cancellation and tracking links.

    Requests are hand-built ElementTree documents POSTed as form data;
    every ``send_*`` method returns a dict carrying ``success`` /
    ``error_message`` (plus the parsed ``root`` on transport success)
    instead of raising on API errors.
    """

    # Endpoint per request type; eVS label/cancel calls have separate
    # staging ("test") and production hosts.
    APIEND = dict(
        rate = 'http://production.shippingapis.com/ShippingAPI.dll?',
        shipping_test = 'https://stg-secure.shippingapis.com/ShippingAPI.dll?',
        shipping_production = "https://secure.shippingapis.com/ShippingAPI.dll?",
        tracking = "https://tools.usps.com/go/TrackConfirmAction_input",
        cancel_test = "https://stg-secure.shippingapis.com/ShippingAPI.dll?",
        cancel_production = "https://secure.shippingapis.com/ShippingAPI.dll?"
    )

    @staticmethod
    def check_error(root):
        """Return joined <Description> texts when the response contains an
        <Error> element, otherwise None (falsy = no error)."""
        # BUG FIX: Element.getiterator() was removed in Python 3.9;
        # Element.iter() is the supported equivalent.
        error = False
        for _ in root.iter("Error"):
            error = True
        if error:
            message = (','.join([i.text for i in root.iter("Description")]))
            _logger.info("%r===%r==="%(root.tag,message))
            return message

    @classmethod
    def get_tracking_link(cls,awb):
        """Public tracking URL for the given tracking number ``awb``."""
        return '%s?qtc_tLabels1=%s'%(cls.APIEND.get('tracking'),quote_plus(awb))

    @staticmethod
    def prettify(elem):
        """Serialize an Element to an indented XML string."""
        rough_string = ElementTree.tostring(elem, 'utf-8')
        reparsed = minidom.parseString(rough_string)
        return reparsed.toprettyxml(indent="\t")

    @staticmethod
    def add_text(elem, text):
        """Set ``elem.text`` and return the element (chaining helper)."""
        elem.text = text
        return elem

    def __init__(self, *args, **kwargs):
        # Credentials/context; 'usps_enviroment' selects staging vs
        # production endpoints for label and cancel calls.
        self.usps_user_id = kwargs.get('usps_user_id')
        self.usps_currency = kwargs.get('usps_currency')
        self.usps_enviroment = kwargs.get('usps_enviroment','test')

    def send_request(self,request_for,request_xml):
        """POST ``request_xml`` to the endpoint keyed by ``request_for``
        and parse the XML reply.

        Transport/parse failures are caught and reported through the
        returned dict rather than raised.
        """
        try:
            # (Removed an unused urllib3.PoolManager() — requests does the
            # actual transport.)
            api_end = self.APIEND.get(request_for)
            _logger.info("USPS api_end=%r=="%(api_end))
            response = requests.post(url = api_end,data = request_xml,headers = headers)
            root = ElementTree.fromstring(response.content)
            error = self.check_error(root)
            return dict(success = 0 if error else 1, error_message=error,root=root)
        except Exception as e:
            _logger.warning(
                "#WKDEBUG---USPS %r Exception-----%r---------"%(request_for,e))
            return dict(success = False, error_message=e)

    def contruct_dimension(self,root_node,data):
        """Append RateV4 container/size/dimension child nodes to
        ``root_node``; missing dimensions default to 1."""
        width =data.get('width',1)
        length=data.get('length',1)
        height=data.get('height',1)
        self.add_text(SubElement(root_node, 'Container'),data.get('shipper_package_code'))
        self.add_text(SubElement(root_node, 'Size'),data.get('usps_package_size'))
        self.add_text(SubElement(root_node, 'Width'),'%s'%width)
        self.add_text(SubElement(root_node, 'Length'),'%s'%length)
        self.add_text(SubElement(root_node, 'Height'),'%s'%height)
        self.add_text(SubElement(root_node, 'Machinable'),data.get('usps_machinable'))
        return root_node

    def contruct_dimension_ship(self,root_node,data):
        """Like contruct_dimension but for eVS ship requests (no <Size>)."""
        width =data.get('width',1)
        length=data.get('length',1)
        height=data.get('height',1)
        self.add_text(SubElement(root_node, 'Container'),data.get('shipper_package_code'))
        self.add_text(SubElement(root_node, 'Width'),'%s'%width)
        self.add_text(SubElement(root_node, 'Length'),'%s'%length)
        self.add_text(SubElement(root_node, 'Height'),'%s'%height)
        self.add_text(SubElement(root_node, 'Machinable'),data.get('usps_machinable'))
        return root_node

    def contruct_req_package(self,data):
        """Build one <Package> element for a RateV4 request."""
        Package = Element('Package',attrib={'ID': '%s'%data.get('index',1)})
        self.add_text(SubElement(Package, 'Service'),data.get('usps_service_code'))
        if data.get('usps_service_code')=='FIRST CLASS':
            self.add_text(SubElement(Package, 'FirstClassMailType'),data.get('usps_firstclassmailtype'))
        self.add_text(SubElement(Package, 'ZipOrigination'),data.get('zip_origin'))
        self.add_text(SubElement(Package, 'ZipDestination'),data.get('zip_destination'))
        self.add_text(SubElement(Package, 'Pounds'),'0')
        self.add_text(SubElement(Package, 'Ounces'),'%s'%data.get('weight'))
        self.contruct_dimension(Package,data)
        # BUG FIX: the element text must be a string — the bool ``False``
        # used here made ElementTree raise "cannot serialize" when the
        # request was rendered.
        self.add_text(SubElement(Package, 'ReturnLocations'),'false')
        return Package

    def contruct_rate_request(self,packages):
        """Wrap prebuilt <Package> elements into a <RateV4Request>."""
        Request = Element("RateV4Request",attrib={'USERID':self.usps_user_id})
        for package in packages:
            Request.append(package)
        return Request

    def send_rate_request(self,data):
        """Submit a RateV4 request; return the summed postage over all
        <Rate> nodes, or the error dict on failure."""
        response = self.send_request('rate','API=RateV4&XML=%s'%data)
        if response.get('error_message'):return response
        root = response.get('root')
        price = 0
        for rate in root.iter('Rate'):
            price += float(rate.text)
        return dict(price = price, currency=None,success=True)

    def construct_ship_address(self,root_node,attr,data):
        """Append <{attr}Name/Firm/Address1/Address2/City/State/Zip5/Zip4>
        address nodes built from ``data``."""
        self.add_text(SubElement(root_node, '%sName'%attr),data.get('name'))
        if data.get('company_name'):
            self.add_text(SubElement(root_node, '%sFirm'%attr),data.get('company_name'))
        self.add_text(SubElement(root_node, '%sAddress1'%attr),data.get('street'))
        # '. ' is a filler: USPS rejects an empty Address2 on some calls.
        self.add_text(SubElement(root_node, '%sAddress2'%attr),data.get('street2') or '. ')
        self.add_text(SubElement(root_node, '%sCity'%attr),data.get('city'))
        self.add_text(SubElement(root_node, '%sState'%attr),data.get('state_code'))
        self.add_text(SubElement(root_node, '%sZip5'%attr),data.get('zip'))
        SubElement(root_node, '%sZip4'%attr)
        return root_node

    # NOTE: the original carried an odoo ``@api.model`` decorator here,
    # which is meaningless on a plain (non-Model) class; it was dropped.
    def construct_ship_sender_recept(self,root_node,attr,data):
        """Append <{attr}Name>/<{attr}EMail> contact nodes."""
        self.add_text(SubElement(root_node, '%sName'%attr),data.get('name'))
        self.add_text(SubElement(root_node, '%sEMail'%attr),data.get('email'))
        return root_node

    def get_shipment_api(self):
        """eVS API name: certification endpoint in test, live otherwise."""
        return "eVSCertify" if self.usps_enviroment== 'test' else 'eVS'

    def get_cancel_api(self):
        """API name for label cancellation."""
        return "eVSCancel"

    def contruct_ship_request(self,data,shipper_data,recipient_data):
        """Build the full eVS request document for one package.

        ``data`` carries package/shipment fields; ``shipper_data`` and
        ``recipient_data`` the two addresses.
        """
        node = self.get_shipment_api()
        Request = Element("%sRequest"%node,attrib={'USERID':self.usps_user_id})
        self.add_text(SubElement(Request, 'Option'),'1')
        self.add_text(SubElement(Request, 'Revision'),'2')
        ImageParameters = SubElement(Request, 'ImageParameters')
        if data.get('number_of_packages')>1:
            # Multi-package shipments label each parcel "i of N".
            LabelSequence = SubElement(ImageParameters, 'LabelSequence')
            self.add_text(SubElement(LabelSequence, 'PackageNumber'),'%s'%data.get('index'))
            self.add_text(SubElement(LabelSequence, 'TotalPackages'),'%s'%data.get('number_of_packages'))
        self.construct_ship_address(Request,'From',shipper_data)
        self.add_text(SubElement(Request,'FromPhone'),'%s'%shipper_data.get('phone'))
        self.construct_ship_address(Request,'To',recipient_data)
        self.add_text(SubElement(Request,'ToPhone'),'%s'%recipient_data.get('phone'))
        # Prefer email notification, fall back to SMS when only a phone
        # number is available.
        if recipient_data.get('email'):
            self.add_text(SubElement(Request, 'ToContactPreference'),'EMAIL')
            self.add_text(SubElement(Request, 'ToContactEMail'),'%s'%recipient_data.get('email'))
        elif recipient_data.get('phone'):
            self.add_text(SubElement(Request, 'ToContactPreference'),'SMS')
            self.add_text(SubElement(Request, 'ToContactMessaging'),'%s'%recipient_data.get('phone'))
        self.add_text(SubElement(Request, 'AllowNonCleansedDestAddr'),'True')
        self.add_text(SubElement(Request, 'WeightInOunces'),'%s'%data.get('weight'))
        self.add_text(SubElement(Request, 'ServiceType'),'%s'%data.get('usps_servicetype'))
        self.contruct_dimension_ship(Request,data)
        self.add_text(SubElement(Request, 'CustomerRefNo'),data.get('reference'))
        self.construct_ship_sender_recept(Request,'Sender',shipper_data)
        self.construct_ship_sender_recept(Request,'Recipient',recipient_data)
        self.add_text(SubElement(Request, 'ImageType'),'tif')
        self.add_text(SubElement(Request, 'HoldForManifest'),'N')
        self.add_text(SubElement(Request, 'ReturnCommitments'),'true')
        return Request

    def send_ship_request(self,xml_data):
        """Submit an eVS label request.

        Returns ``{'tracking_result': {tracking_no: (filename, tif_bytes)},
        'date_delivery': scheduled date}`` or the transport error dict.
        """
        node = self.get_shipment_api()
        if node == "eVSCertify":
            response = self.send_request('shipping_test','API=%s&XML=%s'%(node,xml_data))
        else:
            response = self.send_request('shipping_production','API=%s&XML=%s'%(node,xml_data))
        if response.get('error_message'):return response
        root = response.get('root')
        tracking_result = dict()
        date_delivery = root.find('Commitment').findtext('ScheduledDeliveryDate')
        # BUG FIX: getiterator() was removed in Python 3.9 — use iter().
        for tracking_number, image in zip(root.iter("BarcodeNumber"), root.iter("LabelImage")):
            tracking_result[tracking_number.text]= ('USPS' + str(tracking_number.text) + '.tif', binascii.a2b_base64(image.text))
        return dict(tracking_result=tracking_result,date_delivery=date_delivery)

    def construct_cancel_request(self,tracking_link):
        """Build an eVSCancel request for one tracking number."""
        node = self.get_cancel_api()
        Request = Element("%sRequest"%node,attrib={'USERID':self.usps_user_id})
        self.add_text(SubElement(Request, 'BarcodeNumber'),tracking_link)
        return Request

    def send_cancel_request(self,xml_data):
        """Submit a label-cancellation request and return the raw
        response dict from send_request."""
        node = self.get_cancel_api()
        if self.usps_enviroment == "test":
            response = self.send_request('cancel_test','API=%s&XML=%s'%(node,xml_data))
        else:
            response = self.send_request('cancel_production','API=%s&XML=%s'%(node,xml_data))
        return response
class UspsDeliveryCarrier(models.Model):
    """USPS backend for odoo's ``delivery.carrier``: rating via RateV4,
    eVS label generation, tracking links and label cancellation."""
    _inherit = "delivery.carrier"

    # USPS Web Tools account user id.
    usps_user_id=fields.Char(string="User ID")

    @api.model
    def get_usps_packages(self,sdk, order=None):
        """Build one RateV4 <Package> element per physical package of the
        order, grouped by product packaging."""
        result =[]
        packaging_obj = self.env['product.packaging']
        recipient = order.partner_shipping_id if order.partner_shipping_id else order.partner_id
        shipper = order.warehouse_id.partner_id
        package_items = self.wk_get_order_package(order=order)
        items=self.wk_group_by('packaging_id',package_items)
        # NOTE(review): assumes ``usps_servicetype`` (and its
        # usps_firstclassmailtype) is configured on the carrier — confirm
        # against the module's views/fields.
        service = self.usps_servicetype
        for order_packaging_id, wk_package_ids in items:
            packaging_id = packaging_obj.browse(order_packaging_id)
            packaging_data = packaging_id.read(['shipper_package_code','usps_package_size','usps_machinable'])[0]
            packaging_data.pop('id')
            for index,package_id in enumerate(wk_package_ids,1):
                data =dict(
                    index = index,
                    weight =package_id.get('weight'),
                    usps_service_code = service.code,
                    usps_firstclassmailtype = service.usps_firstclassmailtype.code,
                    zip_origin = shipper.zip,
                    zip_destination = recipient.zip,
                )
                data.update(packaging_data)
                result.append(sdk.contruct_req_package(data))
        return result

    @api.model
    def usps_get_shipping_price(self, order):
        """Rate the order through the RateV4 API; returns the raw response
        dict augmented with the order currency."""
        currency_id = self.get_shipment_currency_id(order)
        currency_code = currency_id.name
        config = self.wk_get_carrier_settings(['usps_user_id','prod_environment'])
        if config['prod_environment']:
            config['usps_enviroment'] = 'production'
        config['usps_currency'] = currency_code
        sdk = USPSAPI(**config)
        # BUG FIX: a second, unused fetch of the carrier settings (made
        # after the SDK was already constructed) was removed, along with
        # unused recipient/shipper/price locals.
        packages =self.get_usps_packages(sdk,order=order)
        rate_req = sdk.contruct_rate_request(packages)
        rate_req_xml = sdk.prettify(rate_req)
        response = sdk.send_rate_request(rate_req_xml)
        response['currency'] = currency_code
        response['currency_id'] =currency_id
        return response

    @api.model
    def usps_rate_shipment(self, order):
        """odoo rating hook: normalise the raw response into the
        price/error/warning structure delivery.carrier expects."""
        response = self.usps_get_shipping_price(order)
        if not response.get('error_message'):response['error_message'] = None
        if not response.get('price'):response['price'] = 0
        if not response.get('warning_message'):response['warning_message'] = None
        if not response.get('success'):return response
        price = self.convert_shipment_price(response)
        response['price'] = price
        return response

    @api.one
    def usps_send_shipping(self, pickings):
        """Generate one eVS label per package of the picking.

        Returns the aggregate dict (weight, comma-joined tracking numbers,
        label attachments) odoo's send_shipping protocol expects; raises
        ValidationError on any API error.
        """
        self.wk_validate_data(pickings=pickings)
        result = {'exact_price': 0, 'weight': 0, "date_delivery": None,
                  'tracking_number': '', 'attachments': []}
        currency_id = self.get_shipment_currency_id(pickings=pickings)
        currency_code = currency_id.name
        shipper_info = self.get_shipment_shipper_address(picking=pickings)
        shipper_info['company_name'] = pickings.company_id and pickings.company_id.name or ""
        recipient_info = self.get_shipment_recipient_address(picking=pickings)
        recipient_info['company_name'] = pickings.company_id and pickings.company_id.name or ""
        config = self.wk_get_carrier_settings(['usps_user_id','prod_environment'])
        config['usps_currency'] = currency_code
        if config['prod_environment']:
            config['usps_enviroment'] = 'production'
        sdk = USPSAPI(**config)
        package_ids = pickings.package_ids
        service = self.usps_servicetype.code
        reference = pickings.origin
        number_of_packages = len(pickings.package_ids)
        tracking_numbers = []
        for index,package_id in enumerate(package_ids,1):
            weight=self._get_api_weight(package_id.shipping_weight) or (self.default_product_weight)
            pkg_data = package_id.packaging_id.read(['height', 'width','length','cover_amount','shipper_package_code',
                                                     'usps_package_size','usps_machinable','usps_signature'])[0]
            ship_data = dict(
                weight =weight,
                index = index,
                number_of_packages =number_of_packages,
                usps_servicetype =service,
                reference = reference,
            )
            ship_data.update(pkg_data)
            ship_request = sdk.contruct_ship_request(ship_data,shipper_info,recipient_info)
            ship_req_xml = sdk.prettify(ship_request)
            response = sdk.send_ship_request(ship_req_xml)
            if response.get('error_message'):
                raise ValidationError(response.get('error_message'))
            tracking_result = response.get('tracking_result')
            tracking_numbers+=tracking_result.keys()
            result['weight'] += weight
            if tracking_result:
                result['attachments'] += list(tracking_result.values())
        result['tracking_number'] += ','.join(tracking_numbers)
        pickings.number_of_packages= number_of_packages
        return result

    @api.model
    def usps_get_tracking_link(self, pickings):
        """Public tracking URL for the picking's tracking reference."""
        return USPSAPI.get_tracking_link(pickings.carrier_tracking_ref)

    @api.model
    def usps_cancel_shipment(self, pickings):
        """Cancel the label attached to the picking; raises UserError when
        USPS refuses or the request fails."""
        currency_id = self.get_shipment_currency_id(pickings=pickings)
        currency_code = currency_id.name
        config = self.wk_get_carrier_settings(['usps_user_id','prod_environment'])
        config['usps_currency'] = currency_code
        if config['prod_environment']:
            config['usps_enviroment'] = 'production'
        sdk = USPSAPI(**config)
        tracking_link = pickings.carrier_tracking_ref
        cancel_request = sdk.construct_cancel_request(tracking_link)
        cancel_req_xml = sdk.prettify(cancel_request)
        response = sdk.send_cancel_request(cancel_req_xml)
        if response:
            if response['success']:
                if response['root'].findtext('Status') == "Not Cancelled":
                    raise UserError("Shipment Cancellation Error:\n\nMessage: {}".format(response['root'].findtext('Reason')))
            else :
                raise UserError(response['error_message'])
        else:
            raise UserError("Could not fetch response. Please check your request data !!!")
acf2f0975651667714ee3db02be0cb965d61fe4d | 161 | py | Python | Solutions by Programming Task/Fibonacci sequence/Python/YCombinator.py | iriszero48/Trash | f93c7f36eb860ae15e5c95db6d1d28ede10698c2 | [
"MIT"
] | null | null | null | Solutions by Programming Task/Fibonacci sequence/Python/YCombinator.py | iriszero48/Trash | f93c7f36eb860ae15e5c95db6d1d28ede10698c2 | [
"MIT"
] | null | null | null | Solutions by Programming Task/Fibonacci sequence/Python/YCombinator.py | iriszero48/Trash | f93c7f36eb860ae15e5c95db6d1d28ede10698c2 | [
"MIT"
] | null | null | null | Fib = lambda x: (lambda f: (lambda x: x(x))(lambda y: f(lambda *args: y(y)(*args))))(lambda f: lambda n: 0 if n == 0 else (1 if n == 1 else f(n-1) + f(n-2)))(x)
| 80.5 | 160 | 0.552795 |
acf2f0b4615a1465f8337ea40170fea6dc405dc3 | 6,556 | py | Python | paddlemm/metrics/fusion.py | njustkmg/PaddleMM | 92ae66d6e27c7a666820bc7baf8fd8fa2bd74aa5 | [
"Apache-2.0"
] | 42 | 2022-01-05T13:49:48.000Z | 2022-03-30T20:20:18.000Z | paddlemm/metrics/fusion.py | njustkmg/PaddleMM | 92ae66d6e27c7a666820bc7baf8fd8fa2bd74aa5 | [
"Apache-2.0"
] | null | null | null | paddlemm/metrics/fusion.py | njustkmg/PaddleMM | 92ae66d6e27c7a666820bc7baf8fd8fa2bd74aa5 | [
"Apache-2.0"
] | 5 | 2022-01-19T00:27:24.000Z | 2022-03-23T08:29:50.000Z | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
def average_precision(x, y):
    """Label-ranking average precision.

    :param x: score matrix; x[i, j] is the classifier output of instance i
              for class j.
    :param y: binary label matrix of the same shape (1 = relevant).
    :return: mean average precision over instances whose label row is
             neither all-zero nor all-one (the usual multi-label
             convention).
    """
    def _instance_ap(scores, labels):
        # Labels reordered by descending score.
        ranked = labels[np.argsort(-scores)]
        hits = 0
        precision_sum = 0
        for rank, label in enumerate(ranked, start=1):
            if label == 1:
                hits += 1
                precision_sum += hits / rank
        return precision_sum / hits

    n, d = x.shape
    if x.shape[0] != y.shape[0]:
        print("num of instances for output and ground truth is different!!")
    if x.shape[1] != y.shape[1]:
        print("dim of output and ground truth is different!!")
    total = 0
    counted = 0
    for i in range(n):
        positives = np.sum(y[i])
        if positives in range(1, d):
            total += _instance_ap(x[i], y[i])
            counted += 1
    return total / counted
def coverage(x, y):
    """Coverage: how far down the ranked label list one must go, on
    average, to reach a relevant label.

    :param x: score matrix; x[i, j] is the output of instance i for class j.
    :param y: binary label matrix of the same shape (1 = relevant).
    :return: the coverage (mean depth minus one).
    """
    def _instance_cover(scores, labels):
        # Labels reordered by ascending score; the first positive found
        # here is the lowest-ranked relevant label.
        ranked = labels[np.argsort(scores)]
        remaining = scores.shape[0]
        for position, label in enumerate(ranked):
            if label == 1:
                remaining -= position
                break
        return remaining

    n, d = x.shape
    if x.shape[0] != y.shape[0]:
        print("num of instances for output and ground truth is different!!")
    if x.shape[1] != y.shape[1]:
        print("dim of output and ground truth is different!!")
    depth_sum = 0
    for i in range(n):
        depth_sum += _instance_cover(x[i], y[i])
    return depth_sum / n - 1
def example_auc(x, y):
    """Example-based AUC: for each instance, the fraction of
    (positive, negative) label pairs ranked correctly, averaged over
    instances with mixed labels.

    :param x: score matrix; x[i, j] is the output of instance i for class j.
    :param y: binary label matrix of the same shape (1 = relevant).
    """
    def _instance_auc(scores, labels):
        ranked = labels[np.argsort(scores)]  # ascending by score
        pos = 0
        neg = 0
        ordered_pairs = 0
        for label in ranked:
            if label == 1:
                pos += 1
                # Every negative seen so far scores lower: correct pair.
                ordered_pairs += neg
            if label == 0:
                neg += 1
        return ordered_pairs / (pos * neg)

    n, d = x.shape
    if x.shape[0] != y.shape[0]:
        print("num of instances for output and ground truth is different!!")
    if x.shape[1] != y.shape[1]:
        print("dim of output and ground truth is different!!")
    total = 0
    counted = 0
    for i in range(n):
        positives = np.sum(y[i])
        if positives in range(1, d):
            total += _instance_auc(x[i], y[i])
            counted += 1
    return total / counted
def macro_auc(x, y):
    """Macro-averaged AUC: the per-label AUC averaged over labels that
    have at least one positive and one negative instance.

    :param x: score matrix; x[i, j] is the output of instance i for class j.
    :param y: binary label matrix of the same shape (1 = relevant).
    """
    def _label_auc(scores, labels):
        ranked = labels[np.argsort(scores)]  # ascending by score
        pos = 0
        neg = 0
        ordered_pairs = 0
        for label in ranked:
            if label == 1:
                pos += 1
                ordered_pairs += neg
            if label == 0:
                neg += 1
        return ordered_pairs / (pos * neg)

    n, d = x.shape
    if x.shape[0] != y.shape[0]:
        print("num of instances for output and ground truth is different!!")
    if x.shape[1] != y.shape[1]:
        print("dim of output and ground truth is different!!")
    total = 0
    counted = 0
    for j in range(d):
        positives = np.sum(y[:, j])
        if positives in range(1, n):
            counted += 1
            total += _label_auc(x[:, j], y[:, j])
    return total / counted
def micro_auc(x, y):
    """Micro-averaged AUC: all instance/label cells flattened into one
    ranking problem.

    :param x: score matrix; x[i, j] is the output of instance i for class j.
    :param y: binary label matrix of the same shape (1 = relevant).
    """
    def _flat_auc(scores, labels):
        ranked = labels[np.argsort(scores)]  # ascending by score
        pos = 0
        neg = 0
        ordered_pairs = 0
        for label in ranked:
            if label == 1:
                pos += 1
                ordered_pairs += neg
            if label == 0:
                neg += 1
        return ordered_pairs / (pos * neg)

    n, d = x.shape
    if x.shape[0] != y.shape[0]:
        print("num of instances for output and ground truth is different!!")
    if x.shape[1] != y.shape[1]:
        print("dim of output and ground truth is different!!")
    flat_scores = x.reshape(n * d)
    flat_labels = y.reshape(n * d)
    return _flat_auc(flat_scores, flat_labels)
def ranking_loss(x, y):
    """Ranking loss: the fraction of (positive, negative) label pairs
    ordered wrongly, averaged over instances with mixed labels.

    :param x: score matrix; x[i, j] is the output of instance i for class j.
    :param y: binary label matrix of the same shape (1 = relevant).
    """
    def _instance_rl(scores, labels):
        ranked = labels[np.argsort(scores)]  # ascending by score
        pos_seen = 0
        neg = 0
        wrong_pairs = 0
        for label in ranked:
            if label == 1:
                pos_seen += 1
            if label == 0:
                # Every positive already seen scores lower: a wrong pair.
                wrong_pairs += pos_seen
                neg += 1
        return wrong_pairs / (pos_seen * neg)

    n, d = x.shape
    if x.shape[0] != y.shape[0]:
        print("num of instances for output and ground truth is different!!")
    if x.shape[1] != y.shape[1]:
        print("dim of output and ground truth is different!!")
    total = 0
    counted = 0
    for i in range(n):
        positives = np.sum(y[i])
        if positives in range(1, d):
            total += _instance_rl(x[i], y[i])
            counted += 1
    return total / counted
acf2f1aa1f7e5256253ccc0745129309bf57cea7 | 1,521 | py | Python | airline/preprocess.py | arita37/data | 0d1a38fd9b564cfb9c34ad521e7df2b3b6e2316b | [
"MIT"
] | null | null | null | airline/preprocess.py | arita37/data | 0d1a38fd9b564cfb9c34ad521e7df2b3b6e2316b | [
"MIT"
] | null | null | null | airline/preprocess.py | arita37/data | 0d1a38fd9b564cfb9c34ad521e7df2b3b6e2316b | [
"MIT"
] | 1 | 2022-02-14T18:18:38.000Z | 2022-02-14T18:18:38.000Z | import pandas as pd
import random
import os
import sys
import numpy as np
# Read File
path_repo_root = os.path.dirname(os.path.dirname(
os.path.dirname(os.path.abspath(__file__)))) + "/"
print("path_repo_root", path_repo_root)
sys.path.append(path_repo_root)
folder = 'raw/'
train = pd.read_csv(folder+'train.csv')
test = pd.read_csv(folder+'test.csv')
train.drop(['Unnamed: 0'], axis=1, inplace=True)
train.drop(['Arrival Delay in Minutes'], axis=1, inplace=True)
test.drop(['Unnamed: 0'], axis=1, inplace=True)
test.drop(['Arrival Delay in Minutes'], axis=1, inplace=True)
train.satisfaction = [1 if each ==
"satisfied" else 0 for each in train.satisfaction]
test.satisfaction = [1 if each ==
"satisfied" else 0 for each in test.satisfaction]
# saving train
feature_tr = train.drop(["satisfaction"], axis=1)
target_tr = train[["satisfaction", "id"]]
feature_tr.to_csv("train/features.csv", index=False)
target_tr.to_csv("train/target.csv", index=False)
features = dict(method='zip', archive_name='features.csv')
target = dict(method='zip', archive_name='target.csv')
feature_tr.to_csv('train/features.zip', index=False, compression=features)
target_tr.to_csv('train/target.zip', index=False, compression=target)
# saving test
feature_test = test.drop(["satisfaction"], axis=1)
target_test = test[["satisfaction", "id"]]
feature_test.to_csv('test/features.zip', index=False, compression=features)
target_test.to_csv('test/target.zip', index=False, compression=target)
| 37.097561 | 75 | 0.723866 |
acf2f262fcb80cd51e0b90f4cd73eb3a9f9f4b68 | 157 | py | Python | personal_site/apps.py | sebastianmihai01/sebastianmihai01.github.io | 3dbba3643ba46f1bc9971f8e162bcf598fbdecff | [
"MIT"
] | 2 | 2021-09-26T09:45:09.000Z | 2021-09-29T11:35:59.000Z | personal_site/apps.py | sebastianmihai01/sebastianmihai01.github.io | 3dbba3643ba46f1bc9971f8e162bcf598fbdecff | [
"MIT"
] | null | null | null | personal_site/apps.py | sebastianmihai01/sebastianmihai01.github.io | 3dbba3643ba46f1bc9971f8e162bcf598fbdecff | [
"MIT"
] | null | null | null | from django.apps import AppConfig
class PersonalSiteConfig(AppConfig):
    """Django application configuration for the ``personal_site`` app."""

    name = 'personal_site'
    default_auto_field = 'django.db.models.BigAutoField'
| 22.428571 | 56 | 0.77707 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.